##// END OF EJS Templates
security: use safe escaped version of description for repo and repo group to potentially...
ergo -
r1830:d786fdd7 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
@@ -1,1551 +1,1551 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26 from collections import namedtuple
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31
32 32 from pylons.i18n.translation import _
33 33 from pylons.i18n.translation import lazy_ugettext
34 34 from pyramid.threadlocal import get_current_request
35 35 from sqlalchemy import or_
36 36
37 37 from rhodecode import events
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from rhodecode.lib.compat import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.markup_renderer import (
43 43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 45 from rhodecode.lib.vcs.backends.base import (
46 46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 48 from rhodecode.lib.vcs.exceptions import (
49 49 CommitDoesNotExistError, EmptyRepositoryError)
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 52 from rhodecode.model.comment import CommentsModel
53 53 from rhodecode.model.db import (
54 54 PullRequest, PullRequestReviewers, ChangesetStatus,
55 55 PullRequestVersion, ChangesetComment, Repository)
56 56 from rhodecode.model.meta import Session
57 57 from rhodecode.model.notification import NotificationModel, \
58 58 EmailNotificationModel
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.settings import VcsSettingsModel
61 61
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
66 66 # Data structure to hold the response data when updating commits during a pull
67 67 # request update.
68 68 UpdateResponse = namedtuple('UpdateResponse', [
69 69 'executed', 'reason', 'new', 'old', 'changes',
70 70 'source_changed', 'target_changed'])
71 71
72 72
class PullRequestModel(BaseModel):
    """
    Business logic for pull requests: permission checks, listing/counting
    queries, creation, merging and updating of commits.
    """

    # ORM class this model operates on (consumed by BaseModel helpers
    # such as _get_instance).
    cls = PullRequest

    # number of context lines to include around diff hunks
    DIFF_CONTEXT = 3

    # translated, user-facing explanation for each merge failure reason
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            'This pull request cannot be merged because of an unhandled'
            ' exception.'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of merge conflicts.'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            'This pull request could not be merged because push to target'
            ' failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            'This pull request cannot be merged because the target is not a'
            ' head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            'This pull request cannot be merged because the source contains'
            ' more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            'This pull request cannot be merged because the target has'
            ' multiple heads.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            'This pull request cannot be merged because the target repository'
            ' is locked.'),
        # kept for backward compatibility with already-stored statuses
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            'This pull request cannot be merged because the target or the '
            'source reference is missing.'),
        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be merged because the target '
            'reference is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be merged because the source '
            'reference is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of conflicts related '
            'to sub repositories.'),
    }

    # translated, user-facing explanation for each update failure reason
    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
133 133
134 134 def __get_pull_request(self, pull_request):
135 135 return self._get_instance((
136 136 PullRequest, PullRequestVersion), pull_request)
137 137
138 138 def _check_perms(self, perms, pull_request, user, api=False):
139 139 if not api:
140 140 return h.HasRepoPermissionAny(*perms)(
141 141 user=user, repo_name=pull_request.target_repo.repo_name)
142 142 else:
143 143 return h.HasRepoPermissionAnyApi(*perms)(
144 144 user=user, repo_name=pull_request.target_repo.repo_name)
145 145
146 146 def check_user_read(self, pull_request, user, api=False):
147 147 _perms = ('repository.admin', 'repository.write', 'repository.read',)
148 148 return self._check_perms(_perms, pull_request, user, api)
149 149
150 150 def check_user_merge(self, pull_request, user, api=False):
151 151 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
152 152 return self._check_perms(_perms, pull_request, user, api)
153 153
154 154 def check_user_update(self, pull_request, user, api=False):
155 155 owner = user.user_id == pull_request.user_id
156 156 return self.check_user_merge(pull_request, user, api) or owner
157 157
158 158 def check_user_delete(self, pull_request, user):
159 159 owner = user.user_id == pull_request.user_id
160 160 _perms = ('repository.admin',)
161 161 return self._check_perms(_perms, pull_request, user) or owner
162 162
163 163 def check_user_change_status(self, pull_request, user, api=False):
164 164 reviewer = user.user_id in [x.user_id for x in
165 165 pull_request.reviewers]
166 166 return self.check_user_update(pull_request, user, api) or reviewer
167 167
168 168 def get(self, pull_request):
169 169 return self.__get_pull_request(pull_request)
170 170
171 171 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
172 172 opened_by=None, order_by=None,
173 173 order_dir='desc'):
174 174 repo = None
175 175 if repo_name:
176 176 repo = self._get_repo(repo_name)
177 177
178 178 q = PullRequest.query()
179 179
180 180 # source or target
181 181 if repo and source:
182 182 q = q.filter(PullRequest.source_repo == repo)
183 183 elif repo:
184 184 q = q.filter(PullRequest.target_repo == repo)
185 185
186 186 # closed,opened
187 187 if statuses:
188 188 q = q.filter(PullRequest.status.in_(statuses))
189 189
190 190 # opened by filter
191 191 if opened_by:
192 192 q = q.filter(PullRequest.user_id.in_(opened_by))
193 193
194 194 if order_by:
195 195 order_map = {
196 196 'name_raw': PullRequest.pull_request_id,
197 197 'title': PullRequest.title,
198 198 'updated_on_raw': PullRequest.updated_on,
199 199 'target_repo': PullRequest.target_repo_id
200 200 }
201 201 if order_dir == 'asc':
202 202 q = q.order_by(order_map[order_by].asc())
203 203 else:
204 204 q = q.order_by(order_map[order_by].desc())
205 205
206 206 return q
207 207
208 208 def count_all(self, repo_name, source=False, statuses=None,
209 209 opened_by=None):
210 210 """
211 211 Count the number of pull requests for a specific repository.
212 212
213 213 :param repo_name: target or source repo
214 214 :param source: boolean flag to specify if repo_name refers to source
215 215 :param statuses: list of pull request statuses
216 216 :param opened_by: author user of the pull request
217 217 :returns: int number of pull requests
218 218 """
219 219 q = self._prepare_get_all_query(
220 220 repo_name, source=source, statuses=statuses, opened_by=opened_by)
221 221
222 222 return q.count()
223 223
224 224 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
225 225 offset=0, length=None, order_by=None, order_dir='desc'):
226 226 """
227 227 Get all pull requests for a specific repository.
228 228
229 229 :param repo_name: target or source repo
230 230 :param source: boolean flag to specify if repo_name refers to source
231 231 :param statuses: list of pull request statuses
232 232 :param opened_by: author user of the pull request
233 233 :param offset: pagination offset
234 234 :param length: length of returned list
235 235 :param order_by: order of the returned list
236 236 :param order_dir: 'asc' or 'desc' ordering direction
237 237 :returns: list of pull requests
238 238 """
239 239 q = self._prepare_get_all_query(
240 240 repo_name, source=source, statuses=statuses, opened_by=opened_by,
241 241 order_by=order_by, order_dir=order_dir)
242 242
243 243 if length:
244 244 pull_requests = q.limit(length).offset(offset).all()
245 245 else:
246 246 pull_requests = q.all()
247 247
248 248 return pull_requests
249 249
250 250 def count_awaiting_review(self, repo_name, source=False, statuses=None,
251 251 opened_by=None):
252 252 """
253 253 Count the number of pull requests for a specific repository that are
254 254 awaiting review.
255 255
256 256 :param repo_name: target or source repo
257 257 :param source: boolean flag to specify if repo_name refers to source
258 258 :param statuses: list of pull request statuses
259 259 :param opened_by: author user of the pull request
260 260 :returns: int number of pull requests
261 261 """
262 262 pull_requests = self.get_awaiting_review(
263 263 repo_name, source=source, statuses=statuses, opened_by=opened_by)
264 264
265 265 return len(pull_requests)
266 266
267 267 def get_awaiting_review(self, repo_name, source=False, statuses=None,
268 268 opened_by=None, offset=0, length=None,
269 269 order_by=None, order_dir='desc'):
270 270 """
271 271 Get all pull requests for a specific repository that are awaiting
272 272 review.
273 273
274 274 :param repo_name: target or source repo
275 275 :param source: boolean flag to specify if repo_name refers to source
276 276 :param statuses: list of pull request statuses
277 277 :param opened_by: author user of the pull request
278 278 :param offset: pagination offset
279 279 :param length: length of returned list
280 280 :param order_by: order of the returned list
281 281 :param order_dir: 'asc' or 'desc' ordering direction
282 282 :returns: list of pull requests
283 283 """
284 284 pull_requests = self.get_all(
285 285 repo_name, source=source, statuses=statuses, opened_by=opened_by,
286 286 order_by=order_by, order_dir=order_dir)
287 287
288 288 _filtered_pull_requests = []
289 289 for pr in pull_requests:
290 290 status = pr.calculated_review_status()
291 291 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
292 292 ChangesetStatus.STATUS_UNDER_REVIEW]:
293 293 _filtered_pull_requests.append(pr)
294 294 if length:
295 295 return _filtered_pull_requests[offset:offset+length]
296 296 else:
297 297 return _filtered_pull_requests
298 298
299 299 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
300 300 opened_by=None, user_id=None):
301 301 """
302 302 Count the number of pull requests for a specific repository that are
303 303 awaiting review from a specific user.
304 304
305 305 :param repo_name: target or source repo
306 306 :param source: boolean flag to specify if repo_name refers to source
307 307 :param statuses: list of pull request statuses
308 308 :param opened_by: author user of the pull request
309 309 :param user_id: reviewer user of the pull request
310 310 :returns: int number of pull requests
311 311 """
312 312 pull_requests = self.get_awaiting_my_review(
313 313 repo_name, source=source, statuses=statuses, opened_by=opened_by,
314 314 user_id=user_id)
315 315
316 316 return len(pull_requests)
317 317
318 318 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
319 319 opened_by=None, user_id=None, offset=0,
320 320 length=None, order_by=None, order_dir='desc'):
321 321 """
322 322 Get all pull requests for a specific repository that are awaiting
323 323 review from a specific user.
324 324
325 325 :param repo_name: target or source repo
326 326 :param source: boolean flag to specify if repo_name refers to source
327 327 :param statuses: list of pull request statuses
328 328 :param opened_by: author user of the pull request
329 329 :param user_id: reviewer user of the pull request
330 330 :param offset: pagination offset
331 331 :param length: length of returned list
332 332 :param order_by: order of the returned list
333 333 :param order_dir: 'asc' or 'desc' ordering direction
334 334 :returns: list of pull requests
335 335 """
336 336 pull_requests = self.get_all(
337 337 repo_name, source=source, statuses=statuses, opened_by=opened_by,
338 338 order_by=order_by, order_dir=order_dir)
339 339
340 340 _my = PullRequestModel().get_not_reviewed(user_id)
341 341 my_participation = []
342 342 for pr in pull_requests:
343 343 if pr in _my:
344 344 my_participation.append(pr)
345 345 _filtered_pull_requests = my_participation
346 346 if length:
347 347 return _filtered_pull_requests[offset:offset+length]
348 348 else:
349 349 return _filtered_pull_requests
350 350
351 351 def get_not_reviewed(self, user_id):
352 352 return [
353 353 x.pull_request for x in PullRequestReviewers.query().filter(
354 354 PullRequestReviewers.user_id == user_id).all()
355 355 ]
356 356
357 357 def _prepare_participating_query(self, user_id=None, statuses=None,
358 358 order_by=None, order_dir='desc'):
359 359 q = PullRequest.query()
360 360 if user_id:
361 361 reviewers_subquery = Session().query(
362 362 PullRequestReviewers.pull_request_id).filter(
363 363 PullRequestReviewers.user_id == user_id).subquery()
364 364 user_filter= or_(
365 365 PullRequest.user_id == user_id,
366 366 PullRequest.pull_request_id.in_(reviewers_subquery)
367 367 )
368 368 q = PullRequest.query().filter(user_filter)
369 369
370 370 # closed,opened
371 371 if statuses:
372 372 q = q.filter(PullRequest.status.in_(statuses))
373 373
374 374 if order_by:
375 375 order_map = {
376 376 'name_raw': PullRequest.pull_request_id,
377 377 'title': PullRequest.title,
378 378 'updated_on_raw': PullRequest.updated_on,
379 379 'target_repo': PullRequest.target_repo_id
380 380 }
381 381 if order_dir == 'asc':
382 382 q = q.order_by(order_map[order_by].asc())
383 383 else:
384 384 q = q.order_by(order_map[order_by].desc())
385 385
386 386 return q
387 387
388 388 def count_im_participating_in(self, user_id=None, statuses=None):
389 389 q = self._prepare_participating_query(user_id, statuses=statuses)
390 390 return q.count()
391 391
392 392 def get_im_participating_in(
393 393 self, user_id=None, statuses=None, offset=0,
394 394 length=None, order_by=None, order_dir='desc'):
395 395 """
396 396 Get all Pull requests that i'm participating in, or i have opened
397 397 """
398 398
399 399 q = self._prepare_participating_query(
400 400 user_id, statuses=statuses, order_by=order_by,
401 401 order_dir=order_dir)
402 402
403 403 if length:
404 404 pull_requests = q.limit(length).offset(offset).all()
405 405 else:
406 406 pull_requests = q.all()
407 407
408 408 return pull_requests
409 409
    def get_versions(self, pull_request):
        """
        Return all `PullRequestVersion` rows of the given pull request,
        ordered by version id ascending (oldest first).

        NOTE(review): an earlier docstring claimed descending order, but
        the query below uses ``.asc()`` — callers receive oldest-to-newest.
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
418 418
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, title, description=None,
               reviewer_data=None):
        """
        Create and persist a new pull request.

        :param created_by: creating user (instance, id or username)
        :param source_repo: source repository (instance, id or name)
        :param source_ref: full source ref string
        :param target_repo: target repository (instance, id or name)
        :param target_ref: full target ref string
        :param revisions: list of commit ids contained in the pull request
        :param reviewers: iterable of ``(user_id, reasons, mandatory)``
            tuples describing the initial reviewer set
        :param title: pull request title
        :param description: optional description text
        :param reviewer_data: optional reviewer-rule metadata
        :returns: the newly created `PullRequest`
        """
        created_by_user = self._get_user(created_by)
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data

        Session().add(pull_request)
        # flush so the pull request gets an id before reviewers reference it
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory
            Session().add(reviewer)

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )

        # notify and fire integration hooks after everything is persisted
        self.notify_reviewers(pull_request, reviewer_ids)
        self._trigger_pull_request_hook(
            pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            created_by_user, pull_request)

        return pull_request
479 479
480 480 def _trigger_pull_request_hook(self, pull_request, user, action):
481 481 pull_request = self.__get_pull_request(pull_request)
482 482 target_scm = pull_request.target_repo.scm_instance()
483 483 if action == 'create':
484 484 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
485 485 elif action == 'merge':
486 486 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
487 487 elif action == 'close':
488 488 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
489 489 elif action == 'review_status_change':
490 490 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
491 491 elif action == 'update':
492 492 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
493 493 else:
494 494 return
495 495
496 496 trigger_hook(
497 497 username=user.username,
498 498 repo_name=pull_request.target_repo.repo_name,
499 499 repo_alias=target_scm.alias,
500 500 pull_request=pull_request)
501 501
502 502 def _get_commit_ids(self, pull_request):
503 503 """
504 504 Return the commit ids of the merged pull request.
505 505
506 506 This method is not dealing correctly yet with the lack of autoupdates
507 507 nor with the implicit target updates.
508 508 For example: if a commit in the source repo is already in the target it
509 509 will be reported anyways.
510 510 """
511 511 merge_rev = pull_request.merge_rev
512 512 if merge_rev is None:
513 513 raise ValueError('This pull request was not merged yet')
514 514
515 515 commit_ids = list(pull_request.revisions)
516 516 if merge_rev not in commit_ids:
517 517 commit_ids.append(merge_rev)
518 518
519 519 return commit_ids
520 520
521 521 def merge(self, pull_request, user, extras):
522 522 log.debug("Merging pull request %s", pull_request.pull_request_id)
523 523 merge_state = self._merge_pull_request(pull_request, user, extras)
524 524 if merge_state.executed:
525 525 log.debug(
526 526 "Merge was successful, updating the pull request comments.")
527 527 self._comment_and_close_pr(pull_request, user, merge_state)
528 528
529 529 self._log_audit_action(
530 530 'repo.pull_request.merge',
531 531 {'merge_state': merge_state.__dict__},
532 532 user, pull_request)
533 533
534 534 else:
535 535 log.warn("Merge failed, not updating the pull request.")
536 536 return merge_state
537 537
    def _merge_pull_request(self, pull_request, user, extras):
        """
        Perform the actual vcs-level merge and return its merge state.

        Refreshes the target reference, prepares the hooks callback daemon
        and delegates the merge (optionally as a rebase) to the target
        repository backend.
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()
        # make sure we merge onto the current tip of the target ref
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        message = _(
            'Merge pull request #%(pr_id)s from '
            '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
            'pr_id': pull_request.pull_request_id,
            'source_repo': source_vcs.name,
            'source_ref_name': pull_request.source_ref_parts.name,
            'pr_title': pull_request.title
        }

        workspace_id = self._workspace_id(pull_request)
        use_rebase = self._use_rebase_for_merging(pull_request)

        # the callback daemon lets hooks of the pushed merge call back
        # into this process
        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
            merge_state = target_vcs.merge(
                target_ref, source_vcs, pull_request.source_ref_parts,
                workspace_id, user_name=user.username,
                user_email=user.email, message=message, use_rebase=use_rebase)
        return merge_state
570 570
    def _comment_and_close_pr(self, pull_request, user, merge_state):
        """
        After a successful merge: record the merge revision, leave a
        closing comment, invalidate target repo caches and fire the
        'merge' hook.
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()

        # the comment is created with closing_pr=True, which also closes
        # the pull request
        CommentsModel().create(
            text=unicode(_('Pull request merged and closed')),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self._trigger_pull_request_hook(pull_request, user, 'merge')
591 591
592 592 def has_valid_update_type(self, pull_request):
593 593 source_ref_type = pull_request.source_ref_parts.type
594 594 return source_ref_type in ['book', 'branch', 'tag']
595 595
    def update_commits(self, pull_request):
        """
        Get the updated list of commits for the pull request
        and return the new pull request version and the list
        of commits processed by this update action
        """
        pull_request = self.__get_pull_request(pull_request)
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_id = pull_request.source_ref_parts.commit_id

        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id

        # only book/branch/tag source refs can be updated
        if not self.has_valid_update_type(pull_request):
            log.debug(
                "Skipping update of pull request %s due to ref type: %s",
                pull_request, source_ref_type)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.WRONG_REF_TYPE,
                old=pull_request, new=None, changes=None,
                source_changed=False, target_changed=False)

        # source repo
        source_repo = pull_request.source_repo.scm_instance()
        try:
            source_commit = source_repo.get_commit(commit_id=source_ref_name)
        except CommitDoesNotExistError:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_SOURCE_REF,
                old=pull_request, new=None, changes=None,
                source_changed=False, target_changed=False)

        source_changed = source_ref_id != source_commit.raw_id

        # target repo
        target_repo = pull_request.target_repo.scm_instance()
        try:
            target_commit = target_repo.get_commit(commit_id=target_ref_name)
        except CommitDoesNotExistError:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, changes=None,
                source_changed=False, target_changed=False)
        target_changed = target_ref_id != target_commit.raw_id

        if not (source_changed or target_changed):
            log.debug("Nothing changed in pull request %s", pull_request)
            # NOTE(review): the two keyword arguments below look swapped,
            # but both flags are False in this branch so the response is
            # correct either way.
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.NO_CHANGE,
                old=pull_request, new=None, changes=None,
                source_changed=target_changed, target_changed=source_changed)

        change_in_found = 'target repo' if target_changed else 'source repo'
        log.debug('Updating pull request because of change in %s detected',
                  change_in_found)

        # Finally there is a need for an update, in case of source change
        # we create a new version, else just an update
        if source_changed:
            pull_request_version = self._create_version_from_snapshot(pull_request)
            self._link_comments_to_version(pull_request_version)
        else:
            try:
                ver = pull_request.versions[-1]
            except IndexError:
                ver = None

            pull_request.pull_request_version_id = \
                ver.pull_request_version_id if ver else None
            pull_request_version = pull_request

        # resolve the target commit again; symbolic refs follow the moved
        # target, explicit commit ids stay pinned
        try:
            if target_ref_type in ('tag', 'branch', 'book'):
                target_commit = target_repo.get_commit(target_ref_name)
            else:
                target_commit = target_repo.get_commit(target_ref_id)
        except CommitDoesNotExistError:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, changes=None,
                source_changed=source_changed, target_changed=target_changed)

        # re-compute commit ids
        old_commit_ids = pull_request.revisions
        pre_load = ["author", "branch", "date", "message"]
        commit_ranges = target_repo.compare(
            target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
            pre_load=pre_load)

        ancestor = target_repo.get_common_ancestor(
            target_commit.raw_id, source_commit.raw_id, source_repo)

        pull_request.source_ref = '%s:%s:%s' % (
            source_ref_type, source_ref_name, source_commit.raw_id)
        pull_request.target_ref = '%s:%s:%s' % (
            target_ref_type, target_ref_name, ancestor)

        pull_request.revisions = [
            commit.raw_id for commit in reversed(commit_ranges)]
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        new_commit_ids = pull_request.revisions

        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        # calculate commit and file changes
        changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)
        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # set comments as outdated if DIFFS changed
        CommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        commit_changes = (changes.added or changes.removed)
        file_node_changes = (
            file_changes.added or file_changes.modified or file_changes.removed)
        pr_has_changes = commit_changes or file_node_changes

        # Add an automatic comment to the pull request, in case
        # anything has changed
        if pr_has_changes:
            update_comment = CommentsModel().create(
                text=self._render_update_message(changes, file_changes),
                repo=pull_request.target_repo,
                user=pull_request.author,
                pull_request=pull_request,
                send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

            # Update status to "Under Review" for added commits
            for commit_id in changes.added:
                ChangesetStatusModel().set_status(
                    repo=pull_request.source_repo,
                    status=ChangesetStatus.STATUS_UNDER_REVIEW,
                    comment=update_comment,
                    user=pull_request.author,
                    pull_request=pull_request,
                    revision=commit_id)

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            changes.added, changes.common, changes.removed)
        log.debug(
            'Updated pull request with the following file changes: %s',
            file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)
        Session().commit()
        self._trigger_pull_request_hook(
            pull_request, pull_request.author, 'update')

        return UpdateResponse(
            executed=True, reason=UpdateFailureReason.NONE,
            old=pull_request, new=pull_request_version, changes=changes,
            source_changed=source_changed, target_changed=target_changed)
767 767
    def _create_version_from_snapshot(self, pull_request):
        """
        Create and persist a `PullRequestVersion` snapshotting the current
        state of `pull_request` (title, refs, merge state, revisions).

        :param pull_request: the `PullRequest` instance to snapshot
        :return: the flushed `PullRequestVersion` (primary key assigned)
        """
        version = PullRequestVersion()
        version.title = pull_request.title
        version.description = pull_request.description
        version.status = pull_request.status
        version.created_on = datetime.datetime.now()
        version.updated_on = pull_request.updated_on
        version.user_id = pull_request.user_id
        version.source_repo = pull_request.source_repo
        version.source_ref = pull_request.source_ref
        version.target_repo = pull_request.target_repo
        version.target_ref = pull_request.target_ref

        # carry over the cached merge state so the version reflects the
        # merge status at snapshot time
        version._last_merge_source_rev = pull_request._last_merge_source_rev
        version._last_merge_target_rev = pull_request._last_merge_target_rev
        version._last_merge_status = pull_request._last_merge_status
        version.shadow_merge_ref = pull_request.shadow_merge_ref
        version.merge_rev = pull_request.merge_rev
        version.reviewer_data = pull_request.reviewer_data

        version.revisions = pull_request.revisions
        version.pull_request = pull_request
        Session().add(version)
        # flush (not commit) so the version id is available to the caller
        Session().flush()

        return version
794 794
    def _generate_update_diffs(self, pull_request, pull_request_version):
        """
        Calculate the diffs of the previously stored version and of the
        current state of a pull request, used to work out what changed
        between updates.

        :return: tuple of prepared (old_diff_data, new_diff_data)
            `DiffProcessor` objects
        """
        # include the extra context lines that inline comments may need
        diff_context = (
            self.DIFF_CONTEXT +
            CommentsModel.needed_extra_diff_context())

        # diff of the stored (old) version
        source_repo = pull_request_version.source_repo
        source_ref_id = pull_request_version.source_ref_parts.commit_id
        target_ref_id = pull_request_version.target_ref_parts.commit_id
        old_diff = self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id, context=diff_context)

        # diff of the current pull request state
        source_repo = pull_request.source_repo
        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

        new_diff = self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id, context=diff_context)

        old_diff_data = diffs.DiffProcessor(old_diff)
        old_diff_data.prepare()
        new_diff_data = diffs.DiffProcessor(new_diff)
        new_diff_data.prepare()

        return old_diff_data, new_diff_data
820 820
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        # only comments not yet attached to any version (NULL version id)
        comments = ChangesetComment.query()\
            .filter(
                # TODO: johbo: Should we query for the repo at all here?
                # Pending decision on how comments of PRs are to be related
                # to either the source repo, the target repo or no repo at all.
                ChangesetComment.repo_id == pull_request.target_repo.repo_id,
                ChangesetComment.pull_request == pull_request,
                ChangesetComment.pull_request_version == None)\
            .order_by(ChangesetComment.comment_id.asc())

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
846 846
847 847 def _calculate_commit_id_changes(self, old_ids, new_ids):
848 848 added = [x for x in new_ids if x not in old_ids]
849 849 common = [x for x in new_ids if x in old_ids]
850 850 removed = [x for x in old_ids if x not in new_ids]
851 851 total = new_ids
852 852 return ChangeTuple(added, common, removed, total)
853 853
    def _calculate_file_changes(self, old_diff_data, new_diff_data):
        """
        Compare two prepared `DiffProcessor` results and classify file names
        as added/modified/removed between the old and new diff.

        :return: `FileChangeTuple` of (added, modified, removed) file names
        """
        # map of filename -> md5 of its raw diff in the old version
        old_files = OrderedDict()
        for diff_data in old_diff_data.parsed_diff:
            old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])

        added_files = []
        modified_files = []
        removed_files = []
        for diff_data in new_diff_data.parsed_diff:
            new_filename = diff_data['filename']
            new_hash = md5_safe(diff_data['raw_diff'])

            old_hash = old_files.get(new_filename)
            if not old_hash:
                # file is not present in old diff, means it's added
                added_files.append(new_filename)
            else:
                if new_hash != old_hash:
                    modified_files.append(new_filename)
                # now remove a file from old, since we have seen it already
                del old_files[new_filename]

        # removed files is when there are present in old, but not in NEW,
        # since we remove old files that are present in new diff, left-overs
        # if any should be the removed files
        removed_files.extend(old_files.keys())

        return FileChangeTuple(added_files, modified_files, removed_files)
883 883
884 884 def _render_update_message(self, changes, file_changes):
885 885 """
886 886 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
887 887 so it's always looking the same disregarding on which default
888 888 renderer system is using.
889 889
890 890 :param changes: changes named tuple
891 891 :param file_changes: file changes named tuple
892 892
893 893 """
894 894 new_status = ChangesetStatus.get_status_lbl(
895 895 ChangesetStatus.STATUS_UNDER_REVIEW)
896 896
897 897 changed_files = (
898 898 file_changes.added + file_changes.modified + file_changes.removed)
899 899
900 900 params = {
901 901 'under_review_label': new_status,
902 902 'added_commits': changes.added,
903 903 'removed_commits': changes.removed,
904 904 'changed_files': changed_files,
905 905 'added_files': file_changes.added,
906 906 'modified_files': file_changes.modified,
907 907 'removed_files': file_changes.removed,
908 908 }
909 909 renderer = RstTemplateRenderer()
910 910 return renderer.render('pull_request_update.mako', **params)
911 911
    def edit(self, pull_request, title, description, user):
        """
        Edit the title/description of a pull request and audit-log the change.

        :raises ValueError: if the pull request is already closed
        """
        pull_request = self.__get_pull_request(pull_request)
        old_data = pull_request.get_api_data(with_merge_state=False)
        if pull_request.is_closed():
            raise ValueError('This pull request is closed')
        # NOTE(review): the title is only updated when non-empty, while the
        # description is always overwritten -- confirm this asymmetry is
        # intended
        if title:
            pull_request.title = title
        pull_request.description = description
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self._log_audit_action(
            'repo.pull_request.edit', {'old_data': old_data},
            user, pull_request)
925 925
926 926 def update_reviewers(self, pull_request, reviewer_data, user):
927 927 """
928 928 Update the reviewers in the pull request
929 929
930 930 :param pull_request: the pr to update
931 931 :param reviewer_data: list of tuples
932 932 [(user, ['reason1', 'reason2'], mandatory_flag)]
933 933 """
934 934
935 935 reviewers = {}
936 936 for user_id, reasons, mandatory in reviewer_data:
937 937 if isinstance(user_id, (int, basestring)):
938 938 user_id = self._get_user(user_id).user_id
939 939 reviewers[user_id] = {
940 940 'reasons': reasons, 'mandatory': mandatory}
941 941
942 942 reviewers_ids = set(reviewers.keys())
943 943 pull_request = self.__get_pull_request(pull_request)
944 944 current_reviewers = PullRequestReviewers.query()\
945 945 .filter(PullRequestReviewers.pull_request ==
946 946 pull_request).all()
947 947 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
948 948
949 949 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
950 950 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
951 951
952 952 log.debug("Adding %s reviewers", ids_to_add)
953 953 log.debug("Removing %s reviewers", ids_to_remove)
954 954 changed = False
955 955 for uid in ids_to_add:
956 956 changed = True
957 957 _usr = self._get_user(uid)
958 958 reviewer = PullRequestReviewers()
959 959 reviewer.user = _usr
960 960 reviewer.pull_request = pull_request
961 961 reviewer.reasons = reviewers[uid]['reasons']
962 962 # NOTE(marcink): mandatory shouldn't be changed now
963 963 # reviewer.mandatory = reviewers[uid]['reasons']
964 964 Session().add(reviewer)
965 965 self._log_audit_action(
966 966 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
967 967 user, pull_request)
968 968
969 969 for uid in ids_to_remove:
970 970 changed = True
971 971 reviewers = PullRequestReviewers.query()\
972 972 .filter(PullRequestReviewers.user_id == uid,
973 973 PullRequestReviewers.pull_request == pull_request)\
974 974 .all()
975 975 # use .all() in case we accidentally added the same person twice
976 976 # this CAN happen due to the lack of DB checks
977 977 for obj in reviewers:
978 978 old_data = obj.get_dict()
979 979 Session().delete(obj)
980 980 self._log_audit_action(
981 981 'repo.pull_request.reviewer.delete',
982 982 {'old_data': old_data}, user, pull_request)
983 983
984 984 if changed:
985 985 pull_request.updated_on = datetime.datetime.now()
986 986 Session().add(pull_request)
987 987
988 988 self.notify_reviewers(pull_request, ids_to_add)
989 989 return ids_to_add, ids_to_remove
990 990
991 991 def get_url(self, pull_request, request=None, permalink=False):
992 992 if not request:
993 993 request = get_current_request()
994 994
995 995 if permalink:
996 996 return request.route_url(
997 997 'pull_requests_global',
998 998 pull_request_id=pull_request.pull_request_id,)
999 999 else:
1000 1000 return request.route_url('pullrequest_show',
1001 1001 repo_name=safe_str(pull_request.target_repo.repo_name),
1002 1002 pull_request_id=pull_request.pull_request_id,)
1003 1003
1004 1004 def get_shadow_clone_url(self, pull_request):
1005 1005 """
1006 1006 Returns qualified url pointing to the shadow repository. If this pull
1007 1007 request is closed there is no shadow repository and ``None`` will be
1008 1008 returned.
1009 1009 """
1010 1010 if pull_request.is_closed():
1011 1011 return None
1012 1012 else:
1013 1013 pr_url = urllib.unquote(self.get_url(pull_request))
1014 1014 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1015 1015
    def notify_reviewers(self, pull_request, reviewers_ids):
        """
        Create in-app notifications and emails for newly added reviewers.

        :param reviewers_ids: iterable of user ids to notify; no-op if empty
        """
        # notification to reviewers
        if not reviewers_ids:
            return

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
            repo_name=pr_target_repo.repo_name,
            pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
        }

        # pre-generate the subject for notification itself
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
1074 1074
    def delete(self, pull_request, user):
        """
        Delete a pull request: clean up its merge workspace, audit-log the
        deletion with a snapshot of the old data, and remove the db record.
        """
        pull_request = self.__get_pull_request(pull_request)
        old_data = pull_request.get_api_data(with_merge_state=False)
        self._cleanup_merge_workspace(pull_request)
        self._log_audit_action(
            'repo.pull_request.delete', {'old_data': old_data},
            user, pull_request)
        Session().delete(pull_request)
1083 1083
    def close_pull_request(self, pull_request, user):
        """
        Close a pull request: remove its merge workspace, flip the status to
        closed, fire the 'close' hook and record an audit log entry.
        """
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self._trigger_pull_request_hook(
            pull_request, pull_request.author, 'close')
        self._log_audit_action(
            'repo.pull_request.close', {}, user, pull_request)
1094 1094
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None):
        """
        Close a pull request leaving a status-change comment: the status is
        approved when the calculated review status is approved, rejected
        otherwise.

        :param message: optional comment text; a default status-change
            message is used when not given
        :return: tuple of (created comment, resulting status)
        """
        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            _('Closing with status change {transition_icon} {status}.')
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        Session().flush()
        events.trigger(events.PullRequestCommentEvent(pull_request, comment))
        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self._trigger_pull_request_hook(
                pull_request, user, 'review_status_change')

        # finally close the PR
        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)

        return comment, status
1149 1149
    def merge_status(self, pull_request):
        """
        Check whether `pull_request` can currently be merged server-side.

        :return: tuple of (bool merge_possible, human readable message)
        """
        if not self._is_merge_enabled(pull_request):
            return False, _('Server-side pull request merging is disabled.')
        if pull_request.is_closed():
            return False, _('This pull request is closed.')
        merge_possible, msg = self._check_repo_requirements(
            target=pull_request.target_repo, source=pull_request.source_repo)
        if not merge_possible:
            return merge_possible, msg

        try:
            resp = self._try_merge(pull_request)
            log.debug("Merge response: %s", resp)
            status = resp.possible, self.merge_status_message(
                resp.failure_reason)
        except NotImplementedError:
            # the vcs backend has no server-side merge support
            status = False, _('Pull request merging is not supported.')

        return status
1169 1169
1170 1170 def _check_repo_requirements(self, target, source):
1171 1171 """
1172 1172 Check if `target` and `source` have compatible requirements.
1173 1173
1174 1174 Currently this is just checking for largefiles.
1175 1175 """
1176 1176 target_has_largefiles = self._has_largefiles(target)
1177 1177 source_has_largefiles = self._has_largefiles(source)
1178 1178 merge_possible = True
1179 1179 message = u''
1180 1180
1181 1181 if target_has_largefiles != source_has_largefiles:
1182 1182 merge_possible = False
1183 1183 if source_has_largefiles:
1184 1184 message = _(
1185 1185 'Target repository large files support is disabled.')
1186 1186 else:
1187 1187 message = _(
1188 1188 'Source repository large files support is disabled.')
1189 1189
1190 1190 return merge_possible, message
1191 1191
    def _has_largefiles(self, repo):
        # a repo counts as largefiles-enabled when the 'largefiles'
        # extension is present in its vcs ui settings and is active
        largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
            'extensions', 'largefiles')
        return largefiles_ui and largefiles_ui[0].active
1196 1196
    def _try_merge(self, pull_request):
        """
        Try to merge the pull request and return the merge status.

        :return: a `MergeResponse`; when the target ref cannot be resolved
            a MISSING_TARGET_REF response is returned
        """
        log.debug(
            "Trying out if the pull request %s can be merged.",
            pull_request.pull_request_id)
        target_vcs = pull_request.target_repo.scm_instance()

        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF)
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            log.debug("The target repository is locked.")
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
        elif self._needs_merge_state_refresh(pull_request, target_ref):
            # cached merge state is stale; run a dry-run merge to refresh it
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # reuse the merge state cached on the pull request
            possible = pull_request.\
                _last_merge_status == MergeFailureReason.NONE
            merge_state = MergeResponse(
                possible, False, None, pull_request._last_merge_status)

        return merge_state
1231 1231
1232 1232 def _refresh_reference(self, reference, vcs_repository):
1233 1233 if reference.type in ('branch', 'book'):
1234 1234 name_or_id = reference.name
1235 1235 else:
1236 1236 name_or_id = reference.commit_id
1237 1237 refreshed_commit = vcs_repository.get_commit(name_or_id)
1238 1238 refreshed_reference = Reference(
1239 1239 reference.type, reference.name, refreshed_commit.raw_id)
1240 1240 return refreshed_reference
1241 1241
1242 1242 def _needs_merge_state_refresh(self, pull_request, target_reference):
1243 1243 return not(
1244 1244 pull_request.revisions and
1245 1245 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1246 1246 target_reference.commit_id == pull_request._last_merge_target_rev)
1247 1247
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run a dry-run merge and persist the refreshed merge state on the
        pull request (unless the merge failed for an unknown reason).

        :return: the `MergeResponse` from the dry-run merge
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        use_rebase = self._use_rebase_for_merging(pull_request)
        merge_state = target_vcs.merge(
            target_reference, source_vcs, pull_request.source_ref_parts,
            workspace_id, dry_run=True, use_rebase=use_rebase)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request._last_merge_status = merge_state.failure_reason
            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state
1267 1267
1268 1268 def _workspace_id(self, pull_request):
1269 1269 workspace_id = 'pr-%s' % pull_request.pull_request_id
1270 1270 return workspace_id
1271 1271
    def merge_status_message(self, status_code):
        """
        Return a human friendly error message for the given merge status code.

        :param status_code: a merge failure reason, used as key into
            `MERGE_STATUS_MESSAGES`
        """
        return self.MERGE_STATUS_MESSAGES[status_code]
1277 1277
    def generate_repo_data(self, repo, commit_id=None, branch=None,
                           bookmark=None):
        """
        Generate a structure describing `repo` (owner, description and
        selectable refs) for the pull request creation UI.

        :param commit_id: commit that must be present and pre-selected
        :param branch: branch that must be present and pre-selected
        :param bookmark: bookmark that must be present and pre-selected
        """
        all_refs, selected_ref = \
            self._get_repo_pullrequest_sources(
                repo.scm_instance(), commit_id=commit_id,
                branch=branch, bookmark=bookmark)

        # convert ref groups into the nested structure select2 expects
        refs_select2 = []
        for element in all_refs:
            children = [{'id': x[0], 'text': x[1]} for x in element[0]]
            refs_select2.append({'text': element[1], 'children': children})

        return {
            'user': {
                'user_id': repo.user.user_id,
                'username': repo.user.username,
                'firstname': repo.user.first_name,
                'lastname': repo.user.last_name,
                'gravatar_link': h.gravatar_url(repo.user.email, 14),
            },
            # description_safe is the escaped variant of the description,
            # truncated at the first newline
            'description': h.chop_at_smart(repo.description_safe, '\n'),
            'refs': {
                'all_refs': all_refs,
                'selected_ref': selected_ref,
                'select2_refs': refs_select2
            }
        }
1305 1305
1306 1306 def generate_pullrequest_title(self, source, source_ref, target):
1307 1307 return u'{source}#{at_ref} to {target}'.format(
1308 1308 source=source,
1309 1309 at_ref=source_ref,
1310 1310 target=target,
1311 1311 )
1312 1312
1313 1313 def _cleanup_merge_workspace(self, pull_request):
1314 1314 # Merging related cleanup
1315 1315 target_scm = pull_request.target_repo.scm_instance()
1316 1316 workspace_id = 'pr-%s' % pull_request.pull_request_id
1317 1317
1318 1318 try:
1319 1319 target_scm.cleanup_merge_workspace(workspace_id)
1320 1320 except NotImplementedError:
1321 1321 pass
1322 1322
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        :return: tuple of (groups, selected) where `groups` is a list of
            ([(ref_key, ref_name), ...], group_label) tuples
        :raises CommitDoesNotExistError: if an explicit ref was requested
            but could not be matched
        :raises EmptyRepositoryError: if the repo has no commits at all
        """

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_str(branch) if branch else None
        bookmark = safe_str(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []
        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    # select the first ref matching either the requested
                    # commit id or the requested branch/bookmark name
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                raise CommitDoesNotExistError(
                    'No commit refs could be found matching: %s' % ref)
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                # nothing requested explicitly: fall back to default branch tip
                selected = 'branch:%s:%s' % (
                    repo.DEFAULT_BRANCH_NAME,
                    repo.branches[repo.DEFAULT_BRANCH_NAME]
                )
            elif repo.commit_ids:
                rev = repo.commit_ids[0]
                selected = 'rev:%s:%s' % (rev, rev)
            else:
                raise EmptyRepositoryError()
        return groups, selected
1385 1385
    def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
        """Public wrapper around `_get_diff_from_pr_or_version`."""
        return self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id, context=context)
1389 1389
    def _get_diff_from_pr_or_version(
            self, source_repo, source_ref_id, target_ref_id, context):
        """
        Calculate the diff between `target_ref_id` and `source_ref_id`
        within `source_repo`.

        :param source_repo: a `Repository` db object or a vcs repo instance
        :param context: number of context lines for the diff
        """
        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        source_commit = source_repo.get_commit(
            commit_id=safe_str(source_ref_id))
        if isinstance(source_repo, Repository):
            vcs_repo = source_repo.scm_instance()
        else:
            vcs_repo = source_repo

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit, context=context)
        return vcs_diff
1417 1417
1418 1418 def _is_merge_enabled(self, pull_request):
1419 1419 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1420 1420 settings = settings_model.get_general_settings()
1421 1421 return settings.get('rhodecode_pr_merge_enabled', False)
1422 1422
1423 1423 def _use_rebase_for_merging(self, pull_request):
1424 1424 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1425 1425 settings = settings_model.get_general_settings()
1426 1426 return settings.get('rhodecode_hg_use_rebase_for_merging', False)
1427 1427
    def _log_audit_action(self, action, action_data, user, pull_request):
        """Store an audit log entry scoped to the pull request's target repo."""
        audit_logger.store(
            action=action,
            action_data=action_data,
            user=user,
            repo=pull_request.target_repo)
1434 1434
1435 1435 def get_reviewer_functions(self):
1436 1436 """
1437 1437 Fetches functions for validation and fetching default reviewers.
1438 1438 If available we use the EE package, else we fallback to CE
1439 1439 package functions
1440 1440 """
1441 1441 try:
1442 1442 from rc_reviewers.utils import get_default_reviewers_data
1443 1443 from rc_reviewers.utils import validate_default_reviewers
1444 1444 except ImportError:
1445 1445 from rhodecode.apps.repository.utils import \
1446 1446 get_default_reviewers_data
1447 1447 from rhodecode.apps.repository.utils import \
1448 1448 validate_default_reviewers
1449 1449
1450 1450 return get_default_reviewers_data, validate_default_reviewers
1451 1451
1452 1452
class MergeCheck(object):
    """
    Perform Merge Checks and returns a check object which stores information
    about merge errors, and merge conditions
    """
    # keys identifying each kind of check in `error_details`
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'

    def __init__(self):
        self.review_status = None
        self.merge_possible = None
        self.merge_msg = ''
        self.failed = None
        self.errors = []
        self.error_details = OrderedDict()

    def push_error(self, error_type, message, error_key, details):
        """
        Record a failed check.

        :param error_type: severity, e.g. 'error' or 'warning'
        :param message: human readable message
        :param error_key: one of the *_CHECK class constants
        :param details: extra payload stored alongside the error
        """
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, user, fail_early=False, translator=None):
        """
        Run all merge checks (permission, review status, unresolved TODOs,
        mergeability) for `pull_request` on behalf of `user`.

        :param fail_early: return right after the first failed check
        :return: a populated `MergeCheck` instance
        """
        # if migrated to pyramid...
        # _ = lambda: translator or _ # use passed in translator if any

        merge_check = cls()

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(
            pull_request, user)
        if not user_allowed_to_merge:
            # fixed copy-pasted log message that wrongly said
            # "approval is pending" for the permission check
            log.debug("MergeCheck: cannot merge, no merge permission.")

            msg = _('User `{}` not allowed to perform merge.').format(user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error(
                'warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_unresolved_todos(pull_request)
        if todos:
            log.debug("MergeCheck: cannot merge, {} "
                      "unresolved todos left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, i.e. no conflicts etc. reported by the vcs backend
        merge_status, msg = PullRequestModel().merge_status(pull_request)
        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        if not merge_status:
            log.debug(
                "MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        return merge_check
1545 1545
1546 1546
# commit id changes between two pull request versions:
# added/common/removed relative to the old version, total = new ids
ChangeTuple = namedtuple('ChangeTuple',
                         ['added', 'common', 'removed', 'total'])

# file name changes between the diffs of two pull request versions
FileChangeTuple = namedtuple('FileChangeTuple',
                             ['added', 'modified', 'removed'])
@@ -1,1029 +1,1029 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Repository model for rhodecode
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28 import shutil
29 29 import time
30 30 import traceback
31 31 from datetime import datetime, timedelta
32 32
33 33 from pyramid.threadlocal import get_current_request
34 34 from zope.cachedescriptors.property import Lazy as LazyProperty
35 35
36 36 from rhodecode import events
37 37 from rhodecode.lib import helpers as h
38 38 from rhodecode.lib.auth import HasUserGroupPermissionAny
39 39 from rhodecode.lib.caching_query import FromCache
40 40 from rhodecode.lib.exceptions import AttachedForksError
41 41 from rhodecode.lib.hooks_base import log_delete_repository
42 42 from rhodecode.lib.utils import make_db_config
43 43 from rhodecode.lib.utils2 import (
44 44 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
45 45 get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic)
46 46 from rhodecode.lib.vcs.backends import get_backend
47 47 from rhodecode.model import BaseModel
48 48 from rhodecode.model.db import (_hash_key,
49 49 Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm,
50 50 UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup,
51 51 RepoGroup, RepositoryField)
52 52
53 53 from rhodecode.model.settings import VcsSettingsModel
54 54
55 55
# module-level logger named after this module
log = logging.getLogger(__name__)
57 57
58 58
class RepoModel(BaseModel):
    """
    Business logic for repositories: CRUD, permission grants/revokes,
    extra fields, and the matching filesystem operations.
    """

    # model class this BaseModel subclass operates on
    cls = Repository
62 62
63 63 def _get_user_group(self, users_group):
64 64 return self._get_instance(UserGroup, users_group,
65 65 callback=UserGroup.get_by_group_name)
66 66
67 67 def _get_repo_group(self, repo_group):
68 68 return self._get_instance(RepoGroup, repo_group,
69 69 callback=RepoGroup.get_by_group_name)
70 70
71 71 def _create_default_perms(self, repository, private):
72 72 # create default permission
73 73 default = 'repository.read'
74 74 def_user = User.get_default_user()
75 75 for p in def_user.user_perms:
76 76 if p.permission.permission_name.startswith('repository.'):
77 77 default = p.permission.permission_name
78 78 break
79 79
80 80 default_perm = 'repository.none' if private else default
81 81
82 82 repo_to_perm = UserRepoToPerm()
83 83 repo_to_perm.permission = Permission.get_by_key(default_perm)
84 84
85 85 repo_to_perm.repository = repository
86 86 repo_to_perm.user_id = def_user.user_id
87 87
88 88 return repo_to_perm
89 89
90 90 @LazyProperty
91 91 def repos_path(self):
92 92 """
93 93 Gets the repositories root path from database
94 94 """
95 95 settings_model = VcsSettingsModel(sa=self.sa)
96 96 return settings_model.get_repos_location()
97 97
98 98 def get(self, repo_id, cache=False):
99 99 repo = self.sa.query(Repository) \
100 100 .filter(Repository.repo_id == repo_id)
101 101
102 102 if cache:
103 103 repo = repo.options(
104 104 FromCache("sql_cache_short", "get_repo_%s" % repo_id))
105 105 return repo.scalar()
106 106
107 107 def get_repo(self, repository):
108 108 return self._get_repo(repository)
109 109
110 110 def get_by_repo_name(self, repo_name, cache=False):
111 111 repo = self.sa.query(Repository) \
112 112 .filter(Repository.repo_name == repo_name)
113 113
114 114 if cache:
115 115 name_key = _hash_key(repo_name)
116 116 repo = repo.options(
117 117 FromCache("sql_cache_short", "get_repo_%s" % name_key))
118 118 return repo.scalar()
119 119
120 120 def _extract_id_from_repo_name(self, repo_name):
121 121 if repo_name.startswith('/'):
122 122 repo_name = repo_name.lstrip('/')
123 123 by_id_match = re.match(r'^_(\d{1,})', repo_name)
124 124 if by_id_match:
125 125 return by_id_match.groups()[0]
126 126
127 127 def get_repo_by_id(self, repo_name):
128 128 """
129 129 Extracts repo_name by id from special urls.
130 130 Example url is _11/repo_name
131 131
132 132 :param repo_name:
133 133 :return: repo object if matched else None
134 134 """
135 135
136 136 try:
137 137 _repo_id = self._extract_id_from_repo_name(repo_name)
138 138 if _repo_id:
139 139 return self.get(_repo_id)
140 140 except Exception:
141 141 log.exception('Failed to extract repo_name from URL')
142 142
143 143 return None
144 144
145 145 def get_repos_for_root(self, root, traverse=False):
146 146 if traverse:
147 147 like_expression = u'{}%'.format(safe_unicode(root))
148 148 repos = Repository.query().filter(
149 149 Repository.repo_name.like(like_expression)).all()
150 150 else:
151 151 if root and not isinstance(root, RepoGroup):
152 152 raise ValueError(
153 153 'Root must be an instance '
154 154 'of RepoGroup, got:{} instead'.format(type(root)))
155 155 repos = Repository.query().filter(Repository.group == root).all()
156 156 return repos
157 157
158 158 def get_url(self, repo, request=None, permalink=False):
159 159 if not request:
160 160 request = get_current_request()
161 161
162 162 if not request:
163 163 return
164 164
165 165 if permalink:
166 166 return request.route_url(
167 167 'repo_summary', repo_name=safe_str(repo.repo_id))
168 168 else:
169 169 return request.route_url(
170 170 'repo_summary', repo_name=safe_str(repo.repo_name))
171 171
172 172 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
173 173 if not request:
174 174 request = get_current_request()
175 175
176 176 if not request:
177 177 return
178 178
179 179 if permalink:
180 180 return request.route_url(
181 181 'repo_commit', repo_name=safe_str(repo.repo_id),
182 182 commit_id=commit_id)
183 183
184 184 else:
185 185 return request.route_url(
186 186 'repo_commit', repo_name=safe_str(repo.repo_name),
187 187 commit_id=commit_id)
188 188
189 189 @classmethod
190 190 def update_repoinfo(cls, repositories=None):
191 191 if not repositories:
192 192 repositories = Repository.getAll()
193 193 for repo in repositories:
194 194 repo.update_commit_cache()
195 195
    def get_repos_as_dict(self, repo_list=None, admin=False,
                          super_user_actions=False):
        """
        Render *repo_list* into a list of row dicts for the repositories
        data table. ``admin`` adds the per-repo action column and tweaks
        rendering; ``super_user_actions`` switches those actions to the
        super-admin variants.
        """

        from rhodecode.lib.utils import PartialRenderer
        _render = PartialRenderer('data_table/_dt_elements.mako')
        c = _render.c

        # small closures rendering individual table cells via the mako partials

        def quick_menu(repo_name):
            return _render('quick_menu', repo_name)

        def repo_lnk(name, rtype, rstate, private, fork_of):
            return _render('repo_name', name, rtype, rstate, private, fork_of,
                           short_name=not admin, admin=False)

        def last_change(last_change):
            # shift naive datetimes by the local utc offset for admin views
            if admin and isinstance(last_change, datetime) and not last_change.tzinfo:
                last_change = last_change + timedelta(seconds=
                    (datetime.now() - datetime.utcnow()).seconds)
            return _render("last_change", last_change)

        def rss_lnk(repo_name):
            return _render("rss", repo_name)

        def atom_lnk(repo_name):
            return _render("atom", repo_name)

        def last_rev(repo_name, cs_cache):
            return _render('revision', repo_name, cs_cache.get('revision'),
                           cs_cache.get('raw_id'), cs_cache.get('author'),
                           cs_cache.get('message'))

        def desc(desc):
            if c.visual.stylify_metatags:
                desc = h.urlify_text(h.escaped_stylize(desc))
            else:
                desc = h.urlify_text(h.html_escape(desc))

            return _render('repo_desc', desc)

        def state(repo_state):
            return _render("repo_state", repo_state)

        def repo_actions(repo_name):
            return _render('repo_actions', repo_name, super_user_actions)

        def user_profile(username):
            return _render('user_profile', username)

        repos_data = []
        for repo in repo_list:
            cs_cache = repo.changeset_cache
            row = {
                "menu": quick_menu(repo.repo_name),

                "name": repo_lnk(repo.repo_name, repo.repo_type,
                                 repo.repo_state, repo.private, repo.fork),
                # *_raw keys provide sortable plain values next to rendered html
                "name_raw": repo.repo_name.lower(),

                "last_change": last_change(repo.last_db_change),
                "last_change_raw": datetime_to_time(repo.last_db_change),

                "last_changeset": last_rev(repo.repo_name, cs_cache),
                "last_changeset_raw": cs_cache.get('revision'),

                # description_safe is the escaped variant of the description,
                # used here to prevent stored XSS in the data table
                "desc": desc(repo.description_safe),
                "owner": user_profile(repo.user.username),

                "state": state(repo.repo_state),
                "rss": rss_lnk(repo.repo_name),

                "atom": atom_lnk(repo.repo_name),
            }
            if admin:
                row.update({
                    "action": repo_actions(repo.repo_name),
                })
            repos_data.append(row)

        return repos_data
275 275
    def _get_defaults(self, repo_name):
        """
        Gets information about repository, and returns a dict for
        usage in forms

        :param repo_name: full repository name
        :return: dict of form defaults, or None when the repo does not exist
        """

        repo_info = Repository.get_by_repo_name(repo_name)

        if repo_info is None:
            return None

        defaults = repo_info.get_dict()
        defaults['repo_name'] = repo_info.just_name

        groups = repo_info.groups_with_parents
        parent_group = groups[-1] if groups else None

        # we use -1 as this is how in HTML, we mark an empty group
        defaults['repo_group'] = getattr(parent_group, 'group_id', -1)

        # map form field names to model attributes; 'strip' means the form
        # key carries a 'repo_' prefix that the model attribute does not
        keys_to_process = (
            {'k': 'repo_type', 'strip': False},
            {'k': 'repo_enable_downloads', 'strip': True},
            {'k': 'repo_description', 'strip': True},
            {'k': 'repo_enable_locking', 'strip': True},
            {'k': 'repo_landing_rev', 'strip': True},
            {'k': 'clone_uri', 'strip': False},
            {'k': 'repo_private', 'strip': True},
            {'k': 'repo_enable_statistics', 'strip': True}
        )

        for item in keys_to_process:
            attr = item['k']
            if item['strip']:
                attr = remove_prefix(item['k'], 'repo_')

            val = defaults[attr]
            if item['k'] == 'repo_landing_rev':
                # join the stored parts into the single 'type:value' form value
                val = ':'.join(defaults[attr])
            defaults[item['k']] = val
            if item['k'] == 'clone_uri':
                defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden

        # fill owner
        if repo_info.user:
            defaults.update({'user': repo_info.user.username})
        else:
            # fall back to the first super admin when the owner is missing
            replacement_user = User.get_first_super_admin().username
            defaults.update({'user': replacement_user})

        return defaults
329 329
    def update(self, repo, **kwargs):
        """
        Update repository *repo* from keyword arguments (form data).

        Handles owner and group reassignment, whitelisted attribute updates,
        renames (including the filesystem move), resetting the default user
        permission when a repo is switched to private, and extra custom
        fields.

        :param repo: Repository instance, id, or name
        :raises Exception: re-raises any error after logging it
        """
        try:
            cur_repo = self._get_repo(repo)
            source_repo_name = cur_repo.repo_name
            if 'user' in kwargs:
                cur_repo.user = User.get_by_username(kwargs['user'])

            if 'repo_group' in kwargs:
                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
            log.debug('Updating repo %s with params:%s', cur_repo, kwargs)

            # (strip_prefix_flag, form_key) whitelist of updatable attributes
            update_keys = [
                (1, 'repo_description'),
                (1, 'repo_landing_rev'),
                (1, 'repo_private'),
                (1, 'repo_enable_downloads'),
                (1, 'repo_enable_locking'),
                (1, 'repo_enable_statistics'),
                (0, 'clone_uri'),
                (0, 'fork_id')
            ]
            for strip, k in update_keys:
                if k in kwargs:
                    val = kwargs[k]
                    if strip:
                        k = remove_prefix(k, 'repo_')

                    setattr(cur_repo, k, val)

            # NOTE(review): kwargs['repo_name'] is required here — confirm all
            # callers always pass it
            new_name = cur_repo.get_new_name(kwargs['repo_name'])
            cur_repo.repo_name = new_name

            # if private flag is set, reset default permission to NONE
            if kwargs.get('repo_private'):
                EMPTY_PERM = 'repository.none'
                RepoModel().grant_user_permission(
                    repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
                )

            # handle extra fields
            for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
                                kwargs):
                k = RepositoryField.un_prefix_key(field)
                ex_field = RepositoryField.get_by_key_name(
                    key=k, repo=cur_repo)
                if ex_field:
                    ex_field.field_value = kwargs[field]
                    self.sa.add(ex_field)
            self.sa.add(cur_repo)

            if source_repo_name != new_name:
                # rename repository
                self._rename_filesystem_repo(
                    old=source_repo_name, new=new_name)

            return cur_repo
        except Exception:
            log.error(traceback.format_exc())
            raise
389 389
    def _create_repo(self, repo_name, repo_type, description, owner,
                     private=False, clone_uri=None, repo_group=None,
                     landing_rev='rev:tip', fork_of=None,
                     copy_fork_permissions=False, enable_statistics=False,
                     enable_locking=False, enable_downloads=False,
                     copy_group_permissions=False,
                     state=Repository.STATE_PENDING):
        """
        Create repository inside database with PENDING state, this should be
        only executed by create() repo. With exception of importing existing
        repos

        :param repo_name: full repository name (may include the group path)
        :param repo_type: backend alias, e.g. 'hg' or 'git'
        :param owner: User instance, id, or username that becomes the owner
        :param fork_of: optional parent repository when creating a fork
        :param copy_fork_permissions: copy ACLs from the fork parent
        :param copy_group_permissions: copy ACLs from the parent repo group
        :raises Exception: re-raises any error after logging it
        """
        from rhodecode.model.scm import ScmModel

        owner = self._get_user(owner)
        fork_of = self._get_repo(fork_of)
        repo_group = self._get_repo_group(safe_int(repo_group))

        try:
            repo_name = safe_unicode(repo_name)
            description = safe_unicode(description)
            # repo name is just a name of repository
            # while repo_name_full is a full qualified name that is combined
            # with name and path of group
            repo_name_full = repo_name
            repo_name = repo_name.split(Repository.NAME_SEP)[-1]

            new_repo = Repository()
            new_repo.repo_state = state
            new_repo.enable_statistics = False
            new_repo.repo_name = repo_name_full
            new_repo.repo_type = repo_type
            new_repo.user = owner
            new_repo.group = repo_group
            new_repo.description = description or repo_name
            new_repo.private = private
            new_repo.clone_uri = clone_uri
            new_repo.landing_rev = landing_rev

            new_repo.enable_statistics = enable_statistics
            new_repo.enable_locking = enable_locking
            new_repo.enable_downloads = enable_downloads

            if repo_group:
                # repos inside a group inherit the group's locking setting
                new_repo.enable_locking = repo_group.enable_locking

            if fork_of:
                parent_repo = fork_of
                new_repo.fork = parent_repo

            events.trigger(events.RepoPreCreateEvent(new_repo))

            self.sa.add(new_repo)

            EMPTY_PERM = 'repository.none'
            if fork_of and copy_fork_permissions:
                repo = fork_of
                user_perms = UserRepoToPerm.query() \
                    .filter(UserRepoToPerm.repository == repo).all()
                group_perms = UserGroupRepoToPerm.query() \
                    .filter(UserGroupRepoToPerm.repository == repo).all()

                for perm in user_perms:
                    UserRepoToPerm.create(
                        perm.user, new_repo, perm.permission)

                for perm in group_perms:
                    UserGroupRepoToPerm.create(
                        perm.users_group, new_repo, perm.permission)
                # in case we copy permissions and also set this repo to private
                # override the default user permission to make it a private
                # repo
                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            elif repo_group and copy_group_permissions:
                user_perms = UserRepoGroupToPerm.query() \
                    .filter(UserRepoGroupToPerm.group == repo_group).all()

                group_perms = UserGroupRepoGroupToPerm.query() \
                    .filter(UserGroupRepoGroupToPerm.group == repo_group).all()

                for perm in user_perms:
                    # translate group-level permission names to repo-level ones
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserRepoToPerm.create(perm.user, new_repo, perm_obj)

                for perm in group_perms:
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserGroupRepoToPerm.create(
                        perm.users_group, new_repo, perm_obj)

                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            else:
                perm_obj = self._create_default_perms(new_repo, private)
                self.sa.add(perm_obj)

            # now automatically start following this repository as owner
            ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
                                                    owner.user_id)

            # we need to flush here, in order to check if database won't
            # throw any exceptions, create filesystem dirs at the very end
            self.sa.flush()
            events.trigger(events.RepoCreateEvent(new_repo))
            return new_repo

        except Exception:
            log.error(traceback.format_exc())
            raise
507 507
508 508 def create(self, form_data, cur_user):
509 509 """
510 510 Create repository using celery tasks
511 511
512 512 :param form_data:
513 513 :param cur_user:
514 514 """
515 515 from rhodecode.lib.celerylib import tasks, run_task
516 516 return run_task(tasks.create_repo, form_data, cur_user)
517 517
    def update_permissions(self, repo, perm_additions=None, perm_updates=None,
                           perm_deletions=None, check_perms=True,
                           cur_user=None):
        """
        Apply batched permission changes on *repo*.

        Each entry of the three lists is ``(member_id, perm, member_type)``,
        with member_type 'user' or a user-group marker. User-group changes
        are only applied when *cur_user* has at least read access on that
        group (skipped entirely when *check_perms* is False).

        :return: dict with 'added', 'updated' and 'deleted' change records
        """
        if not perm_additions:
            perm_additions = []
        if not perm_updates:
            perm_updates = []
        if not perm_deletions:
            perm_deletions = []

        # minimum usergroup permissions required to alter its repo grants
        req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')

        changes = {
            'added': [],
            'updated': [],
            'deleted': []
        }
        # update permissions
        for member_id, perm, member_type in perm_updates:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                # this updates also current one if found
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            else:  # set for user group
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)

            # NOTE(review): the change is recorded even when the usergroup
            # permission check above skipped the actual grant — confirm intended
            changes['updated'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})

        # set new permissions
        for member_id, perm, member_type in perm_additions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            else:  # set for user group
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)
            changes['added'].append({'type': member_type, 'id': member_id,
                                     'name': member_name, 'new_perm': perm})
        # delete permissions
        for member_id, perm, member_type in perm_deletions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.revoke_user_permission(repo=repo, user=member_id)
            else:  # set for user group
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.revoke_user_group_permission(
                        repo=repo, group_name=member_id)

            changes['deleted'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})
        return changes
587 587
588 588 def create_fork(self, form_data, cur_user):
589 589 """
590 590 Simple wrapper into executing celery task for fork creation
591 591
592 592 :param form_data:
593 593 :param cur_user:
594 594 """
595 595 from rhodecode.lib.celerylib import tasks, run_task
596 596 return run_task(tasks.create_repo_fork, form_data, cur_user)
597 597
    def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
        """
        Delete given repository, forks parameter defines what do do with
        attached forks. Throws AttachedForksError if deleted repo has attached
        forks

        :param repo: Repository instance, id, or name
        :param forks: str 'delete' or 'detach'
        :param fs_remove: remove(archive) repo from filesystem
        :param cur_user: user performing the delete; defaults to the current
            rhodecode user and is recorded in the deletion journal
        """
        if not cur_user:
            cur_user = getattr(get_current_rhodecode_user(), 'username', None)
        repo = self._get_repo(repo)
        if repo:
            if forks == 'detach':
                for r in repo.forks:
                    r.fork = None
                    self.sa.add(r)
            elif forks == 'delete':
                # recursively delete all attached forks first
                for r in repo.forks:
                    self.delete(r, forks='delete')
            elif [f for f in repo.forks]:
                # forks exist but no strategy was given -> refuse deletion
                raise AttachedForksError()

            old_repo_dict = repo.get_dict()
            events.trigger(events.RepoPreDeleteEvent(repo))
            try:
                self.sa.delete(repo)
                if fs_remove:
                    self._delete_filesystem_repo(repo)
                else:
                    log.debug('skipping removal from filesystem')
                old_repo_dict.update({
                    'deleted_by': cur_user,
                    'deleted_on': time.time(),
                })
                log_delete_repository(**old_repo_dict)
                events.trigger(events.RepoDeleteEvent(repo))
            except Exception:
                log.error(traceback.format_exc())
                raise
639 639
640 640 def grant_user_permission(self, repo, user, perm):
641 641 """
642 642 Grant permission for user on given repository, or update existing one
643 643 if found
644 644
645 645 :param repo: Instance of Repository, repository_id, or repository name
646 646 :param user: Instance of User, user_id or username
647 647 :param perm: Instance of Permission, or permission_name
648 648 """
649 649 user = self._get_user(user)
650 650 repo = self._get_repo(repo)
651 651 permission = self._get_perm(perm)
652 652
653 653 # check if we have that permission already
654 654 obj = self.sa.query(UserRepoToPerm) \
655 655 .filter(UserRepoToPerm.user == user) \
656 656 .filter(UserRepoToPerm.repository == repo) \
657 657 .scalar()
658 658 if obj is None:
659 659 # create new !
660 660 obj = UserRepoToPerm()
661 661 obj.repository = repo
662 662 obj.user = user
663 663 obj.permission = permission
664 664 self.sa.add(obj)
665 665 log.debug('Granted perm %s to %s on %s', perm, user, repo)
666 666 action_logger_generic(
667 667 'granted permission: {} to user: {} on repo: {}'.format(
668 668 perm, user, repo), namespace='security.repo')
669 669 return obj
670 670
671 671 def revoke_user_permission(self, repo, user):
672 672 """
673 673 Revoke permission for user on given repository
674 674
675 675 :param repo: Instance of Repository, repository_id, or repository name
676 676 :param user: Instance of User, user_id or username
677 677 """
678 678
679 679 user = self._get_user(user)
680 680 repo = self._get_repo(repo)
681 681
682 682 obj = self.sa.query(UserRepoToPerm) \
683 683 .filter(UserRepoToPerm.repository == repo) \
684 684 .filter(UserRepoToPerm.user == user) \
685 685 .scalar()
686 686 if obj:
687 687 self.sa.delete(obj)
688 688 log.debug('Revoked perm on %s on %s', repo, user)
689 689 action_logger_generic(
690 690 'revoked permission from user: {} on repo: {}'.format(
691 691 user, repo), namespace='security.repo')
692 692
693 693 def grant_user_group_permission(self, repo, group_name, perm):
694 694 """
695 695 Grant permission for user group on given repository, or update
696 696 existing one if found
697 697
698 698 :param repo: Instance of Repository, repository_id, or repository name
699 699 :param group_name: Instance of UserGroup, users_group_id,
700 700 or user group name
701 701 :param perm: Instance of Permission, or permission_name
702 702 """
703 703 repo = self._get_repo(repo)
704 704 group_name = self._get_user_group(group_name)
705 705 permission = self._get_perm(perm)
706 706
707 707 # check if we have that permission already
708 708 obj = self.sa.query(UserGroupRepoToPerm) \
709 709 .filter(UserGroupRepoToPerm.users_group == group_name) \
710 710 .filter(UserGroupRepoToPerm.repository == repo) \
711 711 .scalar()
712 712
713 713 if obj is None:
714 714 # create new
715 715 obj = UserGroupRepoToPerm()
716 716
717 717 obj.repository = repo
718 718 obj.users_group = group_name
719 719 obj.permission = permission
720 720 self.sa.add(obj)
721 721 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
722 722 action_logger_generic(
723 723 'granted permission: {} to usergroup: {} on repo: {}'.format(
724 724 perm, group_name, repo), namespace='security.repo')
725 725
726 726 return obj
727 727
728 728 def revoke_user_group_permission(self, repo, group_name):
729 729 """
730 730 Revoke permission for user group on given repository
731 731
732 732 :param repo: Instance of Repository, repository_id, or repository name
733 733 :param group_name: Instance of UserGroup, users_group_id,
734 734 or user group name
735 735 """
736 736 repo = self._get_repo(repo)
737 737 group_name = self._get_user_group(group_name)
738 738
739 739 obj = self.sa.query(UserGroupRepoToPerm) \
740 740 .filter(UserGroupRepoToPerm.repository == repo) \
741 741 .filter(UserGroupRepoToPerm.users_group == group_name) \
742 742 .scalar()
743 743 if obj:
744 744 self.sa.delete(obj)
745 745 log.debug('Revoked perm to %s on %s', repo, group_name)
746 746 action_logger_generic(
747 747 'revoked permission from usergroup: {} on repo: {}'.format(
748 748 group_name, repo), namespace='security.repo')
749 749
750 750 def delete_stats(self, repo_name):
751 751 """
752 752 removes stats for given repo
753 753
754 754 :param repo_name:
755 755 """
756 756 repo = self._get_repo(repo_name)
757 757 try:
758 758 obj = self.sa.query(Statistics) \
759 759 .filter(Statistics.repository == repo).scalar()
760 760 if obj:
761 761 self.sa.delete(obj)
762 762 except Exception:
763 763 log.error(traceback.format_exc())
764 764 raise
765 765
766 766 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
767 767 field_type='str', field_desc=''):
768 768
769 769 repo = self._get_repo(repo_name)
770 770
771 771 new_field = RepositoryField()
772 772 new_field.repository = repo
773 773 new_field.field_key = field_key
774 774 new_field.field_type = field_type # python type
775 775 new_field.field_value = field_value
776 776 new_field.field_desc = field_desc
777 777 new_field.field_label = field_label
778 778 self.sa.add(new_field)
779 779 return new_field
780 780
781 781 def delete_repo_field(self, repo_name, field_key):
782 782 repo = self._get_repo(repo_name)
783 783 field = RepositoryField.get_by_key_name(field_key, repo)
784 784 if field:
785 785 self.sa.delete(field)
786 786
787 787 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
788 788 clone_uri=None, repo_store_location=None,
789 789 use_global_config=False):
790 790 """
791 791 makes repository on filesystem. It's group aware means it'll create
792 792 a repository within a group, and alter the paths accordingly of
793 793 group location
794 794
795 795 :param repo_name:
796 796 :param alias:
797 797 :param parent:
798 798 :param clone_uri:
799 799 :param repo_store_location:
800 800 """
801 801 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
802 802 from rhodecode.model.scm import ScmModel
803 803
804 804 if Repository.NAME_SEP in repo_name:
805 805 raise ValueError(
806 806 'repo_name must not contain groups got `%s`' % repo_name)
807 807
808 808 if isinstance(repo_group, RepoGroup):
809 809 new_parent_path = os.sep.join(repo_group.full_path_splitted)
810 810 else:
811 811 new_parent_path = repo_group or ''
812 812
813 813 if repo_store_location:
814 814 _paths = [repo_store_location]
815 815 else:
816 816 _paths = [self.repos_path, new_parent_path, repo_name]
817 817 # we need to make it str for mercurial
818 818 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
819 819
820 820 # check if this path is not a repository
821 821 if is_valid_repo(repo_path, self.repos_path):
822 822 raise Exception('This path %s is a valid repository' % repo_path)
823 823
824 824 # check if this path is a group
825 825 if is_valid_repo_group(repo_path, self.repos_path):
826 826 raise Exception('This path %s is a valid group' % repo_path)
827 827
828 828 log.info('creating repo %s in %s from url: `%s`',
829 829 repo_name, safe_unicode(repo_path),
830 830 obfuscate_url_pw(clone_uri))
831 831
832 832 backend = get_backend(repo_type)
833 833
834 834 config_repo = None if use_global_config else repo_name
835 835 if config_repo and new_parent_path:
836 836 config_repo = Repository.NAME_SEP.join(
837 837 (new_parent_path, config_repo))
838 838 config = make_db_config(clear_session=False, repo=config_repo)
839 839 config.set('extensions', 'largefiles', '')
840 840
841 841 # patch and reset hooks section of UI config to not run any
842 842 # hooks on creating remote repo
843 843 config.clear_section('hooks')
844 844
845 845 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
846 846 if repo_type == 'git':
847 847 repo = backend(
848 848 repo_path, config=config, create=True, src_url=clone_uri,
849 849 bare=True)
850 850 else:
851 851 repo = backend(
852 852 repo_path, config=config, create=True, src_url=clone_uri)
853 853
854 854 ScmModel().install_hooks(repo, repo_type=repo_type)
855 855
856 856 log.debug('Created repo %s with %s backend',
857 857 safe_unicode(repo_name), safe_unicode(repo_type))
858 858 return repo
859 859
860 860 def _rename_filesystem_repo(self, old, new):
861 861 """
862 862 renames repository on filesystem
863 863
864 864 :param old: old name
865 865 :param new: new name
866 866 """
867 867 log.info('renaming repo from %s to %s', old, new)
868 868
869 869 old_path = os.path.join(self.repos_path, old)
870 870 new_path = os.path.join(self.repos_path, new)
871 871 if os.path.isdir(new_path):
872 872 raise Exception(
873 873 'Was trying to rename to already existing dir %s' % new_path
874 874 )
875 875 shutil.move(old_path, new_path)
876 876
    def _delete_filesystem_repo(self, repo):
        """
        Removes repo from filesystem. The removal is actually made by
        adding a rm__ prefix to the dir, and renaming the internal .hg/.git
        dirs, so this repository is no longer valid for rhodecode and can be
        undeleted later on by reverting the renames on this repository.

        :param repo: repo object
        """
        rm_path = os.path.join(self.repos_path, repo.repo_name)
        repo_group = repo.group
        log.info("Removing repository %s", rm_path)
        # disable hg/git internal that it doesn't get detected as repo
        alias = repo.repo_type

        config = make_db_config(clear_session=False)
        config.set('extensions', 'largefiles', '')
        bare = getattr(repo.scm_instance(config=config), 'bare', False)

        # skip this for bare git repos
        if not bare:
            # disable VCS repo
            vcs_path = os.path.join(rm_path, '.%s' % alias)
            if os.path.exists(vcs_path):
                shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))

        _now = datetime.now()
        _ms = str(_now.microsecond).rjust(6, '0')
        # timestamped archive name keeps repeated deletes of the same repo
        # name from colliding
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
                             repo.just_name)
        if repo_group:
            # if repository is in group, prefix the removal path with the group
            args = repo_group.full_path_splitted + [_d]
            _d = os.path.join(*args)

        if os.path.isdir(rm_path):
            shutil.move(rm_path, os.path.join(self.repos_path, _d))
914 914
915 915
class ReadmeFinder:
    """
    Utility which knows how to find a readme for a specific commit.

    The main idea is that this is a configurable algorithm. When creating an
    instance you can define parameters, currently only the `default_renderer`.
    Based on this configuration the method :meth:`search` behaves slightly
    different.
    """

    readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
    path_re = re.compile(r'^docs?', re.IGNORECASE)

    # lower value == higher priority; keyed by file extension (None == no ext)
    default_priorities = {
        None: 0,
        '.text': 2,
        '.txt': 3,
        '.rst': 1,
        '.rest': 2,
        '.md': 1,
        '.mkdn': 2,
        '.mdown': 3,
        '.markdown': 4,
    }

    # documentation directories to descend into, in order of preference
    path_priority = {
        'doc': 0,
        'docs': 1,
    }

    FALLBACK_PRIORITY = 99

    RENDERER_TO_EXTENSION = {
        'rst': ['.rst', '.rest'],
        # NOTE: '.mkdn' was previously listed without the leading dot, so it
        # could never match the extension captured by `readme_re` and such
        # files lost their renderer-priority boost.
        'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
    }

    def __init__(self, default_renderer=None):
        self._default_renderer = default_renderer
        # extensions favoured by the configured renderer (empty if unknown)
        self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
            default_renderer, [])

    def search(self, commit, path='/'):
        """
        Find a readme in the given `commit`, preferring files whose extension
        matches the configured default renderer, then recursively descending
        into documentation-like directories.

        :return: the readme file node, or None if nothing was found
        """
        nodes = commit.get_nodes(path)
        matches = self._match_readmes(nodes)
        matches = self._sort_according_to_priority(matches)
        if matches:
            return matches[0].node

        paths = self._match_paths(nodes)
        paths = self._sort_paths_according_to_priority(paths)
        # renamed from `path` to avoid shadowing the parameter
        for sub_path in paths:
            match = self.search(commit, path=sub_path)
            if match:
                return match

        return None

    def _match_readmes(self, nodes):
        # yield a ReadmeMatch for every file node whose basename looks like
        # a readme
        for node in nodes:
            if not node.is_file():
                continue
            path = node.path.rsplit('/', 1)[-1]
            match = self.readme_re.match(path)
            if match:
                extension = match.group(1)
                yield ReadmeMatch(node, match, self._priority(extension))

    def _match_paths(self, nodes):
        # yield paths of directory nodes that look like documentation dirs
        for node in nodes:
            if not node.is_dir():
                continue
            match = self.path_re.match(node.path)
            if match:
                yield node.path

    def _priority(self, extension):
        """
        Return a (renderer_priority, extension_priority) tuple for sorting;
        lower sorts first.
        """
        renderer_priority = (
            0 if extension in self._renderer_extensions else 1)
        extension_priority = self.default_priorities.get(
            extension, self.FALLBACK_PRIORITY)
        return (renderer_priority, extension_priority)

    def _sort_according_to_priority(self, matches):
        # stable sort by (priority, path) for deterministic results

        def priority_and_path(match):
            return (match.priority, match.path)

        return sorted(matches, key=priority_and_path)

    def _sort_paths_according_to_priority(self, paths):

        def priority_and_path(path):
            return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)

        return sorted(paths, key=priority_and_path)
1015 1015
1016 1016
class ReadmeMatch:
    """
    Pairs a readme file node with its regex match and computed priority.
    """

    def __init__(self, node, match, priority):
        self.node = node  # file node of the matched readme
        self._match = match  # regex match object produced by ReadmeFinder
        self.priority = priority  # (renderer_priority, extension_priority)

    @property
    def path(self):
        # path of the underlying file node
        return self.node.path

    def __repr__(self):
        # NOTE: the closing '>' was previously missing from the repr string
        return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
@@ -1,733 +1,733 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 repo group model for RhodeCode
24 24 """
25 25
26 26 import os
27 27 import datetime
28 28 import itertools
29 29 import logging
30 30 import shutil
31 31 import traceback
32 32 import string
33 33
34 34 from zope.cachedescriptors.property import Lazy as LazyProperty
35 35
36 36 from rhodecode import events
37 37 from rhodecode.model import BaseModel
38 38 from rhodecode.model.db import (_hash_key,
39 39 RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm,
40 40 UserGroup, Repository)
41 41 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
42 42 from rhodecode.lib.caching_query import FromCache
43 43 from rhodecode.lib.utils2 import action_logger_generic
44 44
45 45 log = logging.getLogger(__name__)
46 46
47 47
class RepoGroupModel(BaseModel):
    """Business logic for repository groups: creation, permissions, fs ops."""

    cls = RepoGroup
    # description template used for auto-created personal repo groups
    PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`'
    # name template for personal groups; see get_personal_group_name_pattern()
    PERSONAL_GROUP_PATTERN = '${username}'  # default

    def _get_user_group(self, users_group):
        # resolve a UserGroup from an instance, id, or group name
        return self._get_instance(UserGroup, users_group,
                                  callback=UserGroup.get_by_group_name)

    def _get_repo_group(self, repo_group):
        # resolve a RepoGroup from an instance, id, or group name
        return self._get_instance(RepoGroup, repo_group,
                                  callback=RepoGroup.get_by_group_name)

    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from database
        """

        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()
70 70
71 71 def get_by_group_name(self, repo_group_name, cache=None):
72 72 repo = self.sa.query(RepoGroup) \
73 73 .filter(RepoGroup.group_name == repo_group_name)
74 74
75 75 if cache:
76 76 name_key = _hash_key(repo_group_name)
77 77 repo = repo.options(
78 78 FromCache("sql_cache_short", "get_repo_group_%s" % name_key))
79 79 return repo.scalar()
80 80
81 81 def get_default_create_personal_repo_group(self):
82 82 value = SettingsModel().get_setting_by_name(
83 83 'create_personal_repo_group')
84 84 return value.app_settings_value if value else None or False
85 85
86 86 def get_personal_group_name_pattern(self):
87 87 value = SettingsModel().get_setting_by_name(
88 88 'personal_repo_group_pattern')
89 89 val = value.app_settings_value if value else None
90 90 group_template = val or self.PERSONAL_GROUP_PATTERN
91 91
92 92 group_template = group_template.lstrip('/')
93 93 return group_template
94 94
95 95 def get_personal_group_name(self, user):
96 96 template = self.get_personal_group_name_pattern()
97 97 return string.Template(template).safe_substitute(
98 98 username=user.username,
99 99 user_id=user.user_id,
100 100 )
101 101
102 102 def create_personal_repo_group(self, user, commit_early=True):
103 103 desc = self.PERSONAL_GROUP_DESC % {'username': user.username}
104 104 personal_repo_group_name = self.get_personal_group_name(user)
105 105
106 106 # create a new one
107 107 RepoGroupModel().create(
108 108 group_name=personal_repo_group_name,
109 109 group_description=desc,
110 110 owner=user.username,
111 111 personal=True,
112 112 commit_early=commit_early)
113 113
    def _create_default_perms(self, new_group):
        """
        Build the permission row binding the default (anonymous) user to
        `new_group`, mirroring whatever `group.*` permission the default
        user currently carries globally.

        :param new_group: freshly created RepoGroup instance
        :return: unsaved UserRepoGroupToPerm (caller adds it to the session)
        """
        # create default permission
        default_perm = 'group.read'
        def_user = User.get_default_user()
        # pick up the globally configured default; fall back to group.read
        for p in def_user.user_perms:
            if p.permission.permission_name.startswith('group.'):
                default_perm = p.permission.permission_name
                break

        repo_group_to_perm = UserRepoGroupToPerm()
        repo_group_to_perm.permission = Permission.get_by_key(default_perm)

        repo_group_to_perm.group = new_group
        repo_group_to_perm.user_id = def_user.user_id
        return repo_group_to_perm
129 129
130 130 def _get_group_name_and_parent(self, group_name_full, repo_in_path=False,
131 131 get_object=False):
132 132 """
133 133 Get's the group name and a parent group name from given group name.
134 134 If repo_in_path is set to truth, we asume the full path also includes
135 135 repo name, in such case we clean the last element.
136 136
137 137 :param group_name_full:
138 138 """
139 139 split_paths = 1
140 140 if repo_in_path:
141 141 split_paths = 2
142 142 _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths)
143 143
144 144 if repo_in_path and len(_parts) > 1:
145 145 # such case last element is the repo_name
146 146 _parts.pop(-1)
147 147 group_name_cleaned = _parts[-1] # just the group name
148 148 parent_repo_group_name = None
149 149
150 150 if len(_parts) > 1:
151 151 parent_repo_group_name = _parts[0]
152 152
153 153 parent_group = None
154 154 if parent_repo_group_name:
155 155 parent_group = RepoGroup.get_by_group_name(parent_repo_group_name)
156 156
157 157 if get_object:
158 158 return group_name_cleaned, parent_repo_group_name, parent_group
159 159
160 160 return group_name_cleaned, parent_repo_group_name
161 161
162 162 def check_exist_filesystem(self, group_name, exc_on_failure=True):
163 163 create_path = os.path.join(self.repos_path, group_name)
164 164 log.debug('creating new group in %s', create_path)
165 165
166 166 if os.path.isdir(create_path):
167 167 if exc_on_failure:
168 168 abs_create_path = os.path.abspath(create_path)
169 169 raise Exception('Directory `{}` already exists !'.format(abs_create_path))
170 170 return False
171 171 return True
172 172
    def _create_group(self, group_name):
        """
        makes repository group on filesystem

        :param group_name: full (path) name of the group directory to create
        """

        # raises when a directory for this group already exists
        self.check_exist_filesystem(group_name)
        create_path = os.path.join(self.repos_path, group_name)
        log.debug('creating new group in %s', create_path)
        os.makedirs(create_path, mode=0755)
        log.debug('created group in %s', create_path)
186 186
187 187 def _rename_group(self, old, new):
188 188 """
189 189 Renames a group on filesystem
190 190
191 191 :param group_name:
192 192 """
193 193
194 194 if old == new:
195 195 log.debug('skipping group rename')
196 196 return
197 197
198 198 log.debug('renaming repository group from %s to %s', old, new)
199 199
200 200 old_path = os.path.join(self.repos_path, old)
201 201 new_path = os.path.join(self.repos_path, new)
202 202
203 203 log.debug('renaming repos paths from %s to %s', old_path, new_path)
204 204
205 205 if os.path.isdir(new_path):
206 206 raise Exception('Was trying to rename to already '
207 207 'existing dir %s' % new_path)
208 208 shutil.move(old_path, new_path)
209 209
    def _delete_filesystem_group(self, group, force_delete=False):
        """
        Deletes a group from a filesystem

        :param group: instance of group from database
        :param force_delete: use shutil rmtree to remove all objects
        """
        # group.full_path uses URL separators; convert to OS path separators
        paths = group.full_path.split(RepoGroup.url_sep())
        paths = os.sep.join(paths)

        rm_path = os.path.join(self.repos_path, paths)
        log.info("Removing group %s", rm_path)
        # delete only if that path really exists
        if os.path.isdir(rm_path):
            if force_delete:
                shutil.rmtree(rm_path)
            else:
                # archive that group`
                _now = datetime.datetime.now()
                _ms = str(_now.microsecond).rjust(6, '0')
                # timestamped archive name, e.g. rm__20170101_..._GROUP_name
                _d = 'rm__%s_GROUP_%s' % (
                    _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name)
                shutil.move(rm_path, os.path.join(self.repos_path, _d))
233 233
    def create(self, group_name, group_description, owner, just_db=False,
               copy_permissions=False, personal=None, commit_early=True):
        """
        Create a new repository group in the database and on the filesystem.

        :param group_name: full (path) name of the new group
        :param group_description: description; falls back to group_name
        :param owner: User instance, user_id or username owning the group
        :param just_db: skip filesystem directory creation
        :param copy_permissions: copy permissions from the parent group
        :param personal: mark the group as a personal repo group
        :param commit_early: commit the session before creating the directory
        :raises ValueError: when a parent group named in the path is missing
        :return: the new RepoGroup instance
        """

        (group_name_cleaned,
         parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name)

        parent_group = None
        if parent_group_name:
            parent_group = self._get_repo_group(parent_group_name)
            if not parent_group:
                # we tried to create a nested group, but the parent is not
                # existing
                raise ValueError(
                    'Parent group `%s` given in `%s` group name '
                    'is not yet existing.' % (parent_group_name, group_name))

        # because we are doing a cleanup, we need to check if such directory
        # already exists. If we don't do that we can accidentally delete
        # existing directory via cleanup that can cause data issues, since
        # delete does a folder rename to special syntax later cleanup
        # functions can delete this
        cleanup_group = self.check_exist_filesystem(group_name,
                                                    exc_on_failure=False)
        try:
            user = self._get_user(owner)
            new_repo_group = RepoGroup()
            new_repo_group.user = user
            new_repo_group.group_description = group_description or group_name
            new_repo_group.parent_group = parent_group
            new_repo_group.group_name = group_name
            new_repo_group.personal = personal

            self.sa.add(new_repo_group)

            # create an ADMIN permission for owner except if we're super admin,
            # later owner should go into the owner field of groups
            if not user.is_admin:
                self.grant_user_permission(repo_group=new_repo_group,
                                           user=owner, perm='group.admin')

            if parent_group and copy_permissions:
                # copy permissions from parent
                user_perms = UserRepoGroupToPerm.query() \
                    .filter(UserRepoGroupToPerm.group == parent_group).all()

                group_perms = UserGroupRepoGroupToPerm.query() \
                    .filter(UserGroupRepoGroupToPerm.group == parent_group).all()

                for perm in user_perms:
                    # don't copy over the permission for user who is creating
                    # this group, if he is not super admin he get's admin
                    # permission set above
                    if perm.user != user or user.is_admin:
                        UserRepoGroupToPerm.create(
                            perm.user, new_repo_group, perm.permission)

                for perm in group_perms:
                    UserGroupRepoGroupToPerm.create(
                        perm.users_group, new_repo_group, perm.permission)
            else:
                perm_obj = self._create_default_perms(new_repo_group)
                self.sa.add(perm_obj)

            # now commit the changes, earlier so we are sure everything is in
            # the database.
            if commit_early:
                self.sa.commit()
            if not just_db:
                self._create_group(new_repo_group.group_name)

            # trigger the post hook
            from rhodecode.lib.hooks_base import log_create_repository_group
            repo_group = RepoGroup.get_by_group_name(group_name)
            log_create_repository_group(
                created_by=user.username, **repo_group.get_dict())

            # Trigger create event.
            events.trigger(events.RepoGroupCreateEvent(repo_group))

            return new_repo_group
        except Exception:
            self.sa.rollback()
            log.exception('Exception occurred when creating repository group, '
                          'doing cleanup...')
            # rollback things manually !
            repo_group = RepoGroup.get_by_group_name(group_name)
            if repo_group:
                RepoGroup.delete(repo_group.group_id)
                self.sa.commit()
                if cleanup_group:
                    RepoGroupModel()._delete_filesystem_group(repo_group)
            raise
326 326
    def update_permissions(
            self, repo_group, perm_additions=None, perm_updates=None,
            perm_deletions=None, recursive=None, check_perms=True,
            cur_user=None):
        """
        Apply permission additions/updates/deletions to a repo group and,
        depending on `recursive`, to its children.

        :param repo_group: RepoGroup the changes are anchored at
        :param perm_additions: iterable of (member_id, perm, member_type)
        :param perm_updates: iterable of (member_id, perm, member_type)
        :param perm_deletions: iterable of (member_id, perm, member_type)
        :param recursive: 'none' (default), 'repos', 'groups' or 'all' —
            which descendants the change is propagated to
        :param check_perms: verify `cur_user` may alter affected user groups
        :param cur_user: user performing the change (for permission checks)
        :return: dict with 'added'/'updated'/'deleted' change records
        """
        from rhodecode.model.repo import RepoModel
        from rhodecode.lib.auth import HasUserGroupPermissionAny

        if not perm_additions:
            perm_additions = []
        if not perm_updates:
            perm_updates = []
        if not perm_deletions:
            perm_deletions = []

        req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')

        changes = {
            'added': [],
            'updated': [],
            'deleted': []
        }

        def _set_perm_user(obj, user, perm):
            # grant on a group directly; on repos translate group.* -> repository.*
            if isinstance(obj, RepoGroup):
                self.grant_user_permission(
                    repo_group=obj, user=user, perm=perm)
            elif isinstance(obj, Repository):
                # private repos will not allow to change the default
                # permissions using recursive mode
                if obj.private and user == User.DEFAULT_USER:
                    return

                # we set group permission but we have to switch to repo
                # permission
                perm = perm.replace('group.', 'repository.')
                RepoModel().grant_user_permission(
                    repo=obj, user=user, perm=perm)

        def _set_perm_group(obj, users_group, perm):
            if isinstance(obj, RepoGroup):
                self.grant_user_group_permission(
                    repo_group=obj, group_name=users_group, perm=perm)
            elif isinstance(obj, Repository):
                # we set group permission but we have to switch to repo
                # permission
                perm = perm.replace('group.', 'repository.')
                RepoModel().grant_user_group_permission(
                    repo=obj, group_name=users_group, perm=perm)

        def _revoke_perm_user(obj, user):
            if isinstance(obj, RepoGroup):
                self.revoke_user_permission(repo_group=obj, user=user)
            elif isinstance(obj, Repository):
                RepoModel().revoke_user_permission(repo=obj, user=user)

        def _revoke_perm_group(obj, user_group):
            if isinstance(obj, RepoGroup):
                self.revoke_user_group_permission(
                    repo_group=obj, group_name=user_group)
            elif isinstance(obj, Repository):
                RepoModel().revoke_user_group_permission(
                    repo=obj, group_name=user_group)

        # start updates
        log.debug('Now updating permissions for %s in recursive mode:%s',
                  repo_group, recursive)

        # initialize check function, we'll call that multiple times
        has_group_perm = HasUserGroupPermissionAny(*req_perms)

        for obj in repo_group.recursive_groups_and_repos():
            # iterated obj is an instance of a repos group or repository in
            # that group, recursive option can be: none, repos, groups, all
            if recursive == 'all':
                obj = obj
            elif recursive == 'repos':
                # skip groups, other than this one
                if isinstance(obj, RepoGroup) and not obj == repo_group:
                    continue
            elif recursive == 'groups':
                # skip repos
                if isinstance(obj, Repository):
                    continue
            else:  # recursive == 'none':
                # DEFAULT option - don't apply to iterated objects
                # also we do a break at the end of this loop. if we are not
                # in recursive mode
                obj = repo_group

            change_obj = obj.get_api_data()

            # update permissions
            for member_id, perm, member_type in perm_updates:
                member_id = int(member_id)
                if member_type == 'user':
                    member_name = User.get(member_id).username
                    # this updates also current one if found
                    _set_perm_user(obj, user=member_id, perm=perm)
                else:  # set for user group
                    member_name = UserGroup.get(member_id).users_group_name
                    if not check_perms or has_group_perm(member_name,
                                                         user=cur_user):
                        _set_perm_group(obj, users_group=member_id, perm=perm)

                changes['updated'].append(
                    {'change_obj': change_obj, 'type': member_type,
                     'id': member_id, 'name': member_name, 'new_perm': perm})

            # set new permissions
            for member_id, perm, member_type in perm_additions:
                member_id = int(member_id)
                if member_type == 'user':
                    member_name = User.get(member_id).username
                    _set_perm_user(obj, user=member_id, perm=perm)
                else:  # set for user group
                    # check if we have permissions to alter this usergroup
                    member_name = UserGroup.get(member_id).users_group_name
                    if not check_perms or has_group_perm(member_name,
                                                         user=cur_user):
                        _set_perm_group(obj, users_group=member_id, perm=perm)

                changes['added'].append(
                    {'change_obj': change_obj, 'type': member_type,
                     'id': member_id, 'name': member_name, 'new_perm': perm})

            # delete permissions
            for member_id, perm, member_type in perm_deletions:
                member_id = int(member_id)
                if member_type == 'user':
                    member_name = User.get(member_id).username
                    _revoke_perm_user(obj, user=member_id)
                else:  # set for user group
                    # check if we have permissions to alter this usergroup
                    member_name = UserGroup.get(member_id).users_group_name
                    if not check_perms or has_group_perm(member_name,
                                                         user=cur_user):
                        _revoke_perm_group(obj, user_group=member_id)

                changes['deleted'].append(
                    {'change_obj': change_obj, 'type': member_type,
                     'id': member_id, 'name': member_name, 'new_perm': perm})

            # if it's not recursive call for all,repos,groups
            # break the loop and don't proceed with other changes
            if recursive not in ['all', 'repos', 'groups']:
                break

        return changes
475 475
    def update(self, repo_group, form_data):
        """
        Update a repository group from `form_data` and cascade any resulting
        rename/locking changes to all nested groups and repositories.

        :param repo_group: RepoGroup instance, id or name
        :param form_data: dict; recognised keys: group_description,
            enable_locking, group_parent_id, group_name, user
        :return: the updated RepoGroup
        """
        try:
            repo_group = self._get_repo_group(repo_group)
            old_path = repo_group.full_path

            # change properties
            if 'group_description' in form_data:
                repo_group.group_description = form_data['group_description']

            if 'enable_locking' in form_data:
                repo_group.enable_locking = form_data['enable_locking']

            if 'group_parent_id' in form_data:
                parent_group = (
                    self._get_repo_group(form_data['group_parent_id']))
                repo_group.group_parent_id = (
                    parent_group.group_id if parent_group else None)
                repo_group.parent_group = parent_group

            # mikhail: to update the full_path, we have to explicitly
            # update group_name
            group_name = form_data.get('group_name', repo_group.name)
            repo_group.group_name = repo_group.get_new_name(group_name)

            new_path = repo_group.full_path

            if 'user' in form_data:
                repo_group.user = User.get_by_username(form_data['user'])

            self.sa.add(repo_group)

            # iterate over all members of this groups and do fixes
            # set locking if given
            # if obj is a repoGroup also fix the name of the group according
            # to the parent
            # if obj is a Repo fix it's name
            # this can be potentially heavy operation
            for obj in repo_group.recursive_groups_and_repos():
                # set the value from it's parent
                obj.enable_locking = repo_group.enable_locking
                if isinstance(obj, RepoGroup):
                    new_name = obj.get_new_name(obj.name)
                    log.debug('Fixing group %s to new name %s',
                              obj.group_name, new_name)
                    obj.group_name = new_name
                elif isinstance(obj, Repository):
                    # we need to get all repositories from this new group and
                    # rename them accordingly to new group path
                    new_name = obj.get_new_name(obj.just_name)
                    log.debug('Fixing repo %s to new name %s',
                              obj.repo_name, new_name)
                    obj.repo_name = new_name
                self.sa.add(obj)

            self._rename_group(old_path, new_path)

            # Trigger update event.
            events.trigger(events.RepoGroupUpdateEvent(repo_group))

            return repo_group
        except Exception:
            log.error(traceback.format_exc())
            raise
539 539
540 540 def delete(self, repo_group, force_delete=False, fs_remove=True):
541 541 repo_group = self._get_repo_group(repo_group)
542 542 if not repo_group:
543 543 return False
544 544 try:
545 545 self.sa.delete(repo_group)
546 546 if fs_remove:
547 547 self._delete_filesystem_group(repo_group, force_delete)
548 548 else:
549 549 log.debug('skipping removal from filesystem')
550 550
551 551 # Trigger delete event.
552 552 events.trigger(events.RepoGroupDeleteEvent(repo_group))
553 553 return True
554 554
555 555 except Exception:
556 556 log.error('Error removing repo_group %s', repo_group)
557 557 raise
558 558
    def grant_user_permission(self, repo_group, user, perm):
        """
        Grant permission for user on given repository group, or update
        existing one if found

        :param repo_group: Instance of RepoGroup, repositories_group_id,
            or repositories_group name
        :param user: Instance of User, user_id or username
        :param perm: Instance of Permission, or permission_name
        :return: the (new or updated) UserRepoGroupToPerm row
        """

        repo_group = self._get_repo_group(repo_group)
        user = self._get_user(user)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserRepoGroupToPerm)\
            .filter(UserRepoGroupToPerm.user == user)\
            .filter(UserRepoGroupToPerm.group == repo_group)\
            .scalar()
        if obj is None:
            # create new !
            obj = UserRepoGroupToPerm()
        # (re)point the binding at the resolved user/group/permission
        obj.group = repo_group
        obj.user = user
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s', perm, user, repo_group)
        action_logger_generic(
            'granted permission: {} to user: {} on repogroup: {}'.format(
                perm, user, repo_group), namespace='security.repogroup')
        return obj
591 591
592 592 def revoke_user_permission(self, repo_group, user):
593 593 """
594 594 Revoke permission for user on given repository group
595 595
596 596 :param repo_group: Instance of RepoGroup, repositories_group_id,
597 597 or repositories_group name
598 598 :param user: Instance of User, user_id or username
599 599 """
600 600
601 601 repo_group = self._get_repo_group(repo_group)
602 602 user = self._get_user(user)
603 603
604 604 obj = self.sa.query(UserRepoGroupToPerm)\
605 605 .filter(UserRepoGroupToPerm.user == user)\
606 606 .filter(UserRepoGroupToPerm.group == repo_group)\
607 607 .scalar()
608 608 if obj:
609 609 self.sa.delete(obj)
610 610 log.debug('Revoked perm on %s on %s', repo_group, user)
611 611 action_logger_generic(
612 612 'revoked permission from user: {} on repogroup: {}'.format(
613 613 user, repo_group), namespace='security.repogroup')
614 614
615 615 def grant_user_group_permission(self, repo_group, group_name, perm):
616 616 """
617 617 Grant permission for user group on given repository group, or update
618 618 existing one if found
619 619
620 620 :param repo_group: Instance of RepoGroup, repositories_group_id,
621 621 or repositories_group name
622 622 :param group_name: Instance of UserGroup, users_group_id,
623 623 or user group name
624 624 :param perm: Instance of Permission, or permission_name
625 625 """
626 626 repo_group = self._get_repo_group(repo_group)
627 627 group_name = self._get_user_group(group_name)
628 628 permission = self._get_perm(perm)
629 629
630 630 # check if we have that permission already
631 631 obj = self.sa.query(UserGroupRepoGroupToPerm)\
632 632 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
633 633 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
634 634 .scalar()
635 635
636 636 if obj is None:
637 637 # create new
638 638 obj = UserGroupRepoGroupToPerm()
639 639
640 640 obj.group = repo_group
641 641 obj.users_group = group_name
642 642 obj.permission = permission
643 643 self.sa.add(obj)
644 644 log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group)
645 645 action_logger_generic(
646 646 'granted permission: {} to usergroup: {} on repogroup: {}'.format(
647 647 perm, group_name, repo_group), namespace='security.repogroup')
648 648 return obj
649 649
    def revoke_user_group_permission(self, repo_group, group_name):
        """
        Revoke permission for user group on given repository group

        :param repo_group: Instance of RepoGroup, repositories_group_id,
            or repositories_group name
        :param group_name: Instance of UserGroup, users_group_id,
            or user group name
        """
        repo_group = self._get_repo_group(repo_group)
        group_name = self._get_user_group(group_name)

        # silently no-op when no such binding exists
        obj = self.sa.query(UserGroupRepoGroupToPerm)\
            .filter(UserGroupRepoGroupToPerm.group == repo_group)\
            .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
            .scalar()
        if obj:
            self.sa.delete(obj)
            log.debug('Revoked perm to %s on %s', repo_group, group_name)
            action_logger_generic(
                'revoked permission from usergroup: {} on repogroup: {}'.format(
                    group_name, repo_group), namespace='security.repogroup')
672 672
    def get_repo_groups_as_dict(self, repo_group_list=None, admin=False,
                                super_user_actions=False):
        """
        Render a list of repo groups into row dicts for the data-table UI.

        :param repo_group_list: iterable of RepoGroup instances
        :param admin: include repo counts, child-group names and action links
        :param super_user_actions: reserved flag for admin-only actions
        :return: list of dicts, one per group
        """

        from rhodecode.lib.utils import PartialRenderer
        _render = PartialRenderer('data_table/_dt_elements.mako')
        c = _render.c
        h = _render.h

        def quick_menu(repo_group_name):
            # dropdown menu cell for a group row
            return _render('quick_repo_group_menu', repo_group_name)

        def repo_group_lnk(repo_group_name):
            # link cell pointing at the group
            return _render('repo_group_name', repo_group_name)

        def desc(desc, personal):
            # escaped/stylized description; prefixes personal groups
            prefix = h.escaped_stylize(u'[personal] ') if personal else ''

            if c.visual.stylify_metatags:
                desc = h.urlify_text(prefix + h.escaped_stylize(desc))
            else:
                desc = h.urlify_text(prefix + h.html_escape(desc))

            return _render('repo_group_desc', desc)

        def repo_group_actions(repo_group_id, repo_group_name, gr_count):
            # admin action buttons (edit/delete) for a group row
            return _render(
                'repo_group_actions', repo_group_id, repo_group_name, gr_count)

        def repo_group_name(repo_group_name, children_groups):
            # group name with its parent-chain breadcrumb
            return _render("repo_group_name", repo_group_name, children_groups)

        def user_profile(username):
            # owner cell
            return _render('user_profile', username)

        repo_group_data = []
        for group in repo_group_list:

            row = {
                "menu": quick_menu(group.group_name),
                "name": repo_group_lnk(group.group_name),
                "name_raw": group.group_name,
                # description_safe is the pre-escaped variant (security fix)
                "desc": desc(group.description_safe, group.personal),
                "top_level_repos": 0,
                "owner": user_profile(group.user.username)
            }
            if admin:
                repo_count = group.repositories.count()
                children_groups = map(
                    h.safe_unicode,
                    itertools.chain((g.name for g in group.parents),
                                    (x.name for x in [group])))
                row.update({
                    "action": repo_group_actions(
                        group.group_id, group.group_name, repo_count),
                    "top_level_repos": repo_count,
                    "name": repo_group_name(group.group_name, children_groups),

                })
            repo_group_data.append(row)

        return repo_group_data
General Comments 0
You need to be logged in to leave comments. Login now