gists: fixed cache problems and updated ui
marcink -
r3869:816873d4 default
@@ -1,1879 +1,1881 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24 import os
25 25 import re
26 26 import time
27 27 import shutil
28 28 import datetime
29 29 import fnmatch
30 30 import itertools
31 31 import logging
32 32 import collections
33 33 import warnings
34 34
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from pyramid import compat
38 38
39 39 import rhodecode
40 40 from rhodecode.translation import lazy_ugettext
41 41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
42 42 from rhodecode.lib.vcs import connection
43 43 from rhodecode.lib.vcs.utils import author_name, author_email
44 44 from rhodecode.lib.vcs.conf import settings
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
47 47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
48 48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
49 49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
50 50 RepositoryError)
51 51
52 52
53 53 log = logging.getLogger(__name__)
54 54
55 55
56 56 FILEMODE_DEFAULT = 0o100644
57 57 FILEMODE_EXECUTABLE = 0o100755
58 58 EMPTY_COMMIT_ID = '0' * 40
59 59
60 60 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
61 61
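# Illustrative sketch (not part of the original file): a Reference tuple as it
# is passed around this module; the 'branch' ref type and the sha value are
# placeholder assumptions.
#
#     ref = Reference('branch', 'master', 'f00d...40-char-sha')
#     ref.type, ref.name, ref.commit_id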
62 62
63 63 class MergeFailureReason(object):
64 64 """
65 65 Enumeration with all the reasons why the server side merge could fail.
66 66
67 67 DO NOT change the number of the reasons, as they may be stored in the
68 68 database.
69 69
70 70 Changing the name of a reason is acceptable and encouraged to deprecate old
71 71 reasons.
72 72 """
73 73
74 74 # Everything went well.
75 75 NONE = 0
76 76
77 77 # An unexpected exception was raised. Check the logs for more details.
78 78 UNKNOWN = 1
79 79
80 80 # The merge was not successful, there are conflicts.
81 81 MERGE_FAILED = 2
82 82
83 83 # The merge succeeded but we could not push it to the target repository.
84 84 PUSH_FAILED = 3
85 85
86 86 # The specified target is not a head in the target repository.
87 87 TARGET_IS_NOT_HEAD = 4
88 88
89 89 # The source repository contains more branches than the target. Pushing
90 90 # the merge will create additional branches in the target.
91 91 HG_SOURCE_HAS_MORE_BRANCHES = 5
92 92
93 93 # The target reference has multiple heads, which makes it impossible to
94 94 # correctly identify the target location. This can only happen for mercurial
95 95 # branches.
96 96 HG_TARGET_HAS_MULTIPLE_HEADS = 6
97 97
98 98 # The target repository is locked
99 99 TARGET_IS_LOCKED = 7
100 100
101 101 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
102 102 # An involved commit could not be found.
103 103 _DEPRECATED_MISSING_COMMIT = 8
104 104
105 105 # The target repo reference is missing.
106 106 MISSING_TARGET_REF = 9
107 107
108 108 # The source repo reference is missing.
109 109 MISSING_SOURCE_REF = 10
110 110
111 111 # The merge was not successful, there are conflicts related to sub
112 112 # repositories.
113 113 SUBREPO_MERGE_FAILED = 11
114 114
115 115
116 116 class UpdateFailureReason(object):
117 117 """
118 118 Enumeration with all the reasons why the pull request update could fail.
119 119
120 120 DO NOT change the number of the reasons, as they may be stored in the
121 121 database.
122 122
123 123 Changing the name of a reason is acceptable and encouraged to deprecate old
124 124 reasons.
125 125 """
126 126
127 127 # Everything went well.
128 128 NONE = 0
129 129
130 130 # An unexpected exception was raised. Check the logs for more details.
131 131 UNKNOWN = 1
132 132
133 133 # The pull request is up to date.
134 134 NO_CHANGE = 2
135 135
136 136 # The pull request has a reference type that is not supported for update.
137 137 WRONG_REF_TYPE = 3
138 138
139 139 # Update failed because the target reference is missing.
140 140 MISSING_TARGET_REF = 4
141 141
142 142 # Update failed because the source reference is missing.
143 143 MISSING_SOURCE_REF = 5
144 144
145 145
146 146 class MergeResponse(object):
147 147
148 148 # uses .format(**metadata) for variables
149 149 MERGE_STATUS_MESSAGES = {
150 150 MergeFailureReason.NONE: lazy_ugettext(
151 151 u'This pull request can be automatically merged.'),
152 152 MergeFailureReason.UNKNOWN: lazy_ugettext(
153 153 u'This pull request cannot be merged because of an unhandled exception. '
154 154 u'{exception}'),
155 155 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
156 156 u'This pull request cannot be merged because of merge conflicts.'),
157 157 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
158 158 u'This pull request could not be merged because push to '
159 159 u'target:`{target}@{merge_commit}` failed.'),
160 160 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
161 161 u'This pull request cannot be merged because the target '
162 162 u'`{target_ref.name}` is not a head.'),
163 163 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
164 164 u'This pull request cannot be merged because the source contains '
165 165 u'more branches than the target.'),
166 166 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
167 167 u'This pull request cannot be merged because the target `{target_ref.name}` '
168 168 u'has multiple heads: `{heads}`.'),
169 169 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
170 170 u'This pull request cannot be merged because the target repository is '
171 171 u'locked by {locked_by}.'),
172 172
173 173 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
174 174 u'This pull request cannot be merged because the target '
175 175 u'reference `{target_ref.name}` is missing.'),
176 176 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
177 177 u'This pull request cannot be merged because the source '
178 178 u'reference `{source_ref.name}` is missing.'),
179 179 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
180 180 u'This pull request cannot be merged because of conflicts related '
181 181 u'to sub repositories.'),
182 182
183 183 # Deprecations
184 184 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
185 185 u'This pull request cannot be merged because the target or the '
186 186 u'source reference is missing.'),
187 187
188 188 }
189 189
190 190 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
191 191 self.possible = possible
192 192 self.executed = executed
193 193 self.merge_ref = merge_ref
194 194 self.failure_reason = failure_reason
195 195 self.metadata = metadata or {}
196 196
197 197 def __repr__(self):
198 198 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
199 199
200 200 def __eq__(self, other):
201 201 same_instance = isinstance(other, self.__class__)
202 202 return same_instance \
203 203 and self.possible == other.possible \
204 204 and self.executed == other.executed \
205 205 and self.failure_reason == other.failure_reason
206 206
207 207 @property
208 208 def label(self):
209 209 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
210 210 not k.startswith('_'))
211 211 return label_dict.get(self.failure_reason)
212 212
213 213 @property
214 214 def merge_status_message(self):
215 215 """
216 216 Return a human friendly error message for the given merge status code.
217 217 """
218 218 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
219 219 try:
220 220 return msg.format(**self.metadata)
221 221 except Exception:
222 222 log.exception('Failed to format %s message', self)
223 223 return msg
224 224
225 225 def asdict(self):
226 226 data = {}
227 227 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
228 228 'merge_status_message']:
229 229 data[k] = getattr(self, k)
230 230 return data
231 231
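# Illustrative usage sketch (not part of the original file): how a caller might
# inspect a MergeResponse; the 'locked_by' metadata key matches the
# TARGET_IS_LOCKED message template above.
#
#     resp = MergeResponse(
#         False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
#         metadata={'locked_by': 'admin'})
#     resp.merge_status_message   # human readable reason, formatted with metadata
#     resp.asdict()               # plain dict, e.g. for JSON serialization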
232 232
233 233 class BaseRepository(object):
234 234 """
235 235 Base Repository for final backends
236 236
237 237 .. attribute:: DEFAULT_BRANCH_NAME
238 238
239 239 name of default branch (i.e. "trunk" for svn, "master" for git, etc.)
240 240
241 241 .. attribute:: commit_ids
242 242
243 243 list of all available commit ids, in ascending order
244 244
245 245 .. attribute:: path
246 246
247 247 absolute path to the repository
248 248
249 249 .. attribute:: bookmarks
250 250
251 251 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
252 252 there are no bookmarks or the backend implementation does not support
253 253 bookmarks.
254 254
255 255 .. attribute:: tags
256 256
257 257 Mapping from name to :term:`Commit ID` of the tag.
258 258
259 259 """
260 260
261 261 DEFAULT_BRANCH_NAME = None
262 262 DEFAULT_CONTACT = u"Unknown"
263 263 DEFAULT_DESCRIPTION = u"unknown"
264 264 EMPTY_COMMIT_ID = '0' * 40
265 265
266 266 path = None
267 267
268 268 _is_empty = None
269 269 _commit_ids = {}
270 270
271 271 def __init__(self, repo_path, config=None, create=False, **kwargs):
272 272 """
273 273 Initializes repository. Raises RepositoryError if repository could
274 274 not be found at the given ``repo_path``, or if a directory at ``repo_path``
275 275 exists and ``create`` is set to True.
276 276
277 277 :param repo_path: local path of the repository
278 278 :param config: repository configuration
279 279 :param create=False: if set to True, would try to create repository.
280 280 :param src_url=None: if set, should be a proper url from which the repository
281 281 would be cloned; requires ``create`` parameter to be set to True -
282 282 raises RepositoryError if src_url is set and create evaluates to
283 283 False
284 284 """
285 285 raise NotImplementedError
286 286
287 287 def __repr__(self):
288 288 return '<%s at %s>' % (self.__class__.__name__, self.path)
289 289
290 290 def __len__(self):
291 291 return self.count()
292 292
293 293 def __eq__(self, other):
294 294 same_instance = isinstance(other, self.__class__)
295 295 return same_instance and other.path == self.path
296 296
297 297 def __ne__(self, other):
298 298 return not self.__eq__(other)
299 299
300 300 def get_create_shadow_cache_pr_path(self, db_repo):
301 301 path = db_repo.cached_diffs_dir
302 302 if not os.path.exists(path):
303 303 os.makedirs(path, 0o755)
304 304 return path
305 305
306 306 @classmethod
307 307 def get_default_config(cls, default=None):
308 308 config = Config()
309 309 if default and isinstance(default, list):
310 310 for section, key, val in default:
311 311 config.set(section, key, val)
312 312 return config
313 313
314 314 @LazyProperty
315 315 def _remote(self):
316 316 raise NotImplementedError
317 317
318 318 def _heads(self, branch=None):
319 319 return []
320 320
321 321 @LazyProperty
322 322 def EMPTY_COMMIT(self):
323 323 return EmptyCommit(self.EMPTY_COMMIT_ID)
324 324
325 325 @LazyProperty
326 326 def alias(self):
327 327 for k, v in settings.BACKENDS.items():
328 328 if v.split('.')[-1] == str(self.__class__.__name__):
329 329 return k
330 330
331 331 @LazyProperty
332 332 def name(self):
333 333 return safe_unicode(os.path.basename(self.path))
334 334
335 335 @LazyProperty
336 336 def description(self):
337 337 raise NotImplementedError
338 338
339 339 def refs(self):
340 340 """
341 341 returns a `dict` with branches, bookmarks, tags, and closed_branches
342 342 for this repository
343 343 """
344 344 return dict(
345 345 branches=self.branches,
346 346 branches_closed=self.branches_closed,
347 347 tags=self.tags,
348 348 bookmarks=self.bookmarks
349 349 )
350 350
351 351 @LazyProperty
352 352 def branches(self):
353 353 """
354 354 A `dict` which maps branch names to commit ids.
355 355 """
356 356 raise NotImplementedError
357 357
358 358 @LazyProperty
359 359 def branches_closed(self):
360 360 """
361 361 A `dict` which maps closed branch names to commit ids.
362 362 """
363 363 raise NotImplementedError
364 364
365 365 @LazyProperty
366 366 def bookmarks(self):
367 367 """
368 368 A `dict` which maps bookmark names to commit ids.
369 369 """
370 370 raise NotImplementedError
371 371
372 372 @LazyProperty
373 373 def tags(self):
374 374 """
375 375 A `dict` which maps tag names to commit ids.
376 376 """
377 377 raise NotImplementedError
378 378
379 379 @LazyProperty
380 380 def size(self):
381 381 """
382 382 Returns combined size in bytes for all repository files
383 383 """
384 384 tip = self.get_commit()
385 385 return tip.size
386 386
387 387 def size_at_commit(self, commit_id):
388 388 commit = self.get_commit(commit_id)
389 389 return commit.size
390 390
391 391 def _check_for_empty(self):
392 392 no_commits = len(self._commit_ids) == 0
393 393 if no_commits:
394 394 # check on remote to be sure
395 395 return self._remote.is_empty()
396 396 else:
397 397 return False
398 398
399 399 def is_empty(self):
400 400 if rhodecode.is_test:
401 401 return self._check_for_empty()
402 402
403 403 if self._is_empty is None:
404 404 # cache empty for production, but not tests
405 405 self._is_empty = self._check_for_empty()
406 406
407 407 return self._is_empty
408 408
409 409 @staticmethod
410 410 def check_url(url, config):
411 411 """
412 412 Checks the given url and tries to verify that it's a valid
413 413 link.
414 414 """
415 415 raise NotImplementedError
416 416
417 417 @staticmethod
418 418 def is_valid_repository(path):
419 419 """
420 420 Check if given `path` contains a valid repository of this backend
421 421 """
422 422 raise NotImplementedError
423 423
424 424 # ==========================================================================
425 425 # COMMITS
426 426 # ==========================================================================
427 427
428 428 @CachedProperty
429 429 def commit_ids(self):
430 430 raise NotImplementedError
431 431
432 432 def append_commit_id(self, commit_id):
433 433 if commit_id not in self.commit_ids:
434 434 self._rebuild_cache(self.commit_ids + [commit_id])
435 # clear cache
436 self._invalidate_prop_cache('commit_ids')
435
436 # clear cache
437 self._invalidate_prop_cache('commit_ids')
438 self._is_empty = False
437 439
438 440 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
439 441 """
440 442 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
441 443 are both None, most recent commit is returned.
442 444
443 445 :param pre_load: Optional. List of commit attributes to load.
444 446
445 447 :raises ``EmptyRepositoryError``: if there are no commits
446 448 """
447 449 raise NotImplementedError
448 450
449 451 def __iter__(self):
450 452 for commit_id in self.commit_ids:
451 453 yield self.get_commit(commit_id=commit_id)
452 454
453 455 def get_commits(
454 456 self, start_id=None, end_id=None, start_date=None, end_date=None,
455 457 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
456 458 """
457 459 Returns an iterator of `BaseCommit` objects from start to end.
458 460 This should behave just like a list, i.e. end is not
459 461 inclusive.
460 462
461 463 :param start_id: None or str, must be a valid commit id
462 464 :param end_id: None or str, must be a valid commit id
463 465 :param start_date:
464 466 :param end_date:
465 467 :param branch_name:
466 468 :param show_hidden:
467 469 :param pre_load:
468 470 :param translate_tags:
469 471 """
470 472 raise NotImplementedError
471 473
472 474 def __getitem__(self, key):
473 475 """
474 476 Allows index based access to the commit objects of this repository.
475 477 """
476 478 pre_load = ["author", "branch", "date", "message", "parents"]
477 479 if isinstance(key, slice):
478 480 return self._get_range(key, pre_load)
479 481 return self.get_commit(commit_idx=key, pre_load=pre_load)
480 482
481 483 def _get_range(self, slice_obj, pre_load):
482 484 for commit_id in self.commit_ids.__getitem__(slice_obj):
483 485 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
484 486
485 487 def count(self):
486 488 return len(self.commit_ids)
487 489
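# Illustrative sketch (not part of the original file) of the iteration and
# index/slice access defined above; `repo` is assumed to be a concrete backend
# instance (e.g. a Git or Mercurial repository object).
#
#     repo[0]           # first commit, via get_commit(commit_idx=0)
#     list(repo[:5])    # slices go through _get_range() and yield commits lazily
#     len(repo)         # == repo.count() == len(repo.commit_ids)
#     for commit in repo:  # __iter__ yields BaseCommit objects
#         print(commit.short_id)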
488 490 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
489 491 """
490 492 Creates and returns a tag for the given ``commit_id``.
491 493
492 494 :param name: name for new tag
493 495 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
494 496 :param commit_id: commit id for which new tag would be created
495 497 :param message: message of the tag's commit
496 498 :param date: date of tag's commit
497 499
498 500 :raises TagAlreadyExistError: if tag with same name already exists
499 501 """
500 502 raise NotImplementedError
501 503
502 504 def remove_tag(self, name, user, message=None, date=None):
503 505 """
504 506 Removes tag with the given ``name``.
505 507
506 508 :param name: name of the tag to be removed
507 509 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
508 510 :param message: message of the tag's removal commit
509 511 :param date: date of tag's removal commit
510 512
511 513 :raises TagDoesNotExistError: if tag with given name does not exist
512 514 """
513 515 raise NotImplementedError
514 516
515 517 def get_diff(
516 518 self, commit1, commit2, path=None, ignore_whitespace=False,
517 519 context=3, path1=None):
518 520 """
519 521 Returns (git like) *diff*, as plain text. Shows changes introduced by
520 522 `commit2` since `commit1`.
521 523
522 524 :param commit1: Entry point from which diff is shown. Can be
523 525 ``self.EMPTY_COMMIT`` - in this case, patch showing all
524 526 the changes since empty state of the repository until `commit2`
525 527 :param commit2: Until which commit changes should be shown.
526 528 :param path: Can be set to a path of a file to create a diff of that
527 529 file. If `path1` is also set, this value is only associated to
528 530 `commit2`.
529 531 :param ignore_whitespace: If set to ``True``, would not show whitespace
530 532 changes. Defaults to ``False``.
531 533 :param context: How many lines before/after changed lines should be
532 534 shown. Defaults to ``3``.
533 535 :param path1: Can be set to a path to associate with `commit1`. This
534 536 parameter works only for backends which support diff generation for
535 537 different paths. Other backends will raise a `ValueError` if `path1`
536 538 is set and has a different value than `path`.
537 539 :param file_path: filter this diff by given path pattern
538 540 """
539 541 raise NotImplementedError
540 542
541 543 def strip(self, commit_id, branch=None):
542 544 """
543 545 Strip given commit_id from the repository
544 546 """
545 547 raise NotImplementedError
546 548
547 549 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
548 550 """
549 551 Return the latest common ancestor commit if one exists for this repo
550 552 `commit_id1` vs `commit_id2` from `repo2`.
551 553
552 554 :param commit_id1: Commit id from this repository to use as a
553 555 target for the comparison.
554 556 :param commit_id2: Source commit id to use for comparison.
555 557 :param repo2: Source repository to use for comparison.
556 558 """
557 559 raise NotImplementedError
558 560
559 561 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
560 562 """
561 563 Compare this repository's revision `commit_id1` with `commit_id2`.
562 564
563 565 Returns a tuple(commits, ancestor) that would be merged from
564 566 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
565 567 will be returned as ancestor.
566 568
567 569 :param commit_id1: Commit id from this repository to use as a
568 570 target for the comparison.
569 571 :param commit_id2: Source commit id to use for comparison.
570 572 :param repo2: Source repository to use for comparison.
571 573 :param merge: If set to ``True`` will do a merge compare which also
572 574 returns the common ancestor.
573 575 :param pre_load: Optional. List of commit attributes to load.
574 576 """
575 577 raise NotImplementedError
576 578
577 579 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
578 580 user_name='', user_email='', message='', dry_run=False,
579 581 use_rebase=False, close_branch=False):
580 582 """
581 583 Merge the revisions specified in `source_ref` from `source_repo`
582 584 onto the `target_ref` of this repository.
583 585
584 586 `source_ref` and `target_ref` are named tuples with the following
585 587 fields `type`, `name` and `commit_id`.
586 588
587 589 Returns a MergeResponse object with the following fields:
588 590 'possible', 'executed', 'merge_ref',
589 591 'failure_reason'.
590 592
591 593 :param repo_id: `repo_id` target repo id.
592 594 :param workspace_id: `workspace_id` unique identifier.
593 595 :param target_ref: `target_ref` points to the commit on top of which
594 596 the `source_ref` should be merged.
595 597 :param source_repo: The repository that contains the commits to be
596 598 merged.
597 599 :param source_ref: `source_ref` points to the topmost commit from
598 600 the `source_repo` which should be merged.
599 601 :param user_name: Merge commit `user_name`.
600 602 :param user_email: Merge commit `user_email`.
601 603 :param message: Merge commit `message`.
602 604 :param dry_run: If `True` the merge will not take place.
603 605 :param use_rebase: If `True` commits from the source will be rebased
604 606 on top of the target instead of being merged.
605 607 :param close_branch: If `True` the branch will be closed before merging it
606 608 """
607 609 if dry_run:
608 610 message = message or settings.MERGE_DRY_RUN_MESSAGE
609 611 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
610 612 user_name = user_name or settings.MERGE_DRY_RUN_USER
611 613 else:
612 614 if not user_name:
613 615 raise ValueError('user_name cannot be empty')
614 616 if not user_email:
615 617 raise ValueError('user_email cannot be empty')
616 618 if not message:
617 619 raise ValueError('message cannot be empty')
618 620
619 621 try:
620 622 return self._merge_repo(
621 623 repo_id, workspace_id, target_ref, source_repo,
622 624 source_ref, message, user_name, user_email, dry_run=dry_run,
623 625 use_rebase=use_rebase, close_branch=close_branch)
624 626 except RepositoryError as exc:
625 627 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
626 628 return MergeResponse(
627 629 False, False, None, MergeFailureReason.UNKNOWN,
628 630 metadata={'exception': str(exc)})
629 631
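# Illustrative call sketch (not part of the original file) for the merge() API
# above; repo/ref names and ids are placeholders, and refs are the Reference
# named tuples defined at the top of this module.
#
#     target = Reference('branch', 'master', target_commit_id)
#     source = Reference('branch', 'feature', source_commit_id)
#     resp = target_repo.merge(
#         repo_id, workspace_id, target, source_repo, source,
#         user_name='Joe Doe', user_email='joe.doe@example.com',
#         message='merge feature into master', dry_run=True)
#     resp.possible, resp.failure_reason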
630 632 def _merge_repo(self, repo_id, workspace_id, target_ref,
631 633 source_repo, source_ref, merge_message,
632 634 merger_name, merger_email, dry_run=False,
633 635 use_rebase=False, close_branch=False):
634 636 """Internal implementation of merge."""
635 637 raise NotImplementedError
636 638
637 639 def _maybe_prepare_merge_workspace(
638 640 self, repo_id, workspace_id, target_ref, source_ref):
639 641 """
640 642 Create the merge workspace.
641 643
642 644 :param workspace_id: `workspace_id` unique identifier.
643 645 """
644 646 raise NotImplementedError
645 647
646 648 def _get_legacy_shadow_repository_path(self, workspace_id):
647 649 """
648 650 Legacy version that was used before. We still need it for
649 651 backward compat
650 652 """
651 653 return os.path.join(
652 654 os.path.dirname(self.path),
653 655 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
654 656
655 657 def _get_shadow_repository_path(self, repo_id, workspace_id):
656 658 # The name of the shadow repository must start with '.', so it is
657 659 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
658 660 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
659 661 if os.path.exists(legacy_repository_path):
660 662 return legacy_repository_path
661 663 else:
662 664 return os.path.join(
663 665 os.path.dirname(self.path),
664 666 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
665 667
666 668 def cleanup_merge_workspace(self, repo_id, workspace_id):
667 669 """
668 670 Remove merge workspace.
669 671
670 672 This function MUST not fail in case there is no workspace associated to
671 673 the given `workspace_id`.
672 674
673 675 :param workspace_id: `workspace_id` unique identifier.
674 676 """
675 677 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
676 678 shadow_repository_path_del = '{}.{}.delete'.format(
677 679 shadow_repository_path, time.time())
678 680
679 681 # move the shadow repo, so it never conflicts with the one used.
680 682 # we use this method because shutil.rmtree had some edge case problems
681 683 # removing symlinked repositories
682 684 if not os.path.isdir(shadow_repository_path):
683 685 return
684 686
685 687 shutil.move(shadow_repository_path, shadow_repository_path_del)
686 688 try:
687 689 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
688 690 except Exception:
689 691 log.exception('Failed to gracefully remove shadow repo under %s',
690 692 shadow_repository_path_del)
691 693 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
692 694
693 695 # ========== #
694 696 # COMMIT API #
695 697 # ========== #
696 698
697 699 @LazyProperty
698 700 def in_memory_commit(self):
699 701 """
700 702 Returns :class:`InMemoryCommit` object for this repository.
701 703 """
702 704 raise NotImplementedError
703 705
704 706 # ======================== #
705 707 # UTILITIES FOR SUBCLASSES #
706 708 # ======================== #
707 709
708 710 def _validate_diff_commits(self, commit1, commit2):
709 711 """
710 712 Validates that the given commits are related to this repository.
711 713
712 714 Intended as a utility for sub classes to have a consistent validation
713 715 of input parameters in methods like :meth:`get_diff`.
714 716 """
715 717 self._validate_commit(commit1)
716 718 self._validate_commit(commit2)
717 719 if (isinstance(commit1, EmptyCommit) and
718 720 isinstance(commit2, EmptyCommit)):
719 721 raise ValueError("Cannot compare two empty commits")
720 722
721 723 def _validate_commit(self, commit):
722 724 if not isinstance(commit, BaseCommit):
723 725 raise TypeError(
724 726 "%s is not of type BaseCommit" % repr(commit))
725 727 if commit.repository != self and not isinstance(commit, EmptyCommit):
726 728 raise ValueError(
727 729 "Commit %s must be a valid commit from this repository %s, "
728 730 "related to this repository instead %s." %
729 731 (commit, self, commit.repository))
730 732
731 733 def _validate_commit_id(self, commit_id):
732 734 if not isinstance(commit_id, compat.string_types):
733 735 raise TypeError("commit_id must be a string value")
734 736
735 737 def _validate_commit_idx(self, commit_idx):
736 738 if not isinstance(commit_idx, (int, long)):
737 739 raise TypeError("commit_idx must be a numeric value")
738 740
739 741 def _validate_branch_name(self, branch_name):
740 742 if branch_name and branch_name not in self.branches_all:
741 743 msg = ("Branch %s not found in %s" % (branch_name, self))
742 744 raise BranchDoesNotExistError(msg)
743 745
744 746 #
745 747 # Supporting deprecated API parts
746 748 # TODO: johbo: consider to move this into a mixin
747 749 #
748 750
749 751 @property
750 752 def EMPTY_CHANGESET(self):
751 753 warnings.warn(
752 754 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
753 755 return self.EMPTY_COMMIT_ID
754 756
755 757 @property
756 758 def revisions(self):
757 759 warnings.warn("Use commits attribute instead", DeprecationWarning)
758 760 return self.commit_ids
759 761
760 762 @revisions.setter
761 763 def revisions(self, value):
762 764 warnings.warn("Use commits attribute instead", DeprecationWarning)
763 765 self.commit_ids = value
764 766
765 767 def get_changeset(self, revision=None, pre_load=None):
766 768 warnings.warn("Use get_commit instead", DeprecationWarning)
767 769 commit_id = None
768 770 commit_idx = None
769 771 if isinstance(revision, compat.string_types):
770 772 commit_id = revision
771 773 else:
772 774 commit_idx = revision
773 775 return self.get_commit(
774 776 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
775 777
776 778 def get_changesets(
777 779 self, start=None, end=None, start_date=None, end_date=None,
778 780 branch_name=None, pre_load=None):
779 781 warnings.warn("Use get_commits instead", DeprecationWarning)
780 782 start_id = self._revision_to_commit(start)
781 783 end_id = self._revision_to_commit(end)
782 784 return self.get_commits(
783 785 start_id=start_id, end_id=end_id, start_date=start_date,
784 786 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
785 787
786 788 def _revision_to_commit(self, revision):
787 789 """
788 790 Translates a revision to a commit_id
789 791
790 792 Helps to support the old changeset based API which allows to use
791 793 commit ids and commit indices interchangeable.
792 794 """
793 795 if revision is None:
794 796 return revision
795 797
796 798 if isinstance(revision, compat.string_types):
797 799 commit_id = revision
798 800 else:
799 801 commit_id = self.commit_ids[revision]
800 802 return commit_id
801 803
802 804 @property
803 805 def in_memory_changeset(self):
804 806 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
805 807 return self.in_memory_commit
806 808
807 809 def get_path_permissions(self, username):
808 810 """
809 811 Returns a path permission checker or None if not supported
810 812
811 813 :param username: session user name
812 814 :return: an instance of BasePathPermissionChecker or None
813 815 """
814 816 return None
815 817
816 818 def install_hooks(self, force=False):
817 819 return self._remote.install_hooks(force)
818 820
819 821 def get_hooks_info(self):
820 822 return self._remote.get_hooks_info()
821 823
822 824
823 825 class BaseCommit(object):
824 826 """
825 827 Each backend should implement its commit representation.
826 828
827 829 **Attributes**
828 830
829 831 ``repository``
830 832 repository object within which commit exists
831 833
832 834 ``id``
833 835 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
834 836 just ``tip``.
835 837
836 838 ``raw_id``
837 839 raw commit representation (i.e. full 40 length sha for git
838 840 backend)
839 841
840 842 ``short_id``
841 843 shortened (if applicable) version of ``raw_id``; it would be a simple
842 844 shortcut for ``raw_id[:12]`` for git/mercurial backends, or the same
843 845 as ``raw_id`` for subversion
844 846
845 847 ``idx``
846 848 commit index
847 849
848 850 ``files``
849 851 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
850 852
851 853 ``dirs``
852 854 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
853 855
854 856 ``nodes``
855 857 combined list of ``Node`` objects
856 858
857 859 ``author``
858 860 author of the commit, as unicode
859 861
860 862 ``message``
861 863 message of the commit, as unicode
862 864
863 865 ``parents``
864 866 list of parent commits
865 867
866 868 """
867 869
868 870 branch = None
869 871 """
870 872 Depending on the backend this should be set to the branch name of the
871 873 commit. Backends not supporting branches on commits should leave this
872 874 value as ``None``.
873 875 """
874 876
875 877 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
876 878 """
877 879 This template is used to generate a default prefix for repository archives
878 880 if no prefix has been specified.
879 881 """
880 882
881 883 def __str__(self):
882 884 return '<%s at %s:%s>' % (
883 885 self.__class__.__name__, self.idx, self.short_id)
884 886
885 887 def __repr__(self):
886 888 return self.__str__()
887 889
888 890 def __unicode__(self):
889 891 return u'%s:%s' % (self.idx, self.short_id)
890 892
891 893 def __eq__(self, other):
892 894 same_instance = isinstance(other, self.__class__)
893 895 return same_instance and self.raw_id == other.raw_id
894 896
895 897 def __json__(self):
896 898 parents = []
897 899 try:
898 900 for parent in self.parents:
899 901 parents.append({'raw_id': parent.raw_id})
900 902 except NotImplementedError:
901 903 # empty commit doesn't have parents implemented
902 904 pass
903 905
904 906 return {
905 907 'short_id': self.short_id,
906 908 'raw_id': self.raw_id,
907 909 'revision': self.idx,
908 910 'message': self.message,
909 911 'date': self.date,
910 912 'author': self.author,
911 913 'parents': parents,
912 914 'branch': self.branch
913 915 }
914 916
915 917 def __getstate__(self):
916 918 d = self.__dict__.copy()
917 919 d.pop('_remote', None)
918 920 d.pop('repository', None)
919 921 return d
920 922
921 923 def _get_refs(self):
922 924 return {
923 925 'branches': [self.branch] if self.branch else [],
924 926 'bookmarks': getattr(self, 'bookmarks', []),
925 927 'tags': self.tags
926 928 }
927 929
928 930 @LazyProperty
929 931 def last(self):
930 932 """
931 933 ``True`` if this is the last commit in the repository, ``False``
932 934 otherwise; trying to access this attribute while there are no
933 935 commits would raise `EmptyRepositoryError`
934 936 """
935 937 if self.repository is None:
936 938 raise CommitError("Cannot check if it's most recent commit")
937 939 return self.raw_id == self.repository.commit_ids[-1]
938 940
939 941 @LazyProperty
940 942 def parents(self):
941 943 """
942 944 Returns list of parent commits.
943 945 """
944 946 raise NotImplementedError
945 947
946 948 @LazyProperty
947 949 def first_parent(self):
948 950 """
949 951 Returns the first parent commit, or an ``EmptyCommit`` if there are no parents.
950 952 """
951 953 return self.parents[0] if self.parents else EmptyCommit()
952 954
953 955 @property
954 956 def merge(self):
955 957 """
956 958 Returns ``True`` if this commit is a merge (has more than one parent).
957 959 """
958 960 return len(self.parents) > 1
959 961
960 962 @LazyProperty
961 963 def children(self):
962 964 """
963 965 Returns list of child commits.
964 966 """
965 967 raise NotImplementedError
966 968
967 969 @LazyProperty
968 970 def id(self):
969 971 """
970 972 Returns string identifying this commit.
971 973 """
972 974 raise NotImplementedError
973 975
974 976 @LazyProperty
975 977 def raw_id(self):
976 978 """
977 979 Returns raw string identifying this commit.
978 980 """
979 981 raise NotImplementedError
980 982
981 983 @LazyProperty
982 984 def short_id(self):
983 985 """
984 986 Returns shortened version of ``raw_id`` attribute, as string,
985 987 identifying this commit, useful for presentation to users.
986 988 """
987 989 raise NotImplementedError
988 990
989 991 @LazyProperty
990 992 def idx(self):
991 993 """
992 994 Returns integer identifying this commit.
993 995 """
994 996 raise NotImplementedError
995 997
996 998 @LazyProperty
997 999 def committer(self):
998 1000 """
999 1001 Returns committer for this commit
1000 1002 """
1001 1003 raise NotImplementedError
1002 1004
1003 1005 @LazyProperty
1004 1006 def committer_name(self):
1005 1007 """
1006 1008 Returns committer name for this commit
1007 1009 """
1008 1010
1009 1011 return author_name(self.committer)
1010 1012
1011 1013 @LazyProperty
1012 1014 def committer_email(self):
1013 1015 """
1014 1016 Returns committer email address for this commit
1015 1017 """
1016 1018
1017 1019 return author_email(self.committer)
1018 1020
1019 1021 @LazyProperty
1020 1022 def author(self):
1021 1023 """
1022 1024 Returns author for this commit
1023 1025 """
1024 1026
1025 1027 raise NotImplementedError
1026 1028
1027 1029 @LazyProperty
1028 1030 def author_name(self):
1029 1031 """
1030 1032 Returns author name for this commit
1031 1033 """
1032 1034
1033 1035 return author_name(self.author)
1034 1036
1035 1037 @LazyProperty
1036 1038 def author_email(self):
1037 1039 """
1038 1040 Returns author email address for this commit
1039 1041 """
1040 1042
1041 1043 return author_email(self.author)
1042 1044
1043 1045 def get_file_mode(self, path):
1044 1046 """
1045 1047 Returns stat mode of the file at `path`.
1046 1048 """
1047 1049 raise NotImplementedError
1048 1050
1049 1051 def is_link(self, path):
1050 1052 """
1051 1053 Returns ``True`` if given `path` is a symlink
1052 1054 """
1053 1055 raise NotImplementedError
1054 1056
1055 1057 def get_file_content(self, path):
1056 1058 """
1057 1059 Returns content of the file at the given `path`.
1058 1060 """
1059 1061 raise NotImplementedError
1060 1062
1061 1063 def get_file_size(self, path):
1062 1064 """
1063 1065 Returns size of the file at the given `path`.
1064 1066 """
1065 1067 raise NotImplementedError
1066 1068
1067 1069 def get_path_commit(self, path, pre_load=None):
1068 1070 """
1069 1071 Returns last commit of the file at the given `path`.
1070 1072
1071 1073 :param pre_load: Optional. List of commit attributes to load.
1072 1074 """
1073 1075 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1074 1076 if not commits:
1075 1077 raise RepositoryError(
1076 1078 'Failed to fetch history for path {}. '
1077 1079 'Please check if such path exists in your repository'.format(
1078 1080 path))
1079 1081 return commits[0]
1080 1082
1081 1083 def get_path_history(self, path, limit=None, pre_load=None):
1082 1084 """
1083 1085 Returns history of file as reversed list of :class:`BaseCommit`
1084 1086 objects for which file at given `path` has been modified.
1085 1087
1086 1088 :param limit: Optional. Allows to limit the size of the returned
1087 1089 history. This is intended as a hint to the underlying backend, so
1088 1090 that it can apply optimizations depending on the limit.
1089 1091 :param pre_load: Optional. List of commit attributes to load.
1090 1092 """
1091 1093 raise NotImplementedError
1092 1094
1093 1095 def get_file_annotate(self, path, pre_load=None):
1094 1096 """
1095 1097 Returns a generator of four element tuples with
1096 1098 lineno, sha, commit lazy loader and line
1097 1099
1098 1100 :param pre_load: Optional. List of commit attributes to load.
1099 1101 """
1100 1102 raise NotImplementedError
1101 1103
1102 1104 def get_nodes(self, path):
1103 1105 """
1104 1106 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1105 1107 state of commit at the given ``path``.
1106 1108
1107 1109 :raises ``CommitError``: if node at the given ``path`` is not
1108 1110 instance of ``DirNode``
1109 1111 """
1110 1112 raise NotImplementedError
1111 1113
1112 1114 def get_node(self, path):
1113 1115 """
1114 1116 Returns ``Node`` object from the given ``path``.
1115 1117
1116 1118 :raises ``NodeDoesNotExistError``: if there is no node at the given
1117 1119 ``path``
1118 1120 """
1119 1121 raise NotImplementedError
1120 1122
1121 1123 def get_largefile_node(self, path):
1122 1124 """
1123 1125 Returns the path to largefile from Mercurial/Git-lfs storage.
1124 1126 or None if it's not a largefile node
1125 1127 """
1126 1128 return None
1127 1129
1128 1130 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1129 1131 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1130 1132 """
1131 1133 Creates an archive containing the contents of the repository.
1132 1134
1133 1135 :param archive_dest_path: path to the file in which to create the archive.
1134 1136 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1135 1137 :param prefix: name of root directory in archive.
1136 1138 Default is repository name and commit's short_id joined with dash:
1137 1139 ``"{repo_name}-{short_id}"``.
1138 1140 :param write_metadata: write a metadata file into archive.
1139 1141 :param mtime: custom modification time for archive creation, defaults
1140 1142 to time.time() if not given.
1141 1143 :param archive_at_path: pack files at this path (default '/')
1142 1144
1143 1145 :raise VCSError: If prefix has a problem.
1144 1146 """
1145 1147 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1146 1148 if kind not in allowed_kinds:
1147 1149 raise ImproperArchiveTypeError(
1148 1150 'Archive kind (%s) not supported, use one of %s' %
1149 1151 (kind, allowed_kinds))
1150 1152
1151 1153 prefix = self._validate_archive_prefix(prefix)
1152 1154
1153 1155 mtime = mtime if mtime is not None else time.mktime(self.date.timetuple())
1154 1156
1155 1157 file_info = []
1156 1158 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1157 1159 for _r, _d, files in cur_rev.walk(archive_at_path):
1158 1160 for f in files:
1159 1161 f_path = os.path.join(prefix, f.path)
1160 1162 file_info.append(
1161 1163 (f_path, f.mode, f.is_link(), f.raw_bytes))
1162 1164
1163 1165 if write_metadata:
1164 1166 metadata = [
1165 1167 ('repo_name', self.repository.name),
1166 1168 ('commit_id', self.raw_id),
1167 1169 ('mtime', mtime),
1168 1170 ('branch', self.branch),
1169 1171 ('tags', ','.join(self.tags)),
1170 1172 ]
1171 1173 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1172 1174 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1173 1175
1174 1176 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1175 1177
1176 1178 def _validate_archive_prefix(self, prefix):
1177 1179 if prefix is None:
1178 1180 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1179 1181 repo_name=safe_str(self.repository.name),
1180 1182 short_id=self.short_id)
1181 1183 elif not isinstance(prefix, str):
1182 1184 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1183 1185 elif prefix.startswith('/'):
1184 1186 raise VCSError("Prefix cannot start with leading slash")
1185 1187 elif prefix.strip() == '':
1186 1188 raise VCSError("Prefix cannot be empty")
1187 1189 return prefix
1188 1190
1189 1191 @LazyProperty
1190 1192 def root(self):
1191 1193 """
1192 1194 Returns ``RootNode`` object for this commit.
1193 1195 """
1194 1196 return self.get_node('')
1195 1197
1196 1198 def next(self, branch=None):
1197 1199 """
1198 1200 Returns the next commit from the current one; if branch is given it will
1199 1201 return the next commit belonging to that branch
1200 1202
1201 1203 :param branch: show commits within the given named branch
1202 1204 """
1203 1205 indexes = xrange(self.idx + 1, self.repository.count())
1204 1206 return self._find_next(indexes, branch)
1205 1207
1206 1208 def prev(self, branch=None):
1207 1209 """
1208 1210 Returns the previous commit from the current one; if branch is given it
1209 1211 will return the previous commit belonging to that branch
1210 1212
1211 1213 :param branch: show commit within the given named branch
1212 1214 """
1213 1215 indexes = xrange(self.idx - 1, -1, -1)
1214 1216 return self._find_next(indexes, branch)
1215 1217
1216 1218 def _find_next(self, indexes, branch=None):
1217 1219 if branch and self.branch != branch:
1218 1220 raise VCSError('Branch option used on commit not belonging '
1219 1221 'to that branch')
1220 1222
1221 1223 for next_idx in indexes:
1222 1224 commit = self.repository.get_commit(commit_idx=next_idx)
1223 1225 if branch and branch != commit.branch:
1224 1226 continue
1225 1227 return commit
1226 1228 raise CommitDoesNotExistError
1227 1229
1228 1230 def diff(self, ignore_whitespace=True, context=3):
1229 1231 """
1230 1232 Returns a `Diff` object representing the change made by this commit.
1231 1233 """
1232 1234 parent = self.first_parent
1233 1235 diff = self.repository.get_diff(
1234 1236 parent, self,
1235 1237 ignore_whitespace=ignore_whitespace,
1236 1238 context=context)
1237 1239 return diff
1238 1240
1239 1241 @LazyProperty
1240 1242 def added(self):
1241 1243 """
1242 1244 Returns list of added ``FileNode`` objects.
1243 1245 """
1244 1246 raise NotImplementedError
1245 1247
1246 1248 @LazyProperty
1247 1249 def changed(self):
1248 1250 """
1249 1251 Returns list of modified ``FileNode`` objects.
1250 1252 """
1251 1253 raise NotImplementedError
1252 1254
1253 1255 @LazyProperty
1254 1256 def removed(self):
1255 1257 """
1256 1258 Returns list of removed ``FileNode`` objects.
1257 1259 """
1258 1260 raise NotImplementedError
1259 1261
1260 1262 @LazyProperty
1261 1263 def size(self):
1262 1264 """
1263 1265 Returns total number of bytes from contents of all filenodes.
1264 1266 """
1265 1267 return sum((node.size for node in self.get_filenodes_generator()))
1266 1268
1267 1269 def walk(self, topurl=''):
1268 1270 """
1269 1271 Similar to the os.walk method. Instead of a filesystem it walks through
1270 1272 the commit starting at the given ``topurl``. Returns a generator of tuples
1271 1273 (topnode, dirnodes, filenodes).
1272 1274 """
1273 1275 topnode = self.get_node(topurl)
1274 1276 if not topnode.is_dir():
1275 1277 return
1276 1278 yield (topnode, topnode.dirs, topnode.files)
1277 1279 for dirnode in topnode.dirs:
1278 1280 for tup in self.walk(dirnode.path):
1279 1281 yield tup
1280 1282
1281 1283 def get_filenodes_generator(self):
1282 1284 """
1283 1285 Returns generator that yields *all* file nodes.
1284 1286 """
1285 1287 for topnode, dirs, files in self.walk():
1286 1288 for node in files:
1287 1289 yield node
1288 1290
1289 1291 #
1290 1292 # Utilities for sub classes to support consistent behavior
1291 1293 #
1292 1294
1293 1295 def no_node_at_path(self, path):
1294 1296 return NodeDoesNotExistError(
1295 1297 u"There is no file nor directory at the given path: "
1296 1298 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1297 1299
1298 1300 def _fix_path(self, path):
1299 1301 """
1300 1302 Paths are stored without a trailing slash, so we need to get rid of it if
1301 1303 needed.
1302 1304 """
1303 1305 return path.rstrip('/')
1304 1306
1305 1307 #
1306 1308 # Deprecated API based on changesets
1307 1309 #
1308 1310
1309 1311 @property
1310 1312 def revision(self):
1311 1313 warnings.warn("Use idx instead", DeprecationWarning)
1312 1314 return self.idx
1313 1315
1314 1316 @revision.setter
1315 1317 def revision(self, value):
1316 1318 warnings.warn("Use idx instead", DeprecationWarning)
1317 1319 self.idx = value
1318 1320
1319 1321 def get_file_changeset(self, path):
1320 1322 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1321 1323 return self.get_path_commit(path)
1322 1324
1323 1325
1324 1326 class BaseChangesetClass(type):
1325 1327
1326 1328 def __instancecheck__(self, instance):
1327 1329 return isinstance(instance, BaseCommit)
1328 1330
1329 1331
1330 1332 class BaseChangeset(BaseCommit):
1331 1333
1332 1334 __metaclass__ = BaseChangesetClass
1333 1335
1334 1336 def __new__(cls, *args, **kwargs):
1335 1337 warnings.warn(
1336 1338 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1337 1339 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1338 1340
1339 1341
1340 1342 class BaseInMemoryCommit(object):
1341 1343 """
1342 1344 Represents differences between repository's state (most recent head) and
1343 1345 changes made *in place*.
1344 1346
1345 1347 **Attributes**
1346 1348
1347 1349 ``repository``
1348 1350 repository object for this in-memory-commit
1349 1351
1350 1352 ``added``
1351 1353 list of ``FileNode`` objects marked as *added*
1352 1354
1353 1355 ``changed``
1354 1356 list of ``FileNode`` objects marked as *changed*
1355 1357
1356 1358 ``removed``
1357 1359 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1358 1360 *removed*
1359 1361
1360 1362 ``parents``
1361 1363 list of :class:`BaseCommit` instances representing parents of
1362 1364 in-memory commit. Should always be 2-element sequence.
1363 1365
1364 1366 """
1365 1367
1366 1368 def __init__(self, repository):
1367 1369 self.repository = repository
1368 1370 self.added = []
1369 1371 self.changed = []
1370 1372 self.removed = []
1371 1373 self.parents = []
1372 1374
1373 1375 def add(self, *filenodes):
1374 1376 """
1375 1377 Marks given ``FileNode`` objects as *to be committed*.
1376 1378
1377 1379 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1378 1380 latest commit
1379 1381 :raises ``NodeAlreadyAddedError``: if node with same path is already
1380 1382 marked as *added*
1381 1383 """
1382 1384 # Check if not already marked as *added* first
1383 1385 for node in filenodes:
1384 1386 if node.path in (n.path for n in self.added):
1385 1387 raise NodeAlreadyAddedError(
1386 1388 "Such FileNode %s is already marked for addition"
1387 1389 % node.path)
1388 1390 for node in filenodes:
1389 1391 self.added.append(node)
1390 1392
1391 1393 def change(self, *filenodes):
1392 1394 """
1393 1395 Marks given ``FileNode`` objects to be *changed* in next commit.
1394 1396
1395 1397 :raises ``EmptyRepositoryError``: if there are no commits yet
1396 1398 :raises ``NodeAlreadyExistsError``: if node with same path is already
1397 1399 marked to be *changed*
1398 1400 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1399 1401 marked to be *removed*
1400 1402 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1401 1403 commit
1402 1404 :raises ``NodeNotChangedError``: if node hasn't really been changed
1403 1405 """
1404 1406 for node in filenodes:
1405 1407 if node.path in (n.path for n in self.removed):
1406 1408 raise NodeAlreadyRemovedError(
1407 1409 "Node at %s is already marked as removed" % node.path)
1408 1410 try:
1409 1411 self.repository.get_commit()
1410 1412 except EmptyRepositoryError:
1411 1413 raise EmptyRepositoryError(
1412 1414 "Nothing to change - try to *add* new nodes rather than "
1413 1415 "changing them")
1414 1416 for node in filenodes:
1415 1417 if node.path in (n.path for n in self.changed):
1416 1418 raise NodeAlreadyChangedError(
1417 1419 "Node at '%s' is already marked as changed" % node.path)
1418 1420 self.changed.append(node)
1419 1421
1420 1422 def remove(self, *filenodes):
1421 1423 """
1422 1424 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1423 1425 *removed* in next commit.
1424 1426
1425 1427 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1426 1428 be *removed*
1427 1429 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1428 1430 be *changed*
1429 1431 """
1430 1432 for node in filenodes:
1431 1433 if node.path in (n.path for n in self.removed):
1432 1434 raise NodeAlreadyRemovedError(
1433 1435 "Node is already marked to for removal at %s" % node.path)
1434 1436 if node.path in (n.path for n in self.changed):
1435 1437 raise NodeAlreadyChangedError(
1436 1438 "Node is already marked to be changed at %s" % node.path)
1437 1439 # We only mark node as *removed* - real removal is done by
1438 1440 # commit method
1439 1441 self.removed.append(node)
1440 1442
1441 1443 def reset(self):
1442 1444 """
1443 1445 Resets this instance to initial state (cleans ``added``, ``changed``
1444 1446 and ``removed`` lists).
1445 1447 """
1446 1448 self.added = []
1447 1449 self.changed = []
1448 1450 self.removed = []
1449 1451 self.parents = []
1450 1452
1451 1453 def get_ipaths(self):
1452 1454 """
1453 1455 Returns generator of paths from nodes marked as added, changed or
1454 1456 removed.
1455 1457 """
1456 1458 for node in itertools.chain(self.added, self.changed, self.removed):
1457 1459 yield node.path
1458 1460
1459 1461 def get_paths(self):
1460 1462 """
1461 1463 Returns list of paths from nodes marked as added, changed or removed.
1462 1464 """
1463 1465 return list(self.get_ipaths())
1464 1466
1465 1467 def check_integrity(self, parents=None):
1466 1468 """
1467 1469 Checks in-memory commit's integrity. Also, sets parents if not
1468 1470 already set.
1469 1471
1470 1472 :raises CommitError: if any error occurs (i.e.
1471 1473 ``NodeDoesNotExistError``).
1472 1474 """
1473 1475 if not self.parents:
1474 1476 parents = parents or []
1475 1477 if len(parents) == 0:
1476 1478 try:
1477 1479 parents = [self.repository.get_commit(), None]
1478 1480 except EmptyRepositoryError:
1479 1481 parents = [None, None]
1480 1482 elif len(parents) == 1:
1481 1483 parents += [None]
1482 1484 self.parents = parents
1483 1485
1484 1486 # Local parents, only if not None
1485 1487 parents = [p for p in self.parents if p]
1486 1488
1487 1489 # Check nodes marked as added
1488 1490 for p in parents:
1489 1491 for node in self.added:
1490 1492 try:
1491 1493 p.get_node(node.path)
1492 1494 except NodeDoesNotExistError:
1493 1495 pass
1494 1496 else:
1495 1497 raise NodeAlreadyExistsError(
1496 1498 "Node `%s` already exists at %s" % (node.path, p))
1497 1499
1498 1500 # Check nodes marked as changed
1499 1501 missing = set(self.changed)
1500 1502 not_changed = set(self.changed)
1501 1503 if self.changed and not parents:
1502 1504 raise NodeDoesNotExistError(str(self.changed[0].path))
1503 1505 for p in parents:
1504 1506 for node in self.changed:
1505 1507 try:
1506 1508 old = p.get_node(node.path)
1507 1509 missing.remove(node)
1508 1510 # if content actually changed, remove node from not_changed
1509 1511 if old.content != node.content:
1510 1512 not_changed.remove(node)
1511 1513 except NodeDoesNotExistError:
1512 1514 pass
1513 1515 if self.changed and missing:
1514 1516 raise NodeDoesNotExistError(
1515 1517 "Node `%s` marked as modified but missing in parents: %s"
1516 1518 % (node.path, parents))
1517 1519
1518 1520 if self.changed and not_changed:
1519 1521 raise NodeNotChangedError(
1520 1522 "Node `%s` wasn't actually changed (parents: %s)"
1521 1523 % (not_changed.pop().path, parents))
1522 1524
1523 1525 # Check nodes marked as removed
1524 1526 if self.removed and not parents:
1525 1527 raise NodeDoesNotExistError(
1526 1528 "Cannot remove node at %s as there "
1527 1529 "were no parents specified" % self.removed[0].path)
1528 1530 really_removed = set()
1529 1531 for p in parents:
1530 1532 for node in self.removed:
1531 1533 try:
1532 1534 p.get_node(node.path)
1533 1535 really_removed.add(node)
1534 1536 except CommitError:
1535 1537 pass
1536 1538 not_removed = set(self.removed) - really_removed
1537 1539 if not_removed:
1538 1540 # TODO: johbo: This code branch does not seem to be covered
1539 1541 raise NodeDoesNotExistError(
1540 1542 "Cannot remove node at %s from "
1541 1543 "following parents: %s" % (not_removed, parents))
1542 1544
1543 1545 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1544 1546 """
1545 1547 Performs in-memory commit (doesn't check workdir in any way) and
1546 1548 returns newly created :class:`BaseCommit`. Updates repository's
1547 1549 attribute `commits`.
1548 1550
1549 1551 .. note::
1550 1552
1551 1553 While overriding this method each backend should call
1552 1554 ``self.check_integrity(parents)`` in the first place.
1553 1555
1554 1556 :param message: message of the commit
1555 1557 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1556 1558 :param parents: single parent or sequence of parents from which commit
1557 1559 would be derived
1558 1560 :param date: ``datetime.datetime`` instance. Defaults to
1559 1561 ``datetime.datetime.now()``.
1560 1562 :param branch: branch name, as string. If none given, the backend's default
1561 1563 branch would be used.
1562 1564
1563 1565 :raises ``CommitError``: if any error occurs while committing
1564 1566 """
1565 1567 raise NotImplementedError
1566 1568
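# Illustrative workflow sketch (not part of the original file) for the
# in-memory commit API above; FileNode is assumed to come from
# rhodecode.lib.vcs.nodes and `repo` is a concrete backend instance.
#
#     imc = repo.in_memory_commit
#     imc.add(FileNode('docs/readme.rst', content='hello'))
#     imc.commit(message=u'Add readme',
#                author=u'Joe Doe <joe.doe@example.com>')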
1567 1569
1568 1570 class BaseInMemoryChangesetClass(type):
1569 1571
1570 1572 def __instancecheck__(self, instance):
1571 1573 return isinstance(instance, BaseInMemoryCommit)
1572 1574
1573 1575
1574 1576 class BaseInMemoryChangeset(BaseInMemoryCommit):
1575 1577
1576 1578 __metaclass__ = BaseInMemoryChangesetClass
1577 1579
1578 1580 def __new__(cls, *args, **kwargs):
1579 1581 warnings.warn(
1580 1582 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1581 1583 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1582 1584
1583 1585
1584 1586 class EmptyCommit(BaseCommit):
1585 1587 """
1586 1588 A dummy empty commit. It's possible to pass a hash when creating
1587 1589 an EmptyCommit
1588 1590 """
1589 1591
1590 1592 def __init__(
1591 1593 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1592 1594 message='', author='', date=None):
1593 1595 self._empty_commit_id = commit_id
1594 1596 # TODO: johbo: Solve idx parameter, default value does not make
1595 1597 # too much sense
1596 1598 self.idx = idx
1597 1599 self.message = message
1598 1600 self.author = author
1599 1601 self.date = date or datetime.datetime.fromtimestamp(0)
1600 1602 self.repository = repo
1601 1603 self.alias = alias
1602 1604
1603 1605 @LazyProperty
1604 1606 def raw_id(self):
1605 1607 """
1606 1608 Returns raw string identifying this commit, useful for web
1607 1609 representation.
1608 1610 """
1609 1611
1610 1612 return self._empty_commit_id
1611 1613
1612 1614 @LazyProperty
1613 1615 def branch(self):
1614 1616 if self.alias:
1615 1617 from rhodecode.lib.vcs.backends import get_backend
1616 1618 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1617 1619
1618 1620 @LazyProperty
1619 1621 def short_id(self):
1620 1622 return self.raw_id[:12]
1621 1623
1622 1624 @LazyProperty
1623 1625 def id(self):
1624 1626 return self.raw_id
1625 1627
1626 1628 def get_path_commit(self, path):
1627 1629 return self
1628 1630
1629 1631 def get_file_content(self, path):
1630 1632 return u''
1631 1633
1632 1634 def get_file_size(self, path):
1633 1635 return 0
1634 1636
1635 1637
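As a quick illustration of the class above (a sketch that only uses the constructor and properties defined here):

# Sketch: EmptyCommit acts as a null object for repositories without history.
empty = EmptyCommit(alias='git')                 # EmptyCommit as defined above
assert empty.raw_id == '0' * 40                  # EMPTY_COMMIT_ID
assert empty.short_id == '0' * 12                # first 12 chars of raw_id
assert empty.get_file_content('any/path') == u''
assert empty.get_file_size('any/path') == 0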
1636 1638 class EmptyChangesetClass(type):
1637 1639
1638 1640 def __instancecheck__(self, instance):
1639 1641 return isinstance(instance, EmptyCommit)
1640 1642
1641 1643
1642 1644 class EmptyChangeset(EmptyCommit):
1643 1645
1644 1646 __metaclass__ = EmptyChangesetClass
1645 1647
1646 1648 def __new__(cls, *args, **kwargs):
1647 1649 warnings.warn(
1648 1650 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1649 1651 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1650 1652
1651 1653 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1652 1654 alias=None, revision=-1, message='', author='', date=None):
1653 1655 if requested_revision is not None:
1654 1656 warnings.warn(
1655 1657 "Parameter requested_revision not supported anymore",
1656 1658 DeprecationWarning)
1657 1659 super(EmptyChangeset, self).__init__(
1658 1660 commit_id=cs, repo=repo, alias=alias, idx=revision,
1659 1661 message=message, author=author, date=date)
1660 1662
1661 1663 @property
1662 1664 def revision(self):
1663 1665 warnings.warn("Use idx instead", DeprecationWarning)
1664 1666 return self.idx
1665 1667
1666 1668 @revision.setter
1667 1669 def revision(self, value):
1668 1670 warnings.warn("Use idx instead", DeprecationWarning)
1669 1671 self.idx = value
1670 1672
1671 1673
1672 1674 class EmptyRepository(BaseRepository):
1673 1675 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1674 1676 pass
1675 1677
1676 1678 def get_diff(self, *args, **kwargs):
1677 1679 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1678 1680 return GitDiff('')
1679 1681
1680 1682
1681 1683 class CollectionGenerator(object):
1682 1684
1683 1685 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1684 1686 self.repo = repo
1685 1687 self.commit_ids = commit_ids
1686 1688 # TODO: (oliver) this isn't currently hooked up
1687 1689 self.collection_size = None
1688 1690 self.pre_load = pre_load
1689 1691 self.translate_tag = translate_tag
1690 1692
1691 1693 def __len__(self):
1692 1694 if self.collection_size is not None:
1693 1695 return self.collection_size
1694 1696 return self.commit_ids.__len__()
1695 1697
1696 1698 def __iter__(self):
1697 1699 for commit_id in self.commit_ids:
1698 1700 # TODO: johbo: Mercurial passes in commit indices or commit ids
1699 1701 yield self._commit_factory(commit_id)
1700 1702
1701 1703 def _commit_factory(self, commit_id):
1702 1704 """
1703 1705 Allows backends to override the way commits are generated.
1704 1706 """
1705 1707 return self.repo.get_commit(
1706 1708 commit_id=commit_id, pre_load=self.pre_load,
1707 1709 translate_tag=self.translate_tag)
1708 1710
1709 1711 def __getslice__(self, i, j):
1710 1712 """
1711 1713 Returns an iterator over a slice of the repository's commits
1712 1714 """
1713 1715 commit_ids = self.commit_ids[i:j]
1714 1716 return self.__class__(
1715 1717 self.repo, commit_ids, pre_load=self.pre_load,
1716 1718 translate_tag=self.translate_tag)
1717 1719
1718 1720 def __repr__(self):
1719 1721 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1720 1722
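For reference, a hedged sketch of how this lazy collection is typically consumed; the `repo.get_commits()` call returning a CollectionGenerator is an assumption about the wider repository API.

# Sketch: commits are only materialised while iterating; slicing stays lazy.
commits = repo.get_commits()     # assumed to return a CollectionGenerator
recent = commits[:5]             # __getslice__ -> a new CollectionGenerator
for commit in recent:
    print(commit.raw_id)         # _commit_factory() resolves each commit here
print(len(commits))              # __len__ falls back to len(commit_ids)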
1721 1723
1722 1724 class Config(object):
1723 1725 """
1724 1726 Represents the configuration for a repository.
1725 1727
1726 1728 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1727 1729 standard library. It implements only the needed subset.
1728 1730 """
1729 1731
1730 1732 def __init__(self):
1731 1733 self._values = {}
1732 1734
1733 1735 def copy(self):
1734 1736 clone = Config()
1735 1737 for section, values in self._values.items():
1736 1738 clone._values[section] = values.copy()
1737 1739 return clone
1738 1740
1739 1741 def __repr__(self):
1740 1742 return '<Config(%s sections) at %s>' % (
1741 1743 len(self._values), hex(id(self)))
1742 1744
1743 1745 def items(self, section):
1744 1746 return self._values.get(section, {}).iteritems()
1745 1747
1746 1748 def get(self, section, option):
1747 1749 return self._values.get(section, {}).get(option)
1748 1750
1749 1751 def set(self, section, option, value):
1750 1752 section_values = self._values.setdefault(section, {})
1751 1753 section_values[option] = value
1752 1754
1753 1755 def clear_section(self, section):
1754 1756 self._values[section] = {}
1755 1757
1756 1758 def serialize(self):
1757 1759 """
1758 1760 Creates a list of three-element tuples (section, key, value) representing
1759 1761 this config object.
1760 1762 """
1761 1763 items = []
1762 1764 for section in self._values:
1763 1765 for option, value in self._values[section].items():
1764 1766 items.append(
1765 1767 (safe_str(section), safe_str(option), safe_str(value)))
1766 1768 return items
1767 1769
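A minimal sketch exercising the Config API defined above; only methods shown in this class are used, and the section/option values are illustrative.

# Sketch: set/get/copy/serialize round-trip with the Config object above.
config = Config()
config.set('extensions', 'largefiles', '')
config.set('hooks', 'pretxnchangegroup', 'some-illustrative-value')

clone = config.copy()
clone.clear_section('hooks')                       # only affects the clone

assert config.get('hooks', 'pretxnchangegroup') == 'some-illustrative-value'
assert clone.get('hooks', 'pretxnchangegroup') is None
print(config.serialize())                          # [(section, option, value), ...]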
1768 1770
1769 1771 class Diff(object):
1770 1772 """
1771 1773 Represents a diff result from a repository backend.
1772 1774
1773 1775 Subclasses have to provide a backend specific value for
1774 1776 :attr:`_header_re` and :attr:`_meta_re`.
1775 1777 """
1776 1778 _meta_re = None
1777 1779 _header_re = None
1778 1780
1779 1781 def __init__(self, raw_diff):
1780 1782 self.raw = raw_diff
1781 1783
1782 1784 def chunks(self):
1783 1785 """
1784 1786 Split the diff into chunks of separate ``diff --git a/file b/file`` sections.
1785 1787 To keep chunks consistent we must prepend them with \n, and make sure
1786 1788 we can detect the last chunk, as it also has a special rule.
1787 1789 """
1788 1790
1789 1791 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1790 1792 header = diff_parts[0]
1791 1793
1792 1794 if self._meta_re:
1793 1795 match = self._meta_re.match(header)
1794 1796
1795 1797 chunks = diff_parts[1:]
1796 1798 total_chunks = len(chunks)
1797 1799
1798 1800 return (
1799 1801 DiffChunk(chunk, self, cur_chunk == total_chunks)
1800 1802 for cur_chunk, chunk in enumerate(chunks, start=1))
1801 1803
1802 1804
1803 1805 class DiffChunk(object):
1804 1806
1805 1807 def __init__(self, chunk, diff, last_chunk):
1806 1808 self._diff = diff
1807 1809
1808 1810 # since we split by \ndiff --git, each chunk loses its trailing newline from the original diff;
1809 1811 # we need to re-append it at the end, EXCEPT if it's the last chunk
1810 1812 if not last_chunk:
1811 1813 chunk += '\n'
1812 1814
1813 1815 match = self._diff._header_re.match(chunk)
1814 1816 self.header = match.groupdict()
1815 1817 self.diff = chunk[match.end():]
1816 1818 self.raw = chunk
1817 1819
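A hedged sketch of consuming the chunking API through a concrete subclass; GitDiff is imported exactly as in EmptyRepository.get_diff above, while the way the raw diff text is obtained here is only illustrative.

# Sketch: split a raw git diff into per-file DiffChunk objects.
from rhodecode.lib.vcs.backends.git.diff import GitDiff  # subclass used above

raw_diff = open('example.diff').read()    # illustrative source of raw diff text
for chunk in GitDiff(raw_diff).chunks():
    # chunk.header is the groupdict() of the backend-specific header regex
    print(sorted(chunk.header), len(chunk.diff))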
1818 1820
1819 1821 class BasePathPermissionChecker(object):
1820 1822
1821 1823 @staticmethod
1822 1824 def create_from_patterns(includes, excludes):
1823 1825 if includes and '*' in includes and not excludes:
1824 1826 return AllPathPermissionChecker()
1825 1827 elif excludes and '*' in excludes:
1826 1828 return NonePathPermissionChecker()
1827 1829 else:
1828 1830 return PatternPathPermissionChecker(includes, excludes)
1829 1831
1830 1832 @property
1831 1833 def has_full_access(self):
1832 1834 raise NotImplementedError()
1833 1835
1834 1836 def has_access(self, path):
1835 1837 raise NotImplementedError()
1836 1838
1837 1839
1838 1840 class AllPathPermissionChecker(BasePathPermissionChecker):
1839 1841
1840 1842 @property
1841 1843 def has_full_access(self):
1842 1844 return True
1843 1845
1844 1846 def has_access(self, path):
1845 1847 return True
1846 1848
1847 1849
1848 1850 class NonePathPermissionChecker(BasePathPermissionChecker):
1849 1851
1850 1852 @property
1851 1853 def has_full_access(self):
1852 1854 return False
1853 1855
1854 1856 def has_access(self, path):
1855 1857 return False
1856 1858
1857 1859
1858 1860 class PatternPathPermissionChecker(BasePathPermissionChecker):
1859 1861
1860 1862 def __init__(self, includes, excludes):
1861 1863 self.includes = includes
1862 1864 self.excludes = excludes
1863 1865 self.includes_re = [] if not includes else [
1864 1866 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1865 1867 self.excludes_re = [] if not excludes else [
1866 1868 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1867 1869
1868 1870 @property
1869 1871 def has_full_access(self):
1870 1872 return '*' in self.includes and not self.excludes
1871 1873
1872 1874 def has_access(self, path):
1873 1875 for regex in self.excludes_re:
1874 1876 if regex.match(path):
1875 1877 return False
1876 1878 for regex in self.includes_re:
1877 1879 if regex.match(path):
1878 1880 return True
1879 1881 return False
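A short sketch of the factory and matchers above: patterns are fnmatch-style, excludes are checked before includes, and anything matching neither list is denied.

# Sketch: build a checker from fnmatch-style include/exclude patterns.
checker = BasePathPermissionChecker.create_from_patterns(
    includes=['docs/*', 'README*'],
    excludes=['docs/internal/*'])

assert checker.has_access('README.rst')                    # include match
assert checker.has_access('docs/index.rst')                # include match
assert not checker.has_access('docs/internal/secret.txt')  # exclude wins
assert not checker.has_access('src/app.py')                # no include match
assert not checker.has_full_access                         # includes lack a bare '*'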
@@ -1,1073 +1,1074 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import re
23 23 import shutil
24 24 import time
25 25 import logging
26 26 import traceback
27 27 import datetime
28 28
29 29 from pyramid.threadlocal import get_current_request
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from rhodecode import events
33 33 from rhodecode.lib.auth import HasUserGroupPermissionAny
34 34 from rhodecode.lib.caching_query import FromCache
35 35 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
36 36 from rhodecode.lib.hooks_base import log_delete_repository
37 37 from rhodecode.lib.user_log_filter import user_log_filter
38 38 from rhodecode.lib.utils import make_db_config
39 39 from rhodecode.lib.utils2 import (
40 40 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
41 41 get_current_rhodecode_user, safe_int, datetime_to_time,
42 42 action_logger_generic)
43 43 from rhodecode.lib.vcs.backends import get_backend
44 44 from rhodecode.model import BaseModel
45 45 from rhodecode.model.db import (
46 46 _hash_key, joinedload, or_, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49 49
50 50 from rhodecode.model.settings import VcsSettingsModel
51 51
52 52
53 53 log = logging.getLogger(__name__)
54 54
55 55
56 56 class RepoModel(BaseModel):
57 57
58 58 cls = Repository
59 59
60 60 def _get_user_group(self, users_group):
61 61 return self._get_instance(UserGroup, users_group,
62 62 callback=UserGroup.get_by_group_name)
63 63
64 64 def _get_repo_group(self, repo_group):
65 65 return self._get_instance(RepoGroup, repo_group,
66 66 callback=RepoGroup.get_by_group_name)
67 67
68 68 def _create_default_perms(self, repository, private):
69 69 # create default permission
70 70 default = 'repository.read'
71 71 def_user = User.get_default_user()
72 72 for p in def_user.user_perms:
73 73 if p.permission.permission_name.startswith('repository.'):
74 74 default = p.permission.permission_name
75 75 break
76 76
77 77 default_perm = 'repository.none' if private else default
78 78
79 79 repo_to_perm = UserRepoToPerm()
80 80 repo_to_perm.permission = Permission.get_by_key(default_perm)
81 81
82 82 repo_to_perm.repository = repository
83 83 repo_to_perm.user_id = def_user.user_id
84 84
85 85 return repo_to_perm
86 86
87 87 @LazyProperty
88 88 def repos_path(self):
89 89 """
90 90 Gets the repositories root path from database
91 91 """
92 92 settings_model = VcsSettingsModel(sa=self.sa)
93 93 return settings_model.get_repos_location()
94 94
95 95 def get(self, repo_id):
96 96 repo = self.sa.query(Repository) \
97 97 .filter(Repository.repo_id == repo_id)
98 98
99 99 return repo.scalar()
100 100
101 101 def get_repo(self, repository):
102 102 return self._get_repo(repository)
103 103
104 104 def get_by_repo_name(self, repo_name, cache=False):
105 105 repo = self.sa.query(Repository) \
106 106 .filter(Repository.repo_name == repo_name)
107 107
108 108 if cache:
109 109 name_key = _hash_key(repo_name)
110 110 repo = repo.options(
111 111 FromCache("sql_cache_short", "get_repo_%s" % name_key))
112 112 return repo.scalar()
113 113
114 114 def _extract_id_from_repo_name(self, repo_name):
115 115 if repo_name.startswith('/'):
116 116 repo_name = repo_name.lstrip('/')
117 117 by_id_match = re.match(r'^_(\d{1,})', repo_name)
118 118 if by_id_match:
119 119 return by_id_match.groups()[0]
120 120
121 121 def get_repo_by_id(self, repo_name):
122 122 """
123 123 Extracts the repo by its id from special urls.
124 124 Example url is _11/repo_name
125 125
126 126 :param repo_name:
127 127 :return: repo object if matched else None
128 128 """
129 129
130 130 try:
131 131 _repo_id = self._extract_id_from_repo_name(repo_name)
132 132 if _repo_id:
133 133 return self.get(_repo_id)
134 134 except Exception:
135 135 log.exception('Failed to extract repo_name from URL')
136 136
137 137 return None
138 138
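For illustration, a sketch of the permalink-style lookup above; the id `11` and repo name are purely illustrative.

# Sketch: permalink-style names carry the repo id after a leading underscore.
model = RepoModel()                                 # RepoModel as defined above
repo = model.get_repo_by_id('_11/some-repo-name')   # extracts "11", then self.get(11)
if repo is None:
    print('no repository with id 11')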
139 139 def get_repos_for_root(self, root, traverse=False):
140 140 if traverse:
141 141 like_expression = u'{}%'.format(safe_unicode(root))
142 142 repos = Repository.query().filter(
143 143 Repository.repo_name.like(like_expression)).all()
144 144 else:
145 145 if root and not isinstance(root, RepoGroup):
146 146 raise ValueError(
147 147 'Root must be an instance '
148 148 'of RepoGroup, got:{} instead'.format(type(root)))
149 149 repos = Repository.query().filter(Repository.group == root).all()
150 150 return repos
151 151
152 152 def get_url(self, repo, request=None, permalink=False):
153 153 if not request:
154 154 request = get_current_request()
155 155
156 156 if not request:
157 157 return
158 158
159 159 if permalink:
160 160 return request.route_url(
161 161 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
162 162 else:
163 163 return request.route_url(
164 164 'repo_summary', repo_name=safe_str(repo.repo_name))
165 165
166 166 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
167 167 if not request:
168 168 request = get_current_request()
169 169
170 170 if not request:
171 171 return
172 172
173 173 if permalink:
174 174 return request.route_url(
175 175 'repo_commit', repo_name=safe_str(repo.repo_id),
176 176 commit_id=commit_id)
177 177
178 178 else:
179 179 return request.route_url(
180 180 'repo_commit', repo_name=safe_str(repo.repo_name),
181 181 commit_id=commit_id)
182 182
183 183 def get_repo_log(self, repo, filter_term):
184 184 repo_log = UserLog.query()\
185 185 .filter(or_(UserLog.repository_id == repo.repo_id,
186 186 UserLog.repository_name == repo.repo_name))\
187 187 .options(joinedload(UserLog.user))\
188 188 .options(joinedload(UserLog.repository))\
189 189 .order_by(UserLog.action_date.desc())
190 190
191 191 repo_log = user_log_filter(repo_log, filter_term)
192 192 return repo_log
193 193
194 194 @classmethod
195 195 def update_commit_cache(cls, repositories=None):
196 196 if not repositories:
197 197 repositories = Repository.getAll()
198 198 for repo in repositories:
199 199 repo.update_commit_cache()
200 200
201 201 def get_repos_as_dict(self, repo_list=None, admin=False,
202 202 super_user_actions=False, short_name=None):
203 203 _render = get_current_request().get_partial_renderer(
204 204 'rhodecode:templates/data_table/_dt_elements.mako')
205 205 c = _render.get_call_context()
206 206
207 207 def quick_menu(repo_name):
208 208 return _render('quick_menu', repo_name)
209 209
210 210 def repo_lnk(name, rtype, rstate, private, archived, fork_of):
211 211 if short_name is not None:
212 212 short_name_var = short_name
213 213 else:
214 214 short_name_var = not admin
215 215 return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
216 216 short_name=short_name_var, admin=False)
217 217
218 218 def last_change(last_change):
219 219 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
220 220 last_change = last_change + datetime.timedelta(seconds=
221 221 (datetime.datetime.now() - datetime.datetime.utcnow()).seconds)
222 222 return _render("last_change", last_change)
223 223
224 224 def rss_lnk(repo_name):
225 225 return _render("rss", repo_name)
226 226
227 227 def atom_lnk(repo_name):
228 228 return _render("atom", repo_name)
229 229
230 230 def last_rev(repo_name, cs_cache):
231 231 return _render('revision', repo_name, cs_cache.get('revision'),
232 232 cs_cache.get('raw_id'), cs_cache.get('author'),
233 233 cs_cache.get('message'), cs_cache.get('date'))
234 234
235 235 def desc(desc):
236 236 return _render('repo_desc', desc, c.visual.stylify_metatags)
237 237
238 238 def state(repo_state):
239 239 return _render("repo_state", repo_state)
240 240
241 241 def repo_actions(repo_name):
242 242 return _render('repo_actions', repo_name, super_user_actions)
243 243
244 244 def user_profile(username):
245 245 return _render('user_profile', username)
246 246
247 247 repos_data = []
248 248 for repo in repo_list:
249 249 cs_cache = repo.changeset_cache
250 250 row = {
251 251 "menu": quick_menu(repo.repo_name),
252 252
253 253 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
254 254 repo.private, repo.archived, repo.fork),
255 255 "name_raw": repo.repo_name.lower(),
256 256
257 257 "last_change": last_change(repo.last_commit_change),
258 258 "last_change_raw": datetime_to_time(repo.last_commit_change),
259 259
260 260 "last_changeset": last_rev(repo.repo_name, cs_cache),
261 261 "last_changeset_raw": cs_cache.get('revision'),
262 262
263 263 "desc": desc(repo.description_safe),
264 264 "owner": user_profile(repo.user.username),
265 265
266 266 "state": state(repo.repo_state),
267 267 "rss": rss_lnk(repo.repo_name),
268 268
269 269 "atom": atom_lnk(repo.repo_name),
270 270 }
271 271 if admin:
272 272 row.update({
273 273 "action": repo_actions(repo.repo_name),
274 274 })
275 275 repos_data.append(row)
276 276
277 277 return repos_data
278 278
279 279 def _get_defaults(self, repo_name):
280 280 """
281 281 Gets information about repository, and returns a dict for
282 282 usage in forms
283 283
284 284 :param repo_name:
285 285 """
286 286
287 287 repo_info = Repository.get_by_repo_name(repo_name)
288 288
289 289 if repo_info is None:
290 290 return None
291 291
292 292 defaults = repo_info.get_dict()
293 293 defaults['repo_name'] = repo_info.just_name
294 294
295 295 groups = repo_info.groups_with_parents
296 296 parent_group = groups[-1] if groups else None
297 297
298 298 # we use -1 as this is how in HTML, we mark an empty group
299 299 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
300 300
301 301 keys_to_process = (
302 302 {'k': 'repo_type', 'strip': False},
303 303 {'k': 'repo_enable_downloads', 'strip': True},
304 304 {'k': 'repo_description', 'strip': True},
305 305 {'k': 'repo_enable_locking', 'strip': True},
306 306 {'k': 'repo_landing_rev', 'strip': True},
307 307 {'k': 'clone_uri', 'strip': False},
308 308 {'k': 'push_uri', 'strip': False},
309 309 {'k': 'repo_private', 'strip': True},
310 310 {'k': 'repo_enable_statistics', 'strip': True}
311 311 )
312 312
313 313 for item in keys_to_process:
314 314 attr = item['k']
315 315 if item['strip']:
316 316 attr = remove_prefix(item['k'], 'repo_')
317 317
318 318 val = defaults[attr]
319 319 if item['k'] == 'repo_landing_rev':
320 320 val = ':'.join(defaults[attr])
321 321 defaults[item['k']] = val
322 322 if item['k'] == 'clone_uri':
323 323 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
324 324 if item['k'] == 'push_uri':
325 325 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
326 326
327 327 # fill owner
328 328 if repo_info.user:
329 329 defaults.update({'user': repo_info.user.username})
330 330 else:
331 331 replacement_user = User.get_first_super_admin().username
332 332 defaults.update({'user': replacement_user})
333 333
334 334 return defaults
335 335
336 336 def update(self, repo, **kwargs):
337 337 try:
338 338 cur_repo = self._get_repo(repo)
339 339 source_repo_name = cur_repo.repo_name
340 340 if 'user' in kwargs:
341 341 cur_repo.user = User.get_by_username(kwargs['user'])
342 342
343 343 if 'repo_group' in kwargs:
344 344 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
345 345 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
346 346
347 347 update_keys = [
348 348 (1, 'repo_description'),
349 349 (1, 'repo_landing_rev'),
350 350 (1, 'repo_private'),
351 351 (1, 'repo_enable_downloads'),
352 352 (1, 'repo_enable_locking'),
353 353 (1, 'repo_enable_statistics'),
354 354 (0, 'clone_uri'),
355 355 (0, 'push_uri'),
356 356 (0, 'fork_id')
357 357 ]
358 358 for strip, k in update_keys:
359 359 if k in kwargs:
360 360 val = kwargs[k]
361 361 if strip:
362 362 k = remove_prefix(k, 'repo_')
363 363
364 364 setattr(cur_repo, k, val)
365 365
366 366 new_name = cur_repo.get_new_name(kwargs['repo_name'])
367 367 cur_repo.repo_name = new_name
368 368
369 369 # if private flag is set, reset default permission to NONE
370 370 if kwargs.get('repo_private'):
371 371 EMPTY_PERM = 'repository.none'
372 372 RepoModel().grant_user_permission(
373 373 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
374 374 )
375 375
376 376 # handle extra fields
377 377 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
378 378 k = RepositoryField.un_prefix_key(field)
379 379 ex_field = RepositoryField.get_by_key_name(
380 380 key=k, repo=cur_repo)
381 381 if ex_field:
382 382 ex_field.field_value = kwargs[field]
383 383 self.sa.add(ex_field)
384 384 cur_repo.updated_on = datetime.datetime.now()
385 385 self.sa.add(cur_repo)
386 386
387 387 if source_repo_name != new_name:
388 388 # rename repository
389 389 self._rename_filesystem_repo(
390 390 old=source_repo_name, new=new_name)
391 391
392 392 return cur_repo
393 393 except Exception:
394 394 log.error(traceback.format_exc())
395 395 raise
396 396
397 397 def _create_repo(self, repo_name, repo_type, description, owner,
398 398 private=False, clone_uri=None, repo_group=None,
399 399 landing_rev='rev:tip', fork_of=None,
400 400 copy_fork_permissions=False, enable_statistics=False,
401 401 enable_locking=False, enable_downloads=False,
402 402 copy_group_permissions=False,
403 403 state=Repository.STATE_PENDING):
404 404 """
405 405 Create a repository inside the database with PENDING state. This should
406 406 only be executed by create(), with the exception of importing existing
407 407 repos.
408 408 """
409 409 from rhodecode.model.scm import ScmModel
410 410
411 411 owner = self._get_user(owner)
412 412 fork_of = self._get_repo(fork_of)
413 413 repo_group = self._get_repo_group(safe_int(repo_group))
414 414
415 415 try:
416 416 repo_name = safe_unicode(repo_name)
417 417 description = safe_unicode(description)
418 418 # repo name is just a name of repository
419 419 # while repo_name_full is a full qualified name that is combined
420 420 # with name and path of group
421 421 repo_name_full = repo_name
422 422 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
423 423
424 424 new_repo = Repository()
425 425 new_repo.repo_state = state
426 426 new_repo.enable_statistics = False
427 427 new_repo.repo_name = repo_name_full
428 428 new_repo.repo_type = repo_type
429 429 new_repo.user = owner
430 430 new_repo.group = repo_group
431 431 new_repo.description = description or repo_name
432 432 new_repo.private = private
433 433 new_repo.archived = False
434 434 new_repo.clone_uri = clone_uri
435 435 new_repo.landing_rev = landing_rev
436 436
437 437 new_repo.enable_statistics = enable_statistics
438 438 new_repo.enable_locking = enable_locking
439 439 new_repo.enable_downloads = enable_downloads
440 440
441 441 if repo_group:
442 442 new_repo.enable_locking = repo_group.enable_locking
443 443
444 444 if fork_of:
445 445 parent_repo = fork_of
446 446 new_repo.fork = parent_repo
447 447
448 448 events.trigger(events.RepoPreCreateEvent(new_repo))
449 449
450 450 self.sa.add(new_repo)
451 451
452 452 EMPTY_PERM = 'repository.none'
453 453 if fork_of and copy_fork_permissions:
454 454 repo = fork_of
455 455 user_perms = UserRepoToPerm.query() \
456 456 .filter(UserRepoToPerm.repository == repo).all()
457 457 group_perms = UserGroupRepoToPerm.query() \
458 458 .filter(UserGroupRepoToPerm.repository == repo).all()
459 459
460 460 for perm in user_perms:
461 461 UserRepoToPerm.create(
462 462 perm.user, new_repo, perm.permission)
463 463
464 464 for perm in group_perms:
465 465 UserGroupRepoToPerm.create(
466 466 perm.users_group, new_repo, perm.permission)
467 467 # in case we copy permissions and also set this repo to private
468 468 # override the default user permission to make it a private repo
469 469 if private:
470 470 RepoModel(self.sa).grant_user_permission(
471 471 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
472 472
473 473 elif repo_group and copy_group_permissions:
474 474 user_perms = UserRepoGroupToPerm.query() \
475 475 .filter(UserRepoGroupToPerm.group == repo_group).all()
476 476
477 477 group_perms = UserGroupRepoGroupToPerm.query() \
478 478 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
479 479
480 480 for perm in user_perms:
481 481 perm_name = perm.permission.permission_name.replace(
482 482 'group.', 'repository.')
483 483 perm_obj = Permission.get_by_key(perm_name)
484 484 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
485 485
486 486 for perm in group_perms:
487 487 perm_name = perm.permission.permission_name.replace(
488 488 'group.', 'repository.')
489 489 perm_obj = Permission.get_by_key(perm_name)
490 490 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
491 491
492 492 if private:
493 493 RepoModel(self.sa).grant_user_permission(
494 494 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
495 495
496 496 else:
497 497 perm_obj = self._create_default_perms(new_repo, private)
498 498 self.sa.add(perm_obj)
499 499
500 500 # now automatically start following this repository as owner
501 501 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
502 502
503 503 # we need to flush here, in order to check if database won't
504 504 # throw any exceptions, create filesystem dirs at the very end
505 505 self.sa.flush()
506 506 events.trigger(events.RepoCreateEvent(new_repo))
507 507 return new_repo
508 508
509 509 except Exception:
510 510 log.error(traceback.format_exc())
511 511 raise
512 512
513 513 def create(self, form_data, cur_user):
514 514 """
515 515 Create repository using celery tasks
516 516
517 517 :param form_data:
518 518 :param cur_user:
519 519 """
520 520 from rhodecode.lib.celerylib import tasks, run_task
521 521 return run_task(tasks.create_repo, form_data, cur_user)
522 522
523 523 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
524 524 perm_deletions=None, check_perms=True,
525 525 cur_user=None):
526 526 if not perm_additions:
527 527 perm_additions = []
528 528 if not perm_updates:
529 529 perm_updates = []
530 530 if not perm_deletions:
531 531 perm_deletions = []
532 532
533 533 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
534 534
535 535 changes = {
536 536 'added': [],
537 537 'updated': [],
538 538 'deleted': []
539 539 }
540 540 # update permissions
541 541 for member_id, perm, member_type in perm_updates:
542 542 member_id = int(member_id)
543 543 if member_type == 'user':
544 544 member_name = User.get(member_id).username
545 545 # this updates also current one if found
546 546 self.grant_user_permission(
547 547 repo=repo, user=member_id, perm=perm)
548 548 elif member_type == 'user_group':
549 549 # check if we have permissions to alter this usergroup
550 550 member_name = UserGroup.get(member_id).users_group_name
551 551 if not check_perms or HasUserGroupPermissionAny(
552 552 *req_perms)(member_name, user=cur_user):
553 553 self.grant_user_group_permission(
554 554 repo=repo, group_name=member_id, perm=perm)
555 555 else:
556 556 raise ValueError("member_type must be 'user' or 'user_group' "
557 557 "got {} instead".format(member_type))
558 558 changes['updated'].append({'type': member_type, 'id': member_id,
559 559 'name': member_name, 'new_perm': perm})
560 560
561 561 # set new permissions
562 562 for member_id, perm, member_type in perm_additions:
563 563 member_id = int(member_id)
564 564 if member_type == 'user':
565 565 member_name = User.get(member_id).username
566 566 self.grant_user_permission(
567 567 repo=repo, user=member_id, perm=perm)
568 568 elif member_type == 'user_group':
569 569 # check if we have permissions to alter this usergroup
570 570 member_name = UserGroup.get(member_id).users_group_name
571 571 if not check_perms or HasUserGroupPermissionAny(
572 572 *req_perms)(member_name, user=cur_user):
573 573 self.grant_user_group_permission(
574 574 repo=repo, group_name=member_id, perm=perm)
575 575 else:
576 576 raise ValueError("member_type must be 'user' or 'user_group' "
577 577 "got {} instead".format(member_type))
578 578
579 579 changes['added'].append({'type': member_type, 'id': member_id,
580 580 'name': member_name, 'new_perm': perm})
581 581 # delete permissions
582 582 for member_id, perm, member_type in perm_deletions:
583 583 member_id = int(member_id)
584 584 if member_type == 'user':
585 585 member_name = User.get(member_id).username
586 586 self.revoke_user_permission(repo=repo, user=member_id)
587 587 elif member_type == 'user_group':
588 588 # check if we have permissions to alter this usergroup
589 589 member_name = UserGroup.get(member_id).users_group_name
590 590 if not check_perms or HasUserGroupPermissionAny(
591 591 *req_perms)(member_name, user=cur_user):
592 592 self.revoke_user_group_permission(
593 593 repo=repo, group_name=member_id)
594 594 else:
595 595 raise ValueError("member_type must be 'user' or 'user_group' "
596 596 "got {} instead".format(member_type))
597 597
598 598 changes['deleted'].append({'type': member_type, 'id': member_id,
599 599 'name': member_name, 'new_perm': perm})
600 600 return changes
601 601
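A hedged sketch of the expected input shape for the method above: each entry is a (member_id, permission_name, member_type) tuple; the ids and names here are illustrative.

# Sketch: grant, update and revoke repo permissions in one call.
changes = RepoModel().update_permissions(
    repo='some-repo',
    perm_additions=[(2, 'repository.write', 'user')],   # illustrative user id
    perm_updates=[(3, 'repository.read', 'user')],
    perm_deletions=[(4, 'repository.none', 'user')],
    cur_user='admin')
print(changes['added'], changes['updated'], changes['deleted'])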
602 602 def create_fork(self, form_data, cur_user):
603 603 """
604 604 Simple wrapper that runs the celery task for fork creation
605 605
606 606 :param form_data:
607 607 :param cur_user:
608 608 """
609 609 from rhodecode.lib.celerylib import tasks, run_task
610 610 return run_task(tasks.create_repo_fork, form_data, cur_user)
611 611
612 612 def archive(self, repo):
613 613 """
614 614 Archive given repository. Set archive flag.
615 615
616 616 :param repo:
617 617 """
618 618 repo = self._get_repo(repo)
619 619 if repo:
620 620
621 621 try:
622 622 repo.archived = True
623 623 self.sa.add(repo)
624 624 self.sa.commit()
625 625 except Exception:
626 626 log.error(traceback.format_exc())
627 627 raise
628 628
629 629 def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
630 630 """
631 631 Delete the given repository; the forks parameter defines what to do with
632 632 attached forks. Raises AttachedForksError if the deleted repo has attached
633 633 forks.
634 634
635 635 :param repo:
636 636 :param forks: str 'delete' or 'detach'
637 637 :param pull_requests: str 'delete' or None
638 638 :param fs_remove: remove(archive) repo from filesystem
639 639 """
640 640 if not cur_user:
641 641 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
642 642 repo = self._get_repo(repo)
643 643 if repo:
644 644 if forks == 'detach':
645 645 for r in repo.forks:
646 646 r.fork = None
647 647 self.sa.add(r)
648 648 elif forks == 'delete':
649 649 for r in repo.forks:
650 650 self.delete(r, forks='delete')
651 651 elif [f for f in repo.forks]:
652 652 raise AttachedForksError()
653 653
654 654 # check for pull requests
655 655 pr_sources = repo.pull_requests_source
656 656 pr_targets = repo.pull_requests_target
657 657 if pull_requests != 'delete' and (pr_sources or pr_targets):
658 658 raise AttachedPullRequestsError()
659 659
660 660 old_repo_dict = repo.get_dict()
661 661 events.trigger(events.RepoPreDeleteEvent(repo))
662 662 try:
663 663 self.sa.delete(repo)
664 664 if fs_remove:
665 665 self._delete_filesystem_repo(repo)
666 666 else:
667 667 log.debug('skipping removal from filesystem')
668 668 old_repo_dict.update({
669 669 'deleted_by': cur_user,
670 670 'deleted_on': time.time(),
671 671 })
672 672 log_delete_repository(**old_repo_dict)
673 673 events.trigger(events.RepoDeleteEvent(repo))
674 674 except Exception:
675 675 log.error(traceback.format_exc())
676 676 raise
677 677
678 678 def grant_user_permission(self, repo, user, perm):
679 679 """
680 680 Grant permission for user on given repository, or update existing one
681 681 if found
682 682
683 683 :param repo: Instance of Repository, repository_id, or repository name
684 684 :param user: Instance of User, user_id or username
685 685 :param perm: Instance of Permission, or permission_name
686 686 """
687 687 user = self._get_user(user)
688 688 repo = self._get_repo(repo)
689 689 permission = self._get_perm(perm)
690 690
691 691 # check if we have that permission already
692 692 obj = self.sa.query(UserRepoToPerm) \
693 693 .filter(UserRepoToPerm.user == user) \
694 694 .filter(UserRepoToPerm.repository == repo) \
695 695 .scalar()
696 696 if obj is None:
697 697 # create new !
698 698 obj = UserRepoToPerm()
699 699 obj.repository = repo
700 700 obj.user = user
701 701 obj.permission = permission
702 702 self.sa.add(obj)
703 703 log.debug('Granted perm %s to %s on %s', perm, user, repo)
704 704 action_logger_generic(
705 705 'granted permission: {} to user: {} on repo: {}'.format(
706 706 perm, user, repo), namespace='security.repo')
707 707 return obj
708 708
709 709 def revoke_user_permission(self, repo, user):
710 710 """
711 711 Revoke permission for user on given repository
712 712
713 713 :param repo: Instance of Repository, repository_id, or repository name
714 714 :param user: Instance of User, user_id or username
715 715 """
716 716
717 717 user = self._get_user(user)
718 718 repo = self._get_repo(repo)
719 719
720 720 obj = self.sa.query(UserRepoToPerm) \
721 721 .filter(UserRepoToPerm.repository == repo) \
722 722 .filter(UserRepoToPerm.user == user) \
723 723 .scalar()
724 724 if obj:
725 725 self.sa.delete(obj)
726 726 log.debug('Revoked perm on %s on %s', repo, user)
727 727 action_logger_generic(
728 728 'revoked permission from user: {} on repo: {}'.format(
729 729 user, repo), namespace='security.repo')
730 730
731 731 def grant_user_group_permission(self, repo, group_name, perm):
732 732 """
733 733 Grant permission for user group on given repository, or update
734 734 existing one if found
735 735
736 736 :param repo: Instance of Repository, repository_id, or repository name
737 737 :param group_name: Instance of UserGroup, users_group_id,
738 738 or user group name
739 739 :param perm: Instance of Permission, or permission_name
740 740 """
741 741 repo = self._get_repo(repo)
742 742 group_name = self._get_user_group(group_name)
743 743 permission = self._get_perm(perm)
744 744
745 745 # check if we have that permission already
746 746 obj = self.sa.query(UserGroupRepoToPerm) \
747 747 .filter(UserGroupRepoToPerm.users_group == group_name) \
748 748 .filter(UserGroupRepoToPerm.repository == repo) \
749 749 .scalar()
750 750
751 751 if obj is None:
752 752 # create new
753 753 obj = UserGroupRepoToPerm()
754 754
755 755 obj.repository = repo
756 756 obj.users_group = group_name
757 757 obj.permission = permission
758 758 self.sa.add(obj)
759 759 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
760 760 action_logger_generic(
761 761 'granted permission: {} to usergroup: {} on repo: {}'.format(
762 762 perm, group_name, repo), namespace='security.repo')
763 763
764 764 return obj
765 765
766 766 def revoke_user_group_permission(self, repo, group_name):
767 767 """
768 768 Revoke permission for user group on given repository
769 769
770 770 :param repo: Instance of Repository, repository_id, or repository name
771 771 :param group_name: Instance of UserGroup, users_group_id,
772 772 or user group name
773 773 """
774 774 repo = self._get_repo(repo)
775 775 group_name = self._get_user_group(group_name)
776 776
777 777 obj = self.sa.query(UserGroupRepoToPerm) \
778 778 .filter(UserGroupRepoToPerm.repository == repo) \
779 779 .filter(UserGroupRepoToPerm.users_group == group_name) \
780 780 .scalar()
781 781 if obj:
782 782 self.sa.delete(obj)
783 783 log.debug('Revoked perm to %s on %s', repo, group_name)
784 784 action_logger_generic(
785 785 'revoked permission from usergroup: {} on repo: {}'.format(
786 786 group_name, repo), namespace='security.repo')
787 787
788 788 def delete_stats(self, repo_name):
789 789 """
790 790 removes stats for given repo
791 791
792 792 :param repo_name:
793 793 """
794 794 repo = self._get_repo(repo_name)
795 795 try:
796 796 obj = self.sa.query(Statistics) \
797 797 .filter(Statistics.repository == repo).scalar()
798 798 if obj:
799 799 self.sa.delete(obj)
800 800 except Exception:
801 801 log.error(traceback.format_exc())
802 802 raise
803 803
804 804 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
805 805 field_type='str', field_desc=''):
806 806
807 807 repo = self._get_repo(repo_name)
808 808
809 809 new_field = RepositoryField()
810 810 new_field.repository = repo
811 811 new_field.field_key = field_key
812 812 new_field.field_type = field_type # python type
813 813 new_field.field_value = field_value
814 814 new_field.field_desc = field_desc
815 815 new_field.field_label = field_label
816 816 self.sa.add(new_field)
817 817 return new_field
818 818
819 819 def delete_repo_field(self, repo_name, field_key):
820 820 repo = self._get_repo(repo_name)
821 821 field = RepositoryField.get_by_key_name(field_key, repo)
822 822 if field:
823 823 self.sa.delete(field)
824 824
825 825 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
826 826 clone_uri=None, repo_store_location=None,
827 use_global_config=False):
827 use_global_config=False, install_hooks=True):
828 828 """
829 829 Makes a repository on the filesystem. It is group aware, meaning it will
830 830 create a repository within a group and alter the paths according to the
831 831 group location.
832 832
833 833 :param repo_name:
834 834 :param alias:
835 835 :param parent:
836 836 :param clone_uri:
837 837 :param repo_store_location:
838 838 """
839 839 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
840 840 from rhodecode.model.scm import ScmModel
841 841
842 842 if Repository.NAME_SEP in repo_name:
843 843 raise ValueError(
844 844 'repo_name must not contain groups got `%s`' % repo_name)
845 845
846 846 if isinstance(repo_group, RepoGroup):
847 847 new_parent_path = os.sep.join(repo_group.full_path_splitted)
848 848 else:
849 849 new_parent_path = repo_group or ''
850 850
851 851 if repo_store_location:
852 852 _paths = [repo_store_location]
853 853 else:
854 854 _paths = [self.repos_path, new_parent_path, repo_name]
855 855 # we need to make it str for mercurial
856 856 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
857 857
858 858 # check if this path is not a repository
859 859 if is_valid_repo(repo_path, self.repos_path):
860 860 raise Exception('This path %s is a valid repository' % repo_path)
861 861
862 862 # check if this path is a group
863 863 if is_valid_repo_group(repo_path, self.repos_path):
864 864 raise Exception('This path %s is a valid group' % repo_path)
865 865
866 866 log.info('creating repo %s in %s from url: `%s`',
867 867 repo_name, safe_unicode(repo_path),
868 868 obfuscate_url_pw(clone_uri))
869 869
870 870 backend = get_backend(repo_type)
871 871
872 872 config_repo = None if use_global_config else repo_name
873 873 if config_repo and new_parent_path:
874 874 config_repo = Repository.NAME_SEP.join(
875 875 (new_parent_path, config_repo))
876 876 config = make_db_config(clear_session=False, repo=config_repo)
877 877 config.set('extensions', 'largefiles', '')
878 878
879 879 # patch and reset hooks section of UI config to not run any
880 880 # hooks on creating remote repo
881 881 config.clear_section('hooks')
882 882
883 883 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
884 884 if repo_type == 'git':
885 885 repo = backend(
886 886 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
887 887 with_wire={"cache": False})
888 888 else:
889 889 repo = backend(
890 890 repo_path, config=config, create=True, src_url=clone_uri,
891 891 with_wire={"cache": False})
892 892
893 repo.install_hooks()
893 if install_hooks:
894 repo.install_hooks()
894 895
895 896 log.debug('Created repo %s with %s backend',
896 897 safe_unicode(repo_name), safe_unicode(repo_type))
897 898 return repo
898 899
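The new `install_hooks` flag above lets callers defer hook installation until later; a hedged sketch of calling the helper directly (all argument values are illustrative):

# Sketch: create the on-disk repo but skip repo.install_hooks() for now.
model = RepoModel()
scm_repo = model._create_filesystem_repo(
    repo_name='example-repo',   # must not contain group separators
    repo_type='git',
    repo_group='',              # no parent group
    clone_uri=None,
    install_hooks=False)        # hooks installed by the caller later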
899 900 def _rename_filesystem_repo(self, old, new):
900 901 """
901 902 renames repository on filesystem
902 903
903 904 :param old: old name
904 905 :param new: new name
905 906 """
906 907 log.info('renaming repo from %s to %s', old, new)
907 908
908 909 old_path = os.path.join(self.repos_path, old)
909 910 new_path = os.path.join(self.repos_path, new)
910 911 if os.path.isdir(new_path):
911 912 raise Exception(
912 913 'Was trying to rename to already existing dir %s' % new_path
913 914 )
914 915 shutil.move(old_path, new_path)
915 916
916 917 def _delete_filesystem_repo(self, repo):
917 918 """
918 919 Removes the repo from the filesystem. The removal is actually done by
919 920 adding a rm__ prefix to the dir and renaming the internal .hg/.git dirs, so
920 921 this repository is no longer valid for rhodecode; it can be undeleted later
921 922 by reverting the renames on this repository.
922 923
923 924 :param repo: repo object
924 925 """
925 926 rm_path = os.path.join(self.repos_path, repo.repo_name)
926 927 repo_group = repo.group
927 928 log.info("Removing repository %s", rm_path)
928 929 # disable hg/git internals so it doesn't get detected as a repo
929 930 alias = repo.repo_type
930 931
931 932 config = make_db_config(clear_session=False)
932 933 config.set('extensions', 'largefiles', '')
933 934 bare = getattr(repo.scm_instance(config=config), 'bare', False)
934 935
935 936 # skip this for bare git repos
936 937 if not bare:
937 938 # disable VCS repo
938 939 vcs_path = os.path.join(rm_path, '.%s' % alias)
939 940 if os.path.exists(vcs_path):
940 941 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
941 942
942 943 _now = datetime.datetime.now()
943 944 _ms = str(_now.microsecond).rjust(6, '0')
944 945 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
945 946 repo.just_name)
946 947 if repo_group:
947 948 # if repository is in group, prefix the removal path with the group
948 949 args = repo_group.full_path_splitted + [_d]
949 950 _d = os.path.join(*args)
950 951
951 952 if os.path.isdir(rm_path):
952 953 shutil.move(rm_path, os.path.join(self.repos_path, _d))
953 954
954 955 # finally cleanup diff-cache if it exists
955 956 cached_diffs_dir = repo.cached_diffs_dir
956 957 if os.path.isdir(cached_diffs_dir):
957 958 shutil.rmtree(cached_diffs_dir)
958 959
959 960
960 961 class ReadmeFinder:
961 962 """
962 963 Utility which knows how to find a readme for a specific commit.
963 964
964 965 The main idea is that this is a configurable algorithm. When creating an
965 966 instance you can define parameters, currently only the `default_renderer`.
966 967 Based on this configuration the method :meth:`search` behaves slightly
967 968 different.
968 969 """
969 970
970 971 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
971 972 path_re = re.compile(r'^docs?', re.IGNORECASE)
972 973
973 974 default_priorities = {
974 975 None: 0,
975 976 '.text': 2,
976 977 '.txt': 3,
977 978 '.rst': 1,
978 979 '.rest': 2,
979 980 '.md': 1,
980 981 '.mkdn': 2,
981 982 '.mdown': 3,
982 983 '.markdown': 4,
983 984 }
984 985
985 986 path_priority = {
986 987 'doc': 0,
987 988 'docs': 1,
988 989 }
989 990
990 991 FALLBACK_PRIORITY = 99
991 992
992 993 RENDERER_TO_EXTENSION = {
993 994 'rst': ['.rst', '.rest'],
994 995 'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
995 996 }
996 997
997 998 def __init__(self, default_renderer=None):
998 999 self._default_renderer = default_renderer
999 1000 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
1000 1001 default_renderer, [])
1001 1002
1002 1003 def search(self, commit, path='/'):
1003 1004 """
1004 1005 Find a readme in the given `commit`.
1005 1006 """
1006 1007 nodes = commit.get_nodes(path)
1007 1008 matches = self._match_readmes(nodes)
1008 1009 matches = self._sort_according_to_priority(matches)
1009 1010 if matches:
1010 1011 return matches[0].node
1011 1012
1012 1013 paths = self._match_paths(nodes)
1013 1014 paths = self._sort_paths_according_to_priority(paths)
1014 1015 for path in paths:
1015 1016 match = self.search(commit, path=path)
1016 1017 if match:
1017 1018 return match
1018 1019
1019 1020 return None
1020 1021
1021 1022 def _match_readmes(self, nodes):
1022 1023 for node in nodes:
1023 1024 if not node.is_file():
1024 1025 continue
1025 1026 path = node.path.rsplit('/', 1)[-1]
1026 1027 match = self.readme_re.match(path)
1027 1028 if match:
1028 1029 extension = match.group(1)
1029 1030 yield ReadmeMatch(node, match, self._priority(extension))
1030 1031
1031 1032 def _match_paths(self, nodes):
1032 1033 for node in nodes:
1033 1034 if not node.is_dir():
1034 1035 continue
1035 1036 match = self.path_re.match(node.path)
1036 1037 if match:
1037 1038 yield node.path
1038 1039
1039 1040 def _priority(self, extension):
1040 1041 renderer_priority = (
1041 1042 0 if extension in self._renderer_extensions else 1)
1042 1043 extension_priority = self.default_priorities.get(
1043 1044 extension, self.FALLBACK_PRIORITY)
1044 1045 return (renderer_priority, extension_priority)
1045 1046
1046 1047 def _sort_according_to_priority(self, matches):
1047 1048
1048 1049 def priority_and_path(match):
1049 1050 return (match.priority, match.path)
1050 1051
1051 1052 return sorted(matches, key=priority_and_path)
1052 1053
1053 1054 def _sort_paths_according_to_priority(self, paths):
1054 1055
1055 1056 def priority_and_path(path):
1056 1057 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1057 1058
1058 1059 return sorted(paths, key=priority_and_path)
1059 1060
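A short sketch of the finder above; the commit object is assumed to expose get_nodes() as used by search(), and 'markdown' selects the renderer-preferred extensions.

# Sketch: pick the best readme node for a commit, preferring markdown files.
finder = ReadmeFinder(default_renderer='markdown')   # ReadmeFinder as defined above
readme_node = finder.search(commit)   # walks '/' first, then doc/docs subfolders
if readme_node is not None:
    print(readme_node.path)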
1060 1061
1061 1062 class ReadmeMatch:
1062 1063
1063 1064 def __init__(self, node, match, priority):
1064 1065 self.node = node
1065 1066 self._match = match
1066 1067 self.priority = priority
1067 1068
1068 1069 @property
1069 1070 def path(self):
1070 1071 return self.node.path
1071 1072
1072 1073 def __repr__(self):
1073 1074 return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
@@ -1,86 +1,85 b''
1 1 ## -*- coding: utf-8 -*-
2 2 <%inherit file="/base/base.mako"/>
3 3
4 4 <%def name="title()">
5 5 ${_('New Gist')}
6 6 %if c.rhodecode_name:
7 7 &middot; ${h.branding(c.rhodecode_name)}
8 8 %endif
9 9 </%def>
10 10
11 <%def name="breadcrumbs_links()">
12 ${_('New Gist')}
13 </%def>
11 <%def name="breadcrumbs_links()"></%def>
14 12
15 13 <%def name="menu_bar_nav()">
16 14 ${self.menu_items(active='gists')}
17 15 </%def>
18 16
19 17 <%def name="main()">
20 18 <div class="box">
21 19 <!-- box / title -->
22 20 <div class="title">
23 ${self.breadcrumbs()}
21
24 22 </div>
25 23
26 24 <div class="table">
27 25 <div id="files_data">
28 26 ${h.secure_form(h.route_path('gists_create'), id='eform', request=request)}
29 27 <div>
30 <textarea id="description" name="description" placeholder="${_('Gist description ...')}"></textarea>
31
32 28 <span class="gist-gravatar">
33 29 ${self.gravatar(c.rhodecode_user.email, 30)}
34 30 </span>
35 <label for='gistid'>${_('Gist id')}</label>
36 ${h.text('gistid', placeholder=_('Auto generated'))}
31 <label for='gistid'>${_('Gist id')}</label>
32 ${h.text('gistid', placeholder=_('Auto generated'))}
33
34 <label for='lifetime'>${_('Gist lifetime')}</label>
35 ${h.dropdownmenu('lifetime', '', c.lifetime_options)}
37 36
38 <label for='lifetime'>${_('Gist lifetime')}</label>
39 ${h.dropdownmenu('lifetime', '', c.lifetime_options)}
37 <label for='acl_level'>${_('Gist access level')}</label>
38 ${h.dropdownmenu('gist_acl_level', '', c.acl_options)}
40 39
41 <label for='acl_level'>${_('Gist access level')}</label>
42 ${h.dropdownmenu('gist_acl_level', '', c.acl_options)}
40 <textarea style="margin-top: 5px" id="description" name="description" placeholder="${_('Gist description ...')}"></textarea>
41 </div>
43 42
44 </div>
45 43 <div id="codeblock" class="codeblock">
46 44 <div class="code-header">
47 45 <div class="form">
48 46 <div class="fields">
49 ${h.text('filename', size=30, placeholder=_('name this file...'))}
47 ${h.text('filename', size=30, placeholder=_('name gist file...'))}
50 48 ${h.dropdownmenu('mimetype','plain',[('plain',_('plain'))],enable_filter=True)}
51 49 </div>
52 50 </div>
53 51 </div>
52
54 53 <div id="editor_container">
55 54 <div id="editor_pre"></div>
56 55 <textarea id="editor" name="content" ></textarea>
57 56 </div>
58 57 </div>
58
59 59 <div class="pull-right">
60 60 ${h.submit('private',_('Create Private Gist'),class_="btn")}
61 61 ${h.submit('public',_('Create Public Gist'),class_="btn")}
62 ${h.reset('reset',_('Reset'),class_="btn")}
63 62 </div>
64 63 ${h.end_form()}
65 64 </div>
66 65 </div>
67 66
68 67 </div>
69 68
70 69 <script type="text/javascript">
71 70 var myCodeMirror = initCodeMirror('editor', '');
72 71
73 72 var modes_select = $('#mimetype');
74 73 fillCodeMirrorOptions(modes_select);
75 74
76 75 var filename_selector = '#filename';
77 76 // on change of select field set mode
78 77 setCodeMirrorModeFromSelect(
79 78 modes_select, filename_selector, myCodeMirror, null);
80 79
81 80 // on entering the new filename set mode, from given extension
82 81 setCodeMirrorModeFromInput(
83 82 modes_select, filename_selector, myCodeMirror, null);
84 83
85 84 </script>
86 85 </%def>