##// END OF EJS Templates
caches: make gevent curl connection cache friendly....
marcink -
r2946:193b4eb7 default
parent child Browse files
Show More
@@ -1,1746 +1,1749 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24
25 25 import collections
26 26 import datetime
27 27 import fnmatch
28 28 import itertools
29 29 import logging
30 30 import os
31 31 import re
32 32 import time
33 33 import warnings
34 34 import shutil
35 35
36 36 from zope.cachedescriptors.property import Lazy as LazyProperty
37 37
38 38 from rhodecode.lib.utils2 import safe_str, safe_unicode
39 39 from rhodecode.lib.vcs import connection
40 40 from rhodecode.lib.vcs.utils import author_name, author_email
41 41 from rhodecode.lib.vcs.conf import settings
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
44 44 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
45 45 NodeDoesNotExistError, NodeNotChangedError, VCSError,
46 46 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
47 47 RepositoryError)
48 48
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
# POSIX stat modes stored for repository file nodes (Python 2 octal
# literals): regular file and executable file.
FILEMODE_DEFAULT = 0100644
FILEMODE_EXECUTABLE = 0100755

# A lightweight VCS reference triple: its type, its name, and the commit
# id it points at.
Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
# Result of a server-side merge attempt; ``failure_reason`` is one of the
# ``MergeFailureReason`` codes below (see BaseRepository.merge()).
MergeResponse = collections.namedtuple(
    'MergeResponse',
    ('possible', 'executed', 'merge_ref', 'failure_reason'))
60 60
61 61
class MergeFailureReason(object):
    """
    Enumeration with all the reasons why the server side merge could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The merge was not successful, there are conflicts.
    MERGE_FAILED = 2

    # The merge succeeded but we could not push it to the target repository.
    PUSH_FAILED = 3

    # The specified target is not a head in the target repository.
    TARGET_IS_NOT_HEAD = 4

    # The source repository contains more branches than the target. Pushing
    # the merge will create additional branches in the target.
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # The target reference has multiple heads. That does not allow to correctly
    # identify the target location. This could only happen for mercurial
    # branches.
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # The target repository is locked.
    TARGET_IS_LOCKED = 7

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # An involved commit could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    # The target repo reference is missing.
    MISSING_TARGET_REF = 9

    # The source repo reference is missing.
    MISSING_SOURCE_REF = 10

    # The merge was not successful, there are conflicts related to sub
    # repositories.
    SUBREPO_MERGE_FAILED = 11
114 114
class UpdateFailureReason(object):
    """
    Enumeration with all the reasons why the pull request update could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The pull request is up to date.
    NO_CHANGE = 2

    # The pull request has a reference type that is not supported for update.
    WRONG_REF_TYPE = 3

    # Update failed because the target reference is missing.
    MISSING_TARGET_REF = 4

    # Update failed because the source reference is missing.
    MISSING_SOURCE_REF = 5
144 144
class BaseRepository(object):
    """
    Base Repository for final backends

    .. attribute:: DEFAULT_BRANCH_NAME

       name of default branch (i.e. "trunk" for svn, "master" for git etc.

    .. attribute:: commit_ids

       list of all available commit ids, in ascending order

    .. attribute:: path

       absolute path to the repository

    .. attribute:: bookmarks

       Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
       there are no bookmarks or the backend implementation does not support
       bookmarks.

    .. attribute:: tags

       Mapping from name to :term:`Commit ID` of the tag.

    """

    DEFAULT_BRANCH_NAME = None
    DEFAULT_CONTACT = u"Unknown"
    DEFAULT_DESCRIPTION = u"unknown"
    # 40 zeros -- sentinel commit id representing the empty repository state.
    EMPTY_COMMIT_ID = '0' * 40

    path = None

    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if repository could
        not be found at the given ``repo_path`` or directory at ``repo_path``
        exists and ``create`` is set to True.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, would try to create repository.
        :param src_url=None: if set, should be proper url from which repository
          would be cloned; requires ``create`` parameter to be set to True -
          raises RepositoryError if src_url is set and create evaluates to
          False
        """
        raise NotImplementedError

    def __repr__(self):
        return '<%s at %s>' % (self.__class__.__name__, self.path)

    def __len__(self):
        # Number of commits in the repository.
        return self.count()

    def __eq__(self, other):
        # Repositories are equal when they are the same backend class and
        # point at the same filesystem path.
        same_instance = isinstance(other, self.__class__)
        return same_instance and other.path == self.path

    def __ne__(self, other):
        # Python 2: ``!=`` is not derived from __eq__, so define it here.
        return not self.__eq__(other)

    def get_create_shadow_cache_pr_path(self, db_repo):
        # Ensure the per-repo cached-diffs directory exists and return it.
        # NOTE(review): exists-then-makedirs is racy if two workers create
        # the directory concurrently -- confirm callers tolerate this.
        path = db_repo.cached_diffs_dir
        if not os.path.exists(path):
            os.makedirs(path, 0755)
        return path

    @classmethod
    def get_default_config(cls, default=None):
        """
        Returns a fresh ``Config``; ``default`` may be a list of
        ``(section, key, value)`` tuples applied on top of it.
        """
        config = Config()
        if default and isinstance(default, list):
            for section, key, val in default:
                config.set(section, key, val)
        return config

    @LazyProperty
    def _remote(self):
        # Handle to the remote VCS-server connection; backends must
        # override. Lazy so a connection is only created on first use
        # (keeps connection caches, e.g. gevent curl, friendly).
        raise NotImplementedError

    @LazyProperty
    def EMPTY_COMMIT(self):
        # EmptyCommit instance bound to the sentinel EMPTY_COMMIT_ID.
        return EmptyCommit(self.EMPTY_COMMIT_ID)

    @LazyProperty
    def alias(self):
        # Reverse-map this backend class name to its alias key in
        # ``settings.BACKENDS``; returns None when no entry matches.
        for k, v in settings.BACKENDS.items():
            if v.split('.')[-1] == str(self.__class__.__name__):
                return k

    @LazyProperty
    def name(self):
        # Repository name is the last path segment, as unicode.
        return safe_unicode(os.path.basename(self.path))

    @LazyProperty
    def description(self):
        raise NotImplementedError

    def refs(self):
        """
        returns a `dict` with branches, bookmarks, tags, and closed_branches
        for this repository
        """
        return dict(
            branches=self.branches,
            branches_closed=self.branches_closed,
            tags=self.tags,
            bookmarks=self.bookmarks
        )

    @LazyProperty
    def branches(self):
        """
        A `dict` which maps branch names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def branches_closed(self):
        """
        A `dict` which maps closed branch names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def bookmarks(self):
        """
        A `dict` which maps bookmark names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def tags(self):
        """
        A `dict` which maps tag names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def size(self):
        """
        Returns combined size in bytes for all repository files
        """
        tip = self.get_commit()
        return tip.size

    def size_at_commit(self, commit_id):
        # Combined size in bytes of all files at the given commit.
        commit = self.get_commit(commit_id)
        return commit.size

    def is_empty(self):
        # True when the repository has no commits at all.
        return not bool(self.commit_ids)

    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link.
        """
        raise NotImplementedError

    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend
        """
        raise NotImplementedError

    # ==========================================================================
    # COMMITS
    # ==========================================================================

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, most recent commit is returned.

        :param pre_load: Optional. List of commit attributes to load.

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        raise NotImplementedError

    def __iter__(self):
        # Iterate commits in ``commit_ids`` (ascending) order.
        for commit_id in self.commit_ids:
            yield self.get_commit(commit_id=commit_id)

    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None):
        """
        Returns iterator of `BaseCommit` objects from start to end
        not inclusive. This should behave just like a list, ie. end is not
        inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date:
        :param end_date:
        :param branch_name:
        :param show_hidden:
        :param pre_load:
        """
        raise NotImplementedError

    def __getitem__(self, key):
        """
        Allows index based access to the commit objects of this repository.
        """
        # Common attributes are pre-loaded to avoid per-attribute round trips.
        pre_load = ["author", "branch", "date", "message", "parents"]
        if isinstance(key, slice):
            return self._get_range(key, pre_load)
        return self.get_commit(commit_idx=key, pre_load=pre_load)

    def _get_range(self, slice_obj, pre_load):
        # Generator over the commits selected by ``slice_obj``.
        for commit_id in self.commit_ids.__getitem__(slice_obj):
            yield self.get_commit(commit_id=commit_id, pre_load=pre_load)

    def count(self):
        return len(self.commit_ids)

    def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        raise NotImplementedError

    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        raise NotImplementedError

    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: Can be set to a path of a file to create a diff of that
          file. If `path1` is also set, this value is only associated to
          `commit2`.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: Can be set to a path to associate with `commit1`. This
          parameter works only for backends which support diff generation for
          different paths. Other backends will raise a `ValueError` if `path1`
          is set and has a different value than `path`.
        :param file_path: filter this diff by given path pattern
        """
        raise NotImplementedError

    def strip(self, commit_id, branch=None):
        """
        Strip given commit_id from the repository
        """
        raise NotImplementedError

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return a latest common ancestor commit if one exists for this repo
        `commit_id1` vs `commit_id2` from `repo2`.

        :param commit_id1: Commit id from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        """
        raise NotImplementedError

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Compare this repository's revision `commit_id1` with `commit_id2`.

        Returns a tuple(commits, ancestor) that would be merged from
        `commit_id2`. Doing a normal compare (``merge=False``), ``None``
        will be returned as ancestor.

        :param commit_id1: Commit id from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        :param merge: If set to ``True`` will do a merge compare which also
          returns the common ancestor.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
              user_name='', user_email='', message='', dry_run=False,
              use_rebase=False, close_branch=False):
        """
        Merge the revisions specified in `source_ref` from `source_repo`
        onto the `target_ref` of this repository.

        `source_ref` and `target_ref` are named tuples with the following
        fields `type`, `name` and `commit_id`.

        Returns a MergeResponse named tuple with the following fields
        'possible', 'executed', 'merge_ref', 'failure_reason'.

        :param repo_id: `repo_id` target repo id.
        :param workspace_id: `workspace_id` unique identifier.
        :param target_ref: `target_ref` points to the commit on top of which
           the `source_ref` should be merged.
        :param source_repo: The repository that contains the commits to be
           merged.
        :param source_ref: `source_ref` points to the topmost commit from
           the `source_repo` which should be merged.
        :param user_name: Merge commit `user_name`.
        :param user_email: Merge commit `user_email`.
        :param message: Merge commit `message`.
        :param dry_run: If `True` the merge will not take place.
        :param use_rebase: If `True` commits from the source will be rebased
           on top of the target instead of being merged.
        :param close_branch: If `True` branch will be closed before merging it
        """
        if dry_run:
            # Dry runs may omit author/message -- fill in placeholders.
            message = message or 'dry_run_merge_message'
            user_email = user_email or 'dry-run-merge@rhodecode.com'
            user_name = user_name or 'Dry-Run User'
        else:
            if not user_name:
                raise ValueError('user_name cannot be empty')
            if not user_email:
                raise ValueError('user_email cannot be empty')
            if not message:
                raise ValueError('message cannot be empty')

        try:
            return self._merge_repo(
                repo_id, workspace_id, target_ref, source_repo,
                source_ref, message, user_name, user_email, dry_run=dry_run,
                use_rebase=use_rebase, close_branch=close_branch)
        except RepositoryError:
            log.exception(
                'Unexpected failure when running merge, dry-run=%s',
                dry_run)
            # Backend failures are reported, never propagated to callers.
            return MergeResponse(
                False, False, None, MergeFailureReason.UNKNOWN)

    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """Internal implementation of merge."""
        raise NotImplementedError

    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        """
        Create the merge workspace.

        :param workspace_id: `workspace_id` unique identifier.
        """
        raise NotImplementedError

    def _get_legacy_shadow_repository_path(self, workspace_id):
        """
        Legacy version that was used before. We still need it for
        backward compat
        """
        return os.path.join(
            os.path.dirname(self.path),
            '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))

    def _get_shadow_repository_path(self, repo_id, workspace_id):
        # The name of the shadow repository must start with '.', so it is
        # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
        # Prefer an existing legacy-named shadow repo for backward compat.
        legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
        if os.path.exists(legacy_repository_path):
            return legacy_repository_path
        else:
            return os.path.join(
                os.path.dirname(self.path),
                '.__shadow_repo_%s_%s' % (repo_id, workspace_id))

    def cleanup_merge_workspace(self, repo_id, workspace_id):
        """
        Remove merge workspace.

        This function MUST not fail in case there is no workspace associated to
        the given `workspace_id`.

        :param workspace_id: `workspace_id` unique identifier.
        """
        shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
        # timestamp suffix makes the rename target unique per cleanup run
        shadow_repository_path_del = '{}.{}.delete'.format(
            shadow_repository_path, time.time())

        # move the shadow repo, so it never conflicts with the one used.
        # we use this method because shutil.rmtree had some edge case problems
        # removing symlinked repositories
        if not os.path.isdir(shadow_repository_path):
            return

        shutil.move(shadow_repository_path, shadow_repository_path_del)
        try:
            shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
        except Exception:
            log.exception('Failed to gracefully remove shadow repo under %s',
                          shadow_repository_path_del)
            # best-effort second pass; leftovers are only disk garbage
            shutil.rmtree(shadow_repository_path_del, ignore_errors=True)

    # ========== #
    # COMMIT API #
    # ========== #

    @LazyProperty
    def in_memory_commit(self):
        """
        Returns :class:`InMemoryCommit` object for this repository.
        """
        raise NotImplementedError

    # ======================== #
    # UTILITIES FOR SUBCLASSES #
    # ======================== #

    def _validate_diff_commits(self, commit1, commit2):
        """
        Validates that the given commits are related to this repository.

        Intended as a utility for sub classes to have a consistent validation
        of input parameters in methods like :meth:`get_diff`.
        """
        self._validate_commit(commit1)
        self._validate_commit(commit2)
        if (isinstance(commit1, EmptyCommit) and
                isinstance(commit2, EmptyCommit)):
            raise ValueError("Cannot compare two empty commits")

    def _validate_commit(self, commit):
        if not isinstance(commit, BaseCommit):
            raise TypeError(
                "%s is not of type BaseCommit" % repr(commit))
        if commit.repository != self and not isinstance(commit, EmptyCommit):
            raise ValueError(
                "Commit %s must be a valid commit from this repository %s, "
                "related to this repository instead %s." %
                (commit, self, commit.repository))

    def _validate_commit_id(self, commit_id):
        if not isinstance(commit_id, basestring):
            raise TypeError("commit_id must be a string value")

    def _validate_commit_idx(self, commit_idx):
        if not isinstance(commit_idx, (int, long)):
            raise TypeError("commit_idx must be a numeric value")

    def _validate_branch_name(self, branch_name):
        if branch_name and branch_name not in self.branches_all:
            msg = ("Branch %s not found in %s" % (branch_name, self))
            raise BranchDoesNotExistError(msg)

    #
    # Supporting deprecated API parts
    # TODO: johbo: consider to move this into a mixin
    #

    @property
    def EMPTY_CHANGESET(self):
        warnings.warn(
            "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
        return self.EMPTY_COMMIT_ID

    @property
    def revisions(self):
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        return self.commit_ids

    @revisions.setter
    def revisions(self, value):
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        self.commit_ids = value

    def get_changeset(self, revision=None, pre_load=None):
        warnings.warn("Use get_commit instead", DeprecationWarning)
        # A string revision is treated as a commit id, anything else as
        # a numeric commit index.
        commit_id = None
        commit_idx = None
        if isinstance(revision, basestring):
            commit_id = revision
        else:
            commit_idx = revision
        return self.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)

    def get_changesets(
            self, start=None, end=None, start_date=None, end_date=None,
            branch_name=None, pre_load=None):
        warnings.warn("Use get_commits instead", DeprecationWarning)
        start_id = self._revision_to_commit(start)
        end_id = self._revision_to_commit(end)
        return self.get_commits(
            start_id=start_id, end_id=end_id, start_date=start_date,
            end_date=end_date, branch_name=branch_name, pre_load=pre_load)

    def _revision_to_commit(self, revision):
        """
        Translates a revision to a commit_id

        Helps to support the old changeset based API which allows to use
        commit ids and commit indices interchangeable.
        """
        if revision is None:
            return revision

        if isinstance(revision, basestring):
            commit_id = revision
        else:
            commit_id = self.commit_ids[revision]
        return commit_id

    @property
    def in_memory_changeset(self):
        warnings.warn("Use in_memory_commit instead", DeprecationWarning)
        return self.in_memory_commit

    def get_path_permissions(self, username):
        """
        Returns a path permission checker or None if not supported

        :param username: session user name
        :return: an instance of BasePathPermissionChecker or None
        """
        return None

    def install_hooks(self, force=False):
        # Delegates hook installation to the VCS server connection.
        return self._remote.install_hooks(force)
697 700
698 701 class BaseCommit(object):
699 702 """
700 703 Each backend should implement it's commit representation.
701 704
702 705 **Attributes**
703 706
704 707 ``repository``
705 708 repository object within which commit exists
706 709
707 710 ``id``
708 711 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
709 712 just ``tip``.
710 713
711 714 ``raw_id``
712 715 raw commit representation (i.e. full 40 length sha for git
713 716 backend)
714 717
715 718 ``short_id``
716 719 shortened (if apply) version of ``raw_id``; it would be simple
717 720 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
718 721 as ``raw_id`` for subversion
719 722
720 723 ``idx``
721 724 commit index
722 725
723 726 ``files``
724 727 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
725 728
726 729 ``dirs``
727 730 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
728 731
729 732 ``nodes``
730 733 combined list of ``Node`` objects
731 734
732 735 ``author``
733 736 author of the commit, as unicode
734 737
735 738 ``message``
736 739 message of the commit, as unicode
737 740
738 741 ``parents``
739 742 list of parent commits
740 743
741 744 """
742 745
743 746 branch = None
744 747 """
745 748 Depending on the backend this should be set to the branch name of the
746 749 commit. Backends not supporting branches on commits should leave this
747 750 value as ``None``.
748 751 """
749 752
750 753 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
751 754 """
752 755 This template is used to generate a default prefix for repository archives
753 756 if no prefix has been specified.
754 757 """
755 758
756 759 def __str__(self):
757 760 return '<%s at %s:%s>' % (
758 761 self.__class__.__name__, self.idx, self.short_id)
759 762
760 763 def __repr__(self):
761 764 return self.__str__()
762 765
763 766 def __unicode__(self):
764 767 return u'%s:%s' % (self.idx, self.short_id)
765 768
766 769 def __eq__(self, other):
767 770 same_instance = isinstance(other, self.__class__)
768 771 return same_instance and self.raw_id == other.raw_id
769 772
770 773 def __json__(self):
771 774 parents = []
772 775 try:
773 776 for parent in self.parents:
774 777 parents.append({'raw_id': parent.raw_id})
775 778 except NotImplementedError:
776 779 # empty commit doesn't have parents implemented
777 780 pass
778 781
779 782 return {
780 783 'short_id': self.short_id,
781 784 'raw_id': self.raw_id,
782 785 'revision': self.idx,
783 786 'message': self.message,
784 787 'date': self.date,
785 788 'author': self.author,
786 789 'parents': parents,
787 790 'branch': self.branch
788 791 }
789 792
790 793 def __getstate__(self):
791 794 d = self.__dict__.copy()
792 795 d.pop('_remote', None)
793 796 d.pop('repository', None)
794 797 return d
795 798
796 799 def _get_refs(self):
797 800 return {
798 801 'branches': [self.branch] if self.branch else [],
799 802 'bookmarks': getattr(self, 'bookmarks', []),
800 803 'tags': self.tags
801 804 }
802 805
803 806 @LazyProperty
804 807 def last(self):
805 808 """
806 809 ``True`` if this is last commit in repository, ``False``
807 810 otherwise; trying to access this attribute while there is no
808 811 commits would raise `EmptyRepositoryError`
809 812 """
810 813 if self.repository is None:
811 814 raise CommitError("Cannot check if it's most recent commit")
812 815 return self.raw_id == self.repository.commit_ids[-1]
813 816
    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.

        Backend specific; subclasses must override.
        """
        raise NotImplementedError
820 823
821 824 @property
822 825 def merge(self):
823 826 """
824 827 Returns boolean if commit is a merge.
825 828 """
826 829 return len(self.parents) > 1
827 830
    # -- identity/metadata accessors; all backend specific stubs ----------

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """
        raise NotImplementedError

    @LazyProperty
    def id(self):
        """
        Returns string identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def short_id(self):
        """
        Returns shortened version of ``raw_id`` attribute, as string,
        identifying this commit, useful for presentation to users.
        """
        raise NotImplementedError

    @LazyProperty
    def idx(self):
        """
        Returns integer identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def committer(self):
        """
        Returns committer for this commit.
        """
        raise NotImplementedError
870 873
    # Name/email parts are extracted by the vcs.utils helpers, which
    # presumably parse the conventional "Full Name <email>" string --
    # see the ``user`` parameter docs on BaseRepository.tag().

    @LazyProperty
    def committer_name(self):
        """
        Returns committer name for this commit
        """

        return author_name(self.committer)

    @LazyProperty
    def committer_email(self):
        """
        Returns committer email address for this commit
        """

        return author_email(self.committer)

    @LazyProperty
    def author(self):
        """
        Returns author for this commit (backend specific stub).
        """

        raise NotImplementedError

    @LazyProperty
    def author_name(self):
        """
        Returns author name for this commit
        """

        return author_name(self.author)

    @LazyProperty
    def author_email(self):
        """
        Returns author email address for this commit
        """

        return author_email(self.author)
910 913
    # -- per-file accessors; all backend specific stubs -------------------

    def get_file_mode(self, path):
        """
        Returns stat mode of the file at `path`.
        """
        raise NotImplementedError

    def is_link(self, path):
        """
        Returns ``True`` if given `path` is a symlink
        """
        raise NotImplementedError

    def get_file_content(self, path):
        """
        Returns content of the file at the given `path`.
        """
        raise NotImplementedError

    def get_file_size(self, path):
        """
        Returns size of the file at the given `path`.
        """
        raise NotImplementedError
934 937
935 938 def get_file_commit(self, path, pre_load=None):
936 939 """
937 940 Returns last commit of the file at the given `path`.
938 941
939 942 :param pre_load: Optional. List of commit attributes to load.
940 943 """
941 944 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
942 945 if not commits:
943 946 raise RepositoryError(
944 947 'Failed to fetch history for path {}. '
945 948 'Please check if such path exists in your repository'.format(
946 949 path))
947 950 return commits[0]
948 951
    def get_file_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of :class:`BaseCommit`
        objects for which file at given `path` has been modified.

        :param limit: Optional. Allows to limit the size of the returned
            history. This is intended as a hint to the underlying backend, so
            that it can apply optimizations depending on the limit.
        :param pre_load: Optional. List of commit attributes to load.
        """
        # Abstract; backends must override.
        raise NotImplementedError
960 963
    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, sha, commit lazy loader and line.

        :param pre_load: Optional. List of commit attributes to load.
        """
        # Abstract; backends must override.
        raise NotImplementedError
969 972
    def get_nodes(self, path):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of commit at the given ``path``.

        :raises ``CommitError``: if node at the given ``path`` is not
            instance of ``DirNode``
        """
        # Abstract; backends must override.
        raise NotImplementedError
979 982
    def get_node(self, path):
        """
        Returns ``Node`` object from the given ``path``.

        :raises ``NodeDoesNotExistError``: if there is no node at the given
            ``path``
        """
        # Abstract; backends must override.
        raise NotImplementedError
988 991
    def get_largefile_node(self, path):
        """
        Returns the path to largefile from Mercurial/Git-lfs storage,
        or None if it's not a largefile node.
        """
        # Default: backends without largefile support report no largefile.
        return None
995 998
    def archive_repo(self, file_path, kind='tgz', subrepos=None,
                     prefix=None, write_metadata=False, mtime=None):
        """
        Creates an archive containing the contents of the repository.

        :param file_path: path to the file which to create the archive.
        :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
        :param subrepos: accepted for API compatibility; not used here.
        :param prefix: name of root directory in archive.
            Default is repository name and commit's short_id joined with dash:
            ``"{repo_name}-{short_id}"``.
        :param write_metadata: write a metadata file into archive.
        :param mtime: custom modification time for archive creation, defaults
            to time.time() if not given.

        :raise VCSError: If prefix has a problem.
        :raise ImproperArchiveTypeError: If `kind` is not a supported type.
        """
        allowed_kinds = settings.ARCHIVE_SPECS.keys()
        if kind not in allowed_kinds:
            raise ImproperArchiveTypeError(
                'Archive kind (%s) not supported use one of %s' %
                (kind, allowed_kinds))

        prefix = self._validate_archive_prefix(prefix)

        # fall back to this commit's date when no explicit mtime was given
        mtime = mtime or time.mktime(self.date.timetuple())

        # collect (path, mode, is_link, raw content) for every file reachable
        # from the repository root at this commit
        file_info = []
        cur_rev = self.repository.get_commit(commit_id=self.raw_id)
        for _r, _d, files in cur_rev.walk('/'):
            for f in files:
                f_path = os.path.join(prefix, f.path)
                file_info.append(
                    (f_path, f.mode, f.is_link(), f.raw_bytes))

        if write_metadata:
            metadata = [
                ('repo_name', self.repository.name),
                ('rev', self.raw_id),
                ('create_time', mtime),
                ('branch', self.branch),
                ('tags', ','.join(self.tags)),
            ]
            meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
            # 0644 is a Python 2 octal literal (rw-r--r--)
            file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))

        # NOTE(review): delegates to the Hg connection regardless of backend —
        # presumably the archive service is shared; confirm.
        connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1042 1045
1043 1046 def _validate_archive_prefix(self, prefix):
1044 1047 if prefix is None:
1045 1048 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1046 1049 repo_name=safe_str(self.repository.name),
1047 1050 short_id=self.short_id)
1048 1051 elif not isinstance(prefix, str):
1049 1052 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1050 1053 elif prefix.startswith('/'):
1051 1054 raise VCSError("Prefix cannot start with leading slash")
1052 1055 elif prefix.strip() == '':
1053 1056 raise VCSError("Prefix cannot be empty")
1054 1057 return prefix
1055 1058
1056 1059 @LazyProperty
1057 1060 def root(self):
1058 1061 """
1059 1062 Returns ``RootNode`` object for this commit.
1060 1063 """
1061 1064 return self.get_node('')
1062 1065
1063 1066 def next(self, branch=None):
1064 1067 """
1065 1068 Returns next commit from current, if branch is gives it will return
1066 1069 next commit belonging to this branch
1067 1070
1068 1071 :param branch: show commits within the given named branch
1069 1072 """
1070 1073 indexes = xrange(self.idx + 1, self.repository.count())
1071 1074 return self._find_next(indexes, branch)
1072 1075
1073 1076 def prev(self, branch=None):
1074 1077 """
1075 1078 Returns previous commit from current, if branch is gives it will
1076 1079 return previous commit belonging to this branch
1077 1080
1078 1081 :param branch: show commit within the given named branch
1079 1082 """
1080 1083 indexes = xrange(self.idx - 1, -1, -1)
1081 1084 return self._find_next(indexes, branch)
1082 1085
1083 1086 def _find_next(self, indexes, branch=None):
1084 1087 if branch and self.branch != branch:
1085 1088 raise VCSError('Branch option used on commit not belonging '
1086 1089 'to that branch')
1087 1090
1088 1091 for next_idx in indexes:
1089 1092 commit = self.repository.get_commit(commit_idx=next_idx)
1090 1093 if branch and branch != commit.branch:
1091 1094 continue
1092 1095 return commit
1093 1096 raise CommitDoesNotExistError
1094 1097
1095 1098 def diff(self, ignore_whitespace=True, context=3):
1096 1099 """
1097 1100 Returns a `Diff` object representing the change made by this commit.
1098 1101 """
1099 1102 parent = (
1100 1103 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1101 1104 diff = self.repository.get_diff(
1102 1105 parent, self,
1103 1106 ignore_whitespace=ignore_whitespace,
1104 1107 context=context)
1105 1108 return diff
1106 1109
    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        Abstract; backends must override.
        """
        raise NotImplementedError
1113 1116
    @LazyProperty
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        Abstract; backends must override.
        """
        raise NotImplementedError
1120 1123
    @LazyProperty
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        Abstract; backends must override.
        """
        raise NotImplementedError
1127 1130
1128 1131 @LazyProperty
1129 1132 def size(self):
1130 1133 """
1131 1134 Returns total number of bytes from contents of all filenodes.
1132 1135 """
1133 1136 return sum((node.size for node in self.get_filenodes_generator()))
1134 1137
1135 1138 def walk(self, topurl=''):
1136 1139 """
1137 1140 Similar to os.walk method. Insted of filesystem it walks through
1138 1141 commit starting at given ``topurl``. Returns generator of tuples
1139 1142 (topnode, dirnodes, filenodes).
1140 1143 """
1141 1144 topnode = self.get_node(topurl)
1142 1145 if not topnode.is_dir():
1143 1146 return
1144 1147 yield (topnode, topnode.dirs, topnode.files)
1145 1148 for dirnode in topnode.dirs:
1146 1149 for tup in self.walk(dirnode.path):
1147 1150 yield tup
1148 1151
1149 1152 def get_filenodes_generator(self):
1150 1153 """
1151 1154 Returns generator that yields *all* file nodes.
1152 1155 """
1153 1156 for topnode, dirs, files in self.walk():
1154 1157 for node in files:
1155 1158 yield node
1156 1159
1157 1160 #
1158 1161 # Utilities for sub classes to support consistent behavior
1159 1162 #
1160 1163
1161 1164 def no_node_at_path(self, path):
1162 1165 return NodeDoesNotExistError(
1163 1166 u"There is no file nor directory at the given path: "
1164 1167 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1165 1168
1166 1169 def _fix_path(self, path):
1167 1170 """
1168 1171 Paths are stored without trailing slash so we need to get rid off it if
1169 1172 needed.
1170 1173 """
1171 1174 return path.rstrip('/')
1172 1175
1173 1176 #
1174 1177 # Deprecated API based on changesets
1175 1178 #
1176 1179
    @property
    def revision(self):
        # Deprecated alias kept so old changeset-based callers still work.
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        # Deprecated alias; writes through to `idx`.
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1186 1189
1187 1190 def get_file_changeset(self, path):
1188 1191 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1189 1192 return self.get_file_commit(path)
1190 1193
1191 1194
class BaseChangesetClass(type):
    # Metaclass making any BaseCommit instance pass isinstance checks
    # against the deprecated BaseChangeset class.

    def __instancecheck__(self, instance):
        is_commit = isinstance(instance, BaseCommit)
        return is_commit
1196 1199
1197 1200
class BaseChangeset(BaseCommit):
    # Deprecated alias of BaseCommit; warns on instantiation.

    __metaclass__ = BaseChangesetClass  # Python 2 style metaclass assignment

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1206 1209
1207 1210
class BaseInMemoryCommit(object):
    """
    Represents differences between repository's state (most recent head) and
    changes made *in place*.

    **Attributes**

    ``repository``
        repository object for this in-memory-commit

    ``added``
        list of ``FileNode`` objects marked as *added*

    ``changed``
        list of ``FileNode`` objects marked as *changed*

    ``removed``
        list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
        *removed*

    ``parents``
        list of :class:`BaseCommit` instances representing parents of
        in-memory commit. Should always be 2-element sequence.

    """

    def __init__(self, repository):
        self.repository = repository
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def add(self, *filenodes):
        """
        Marks given ``FileNode`` objects as *to be committed*.

        :raises ``NodeAlreadyExistsError``: if node with same path exists at
            latest commit
        :raises ``NodeAlreadyAddedError``: if node with same path is already
            marked as *added*
        """
        # Check if not already marked as *added* first
        for node in filenodes:
            if node.path in (n.path for n in self.added):
                raise NodeAlreadyAddedError(
                    "Such FileNode %s is already marked for addition"
                    % node.path)
        for node in filenodes:
            self.added.append(node)

    def change(self, *filenodes):
        """
        Marks given ``FileNode`` objects to be *changed* in next commit.

        :raises ``EmptyRepositoryError``: if there are no commits yet
        :raises ``NodeAlreadyExistsError``: if node with same path is already
            marked to be *changed*
        :raises ``NodeAlreadyRemovedError``: if node with same path is already
            marked to be *removed*
        :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
            commit
        :raises ``NodeNotChangedError``: if node hasn't really be changed
        """
        # refuse nodes already scheduled for removal
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node at %s is already marked as removed" % node.path)
        try:
            self.repository.get_commit()
        except EmptyRepositoryError:
            raise EmptyRepositoryError(
                "Nothing to change - try to *add* new nodes rather than "
                "changing them")
        for node in filenodes:
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node at '%s' is already marked as changed" % node.path)
            self.changed.append(node)

    def remove(self, *filenodes):
        """
        Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
        *removed* in next commit.

        :raises ``NodeAlreadyRemovedError``: if node has been already marked to
            be *removed*
        :raises ``NodeAlreadyChangedError``: if node has been already marked to
            be *changed*
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node is already marked to for removal at %s" % node.path)
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node is already marked to be changed at %s" % node.path)
            # We only mark node as *removed* - real removal is done by
            # commit method
            self.removed.append(node)

    def reset(self):
        """
        Resets this instance to initial state (cleans ``added``, ``changed``
        and ``removed`` lists).
        """
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def get_ipaths(self):
        """
        Returns generator of paths from nodes marked as added, changed or
        removed.
        """
        for node in itertools.chain(self.added, self.changed, self.removed):
            yield node.path

    def get_paths(self):
        """
        Returns list of paths from nodes marked as added, changed or removed.
        """
        return list(self.get_ipaths())

    def check_integrity(self, parents=None):
        """
        Checks in-memory commit's integrity. Also, sets parents if not
        already set.

        :raises CommitError: if any error occurs (i.e.
            ``NodeDoesNotExistError``).
        """
        if not self.parents:
            parents = parents or []
            if len(parents) == 0:
                try:
                    parents = [self.repository.get_commit(), None]
                except EmptyRepositoryError:
                    parents = [None, None]
            elif len(parents) == 1:
                parents += [None]
            self.parents = parents

        # Local parents, only if not None
        parents = [p for p in self.parents if p]

        # Check nodes marked as added: they must not exist in any parent
        for p in parents:
            for node in self.added:
                try:
                    p.get_node(node.path)
                except NodeDoesNotExistError:
                    pass
                else:
                    raise NodeAlreadyExistsError(
                        "Node `%s` already exists at %s" % (node.path, p))

        # Check nodes marked as changed
        missing = set(self.changed)
        not_changed = set(self.changed)
        if self.changed and not parents:
            raise NodeDoesNotExistError(str(self.changed[0].path))
        for p in parents:
            for node in self.changed:
                try:
                    old = p.get_node(node.path)
                    missing.remove(node)
                    # if content actually changed, remove node from not_changed
                    if old.content != node.content:
                        not_changed.remove(node)
                except NodeDoesNotExistError:
                    pass
        if self.changed and missing:
            # NOTE(review): `node` here is the last loop variable, not
            # necessarily one of the missing nodes — confirm intent.
            raise NodeDoesNotExistError(
                "Node `%s` marked as modified but missing in parents: %s"
                % (node.path, parents))

        if self.changed and not_changed:
            raise NodeNotChangedError(
                "Node `%s` wasn't actually changed (parents: %s)"
                % (not_changed.pop().path, parents))

        # Check nodes marked as removed: each must exist in some parent
        if self.removed and not parents:
            raise NodeDoesNotExistError(
                "Cannot remove node at %s as there "
                "were no parents specified" % self.removed[0].path)
        really_removed = set()
        for p in parents:
            for node in self.removed:
                try:
                    p.get_node(node.path)
                    really_removed.add(node)
                except CommitError:
                    pass
        not_removed = set(self.removed) - really_removed
        if not_removed:
            # TODO: johbo: This code branch does not seem to be covered
            raise NodeDoesNotExistError(
                "Cannot remove node at %s from "
                "following parents: %s" % (not_removed, parents))

    def commit(
            self, message, author, parents=None, branch=None, date=None,
            **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created :class:`BaseCommit`. Updates repository's
        attribute `commits`.

        .. note::

            While overriding this method each backend's should call
            ``self.check_integrity(parents)`` in the first place.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
            would be derived
        :param date: ``datetime.datetime`` instance. Defaults to
            ``datetime.datetime.now()``.
        :param branch: branch name, as string. If none given, default backend's
            branch would be used.

        :raises ``CommitError``: if any error occurs while committing
        """
        raise NotImplementedError
1436 1439
1437 1440
class BaseInMemoryChangesetClass(type):
    # Metaclass making any BaseInMemoryCommit instance pass isinstance
    # checks against the deprecated BaseInMemoryChangeset class.

    def __instancecheck__(self, instance):
        is_in_memory_commit = isinstance(instance, BaseInMemoryCommit)
        return is_in_memory_commit
1442 1445
1443 1446
class BaseInMemoryChangeset(BaseInMemoryCommit):
    """
    Deprecated alias of :class:`BaseInMemoryCommit`; warns on instantiation.
    """

    __metaclass__ = BaseInMemoryChangesetClass  # Python 2 style metaclass

    def __new__(cls, *args, **kwargs):
        # Bugfix: the deprecation message previously named the wrong pair
        # ("Use BaseCommit instead of BaseInMemoryCommit"); follow the
        # "Use <new> instead of <old>" pattern of sibling classes.
        warnings.warn(
            "Use BaseInMemoryCommit instead of BaseInMemoryChangeset",
            DeprecationWarning)
        return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1452 1455
1453 1456
class EmptyCommit(BaseCommit):
    """
    A dummy empty commit. It's possible to pass a hash when creating
    an EmptyCommit.
    """

    def __init__(
            self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        self._empty_commit_id = commit_id
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self.idx = idx
        self.message = message
        self.author = author
        # default to epoch when no date given
        self.date = date or datetime.datetime.fromtimestamp(0)
        self.repository = repo
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit, useful for web
        representation.
        """

        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        # Resolve the default branch name of the backend given by `alias`;
        # implicitly returns None when no alias was provided.
        if self.alias:
            from rhodecode.lib.vcs.backends import get_backend
            return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        # first 12 characters of the full id
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        return self.raw_id

    def get_file_commit(self, path):
        # the empty commit is its own "last commit" for any path
        return self

    def get_file_content(self, path):
        # an empty commit carries no file content
        return u''

    def get_file_size(self, path):
        return 0
1504 1507
1505 1508
class EmptyChangesetClass(type):
    # Metaclass making any EmptyCommit instance pass isinstance checks
    # against the deprecated EmptyChangeset class.

    def __instancecheck__(self, instance):
        is_empty_commit = isinstance(instance, EmptyCommit)
        return is_empty_commit
1510 1513
1511 1514
class EmptyChangeset(EmptyCommit):
    """
    Deprecated alias of :class:`EmptyCommit`; warns on instantiation.
    """

    __metaclass__ = EmptyChangesetClass  # Python 2 style metaclass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # Bugfix: super() was previously anchored at EmptyCommit, which
        # skips EmptyCommit itself in the MRO; anchor at this class.
        return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)

    def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        super(EmptyChangeset, self).__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        # Deprecated accessor for `idx`.
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1540 1543
1541 1544
class EmptyRepository(BaseRepository):
    # Null-object repository: satisfies the repository interface without
    # any backing store.

    def __init__(self, repo_path=None, config=None, create=False, **kwargs):
        pass

    def get_diff(self, *args, **kwargs):
        # local import — presumably to avoid a module-level dependency on
        # the git backend; confirm.
        from rhodecode.lib.vcs.backends.git.diff import GitDiff
        return GitDiff('')
1549 1552
1550 1553
class CollectionGenerator(object):
    """
    Lazy, sliceable collection of commits identified by their commit ids.
    Commits are materialized one at a time via the repository backend.
    """

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
        self.repo = repo
        self.commit_ids = commit_ids
        # TODO: (oliver) this isn't currently hooked up
        self.collection_size = None
        self.pre_load = pre_load

    def __len__(self):
        if self.collection_size is not None:
            return self.collection_size
        return len(self.commit_ids)

    def __iter__(self):
        for one_id in self.commit_ids:
            # TODO: johbo: Mercurial passes in commit indices or commit ids
            yield self._commit_factory(one_id)

    def _commit_factory(self, commit_id):
        """
        Allows backends to override the way commits are generated.
        """
        return self.repo.get_commit(
            commit_id=commit_id, pre_load=self.pre_load)

    def __getslice__(self, i, j):
        """
        Returns an iterator of sliced repository
        """
        sliced_ids = self.commit_ids[i:j]
        return self.__class__(self.repo, sliced_ids, pre_load=self.pre_load)

    def __repr__(self):
        return '<CollectionGenerator[len:%s]>' % (self.__len__())
1587 1590
1588 1591
class Config(object):
    """
    Represents the configuration for a repository.

    The API is inspired by :class:`ConfigParser.ConfigParser` from the
    standard library. It implements only the needed subset.
    """

    def __init__(self):
        # mapping of section name -> {option: value}
        self._values = {}

    def copy(self):
        """Returns a deep-enough copy (sections are copied, values shared)."""
        cloned = Config()
        for section_name, section_values in self._values.items():
            cloned._values[section_name] = section_values.copy()
        return cloned

    def __repr__(self):
        return '<Config(%s sections) at %s>' % (
            len(self._values), hex(id(self)))

    def items(self, section):
        # NOTE: iteritems is Python 2 only
        return self._values.get(section, {}).iteritems()

    def get(self, section, option):
        section_values = self._values.get(section, {})
        return section_values.get(option)

    def set(self, section, option, value):
        self._values.setdefault(section, {})[option] = value

    def clear_section(self, section):
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        serialized = []
        for section_name in self._values:
            for option, value in self._values[section_name].items():
                serialized.append(
                    (safe_str(section_name), safe_str(option),
                     safe_str(value)))
        return serialized
1634 1637
1635 1638
class Diff(object):
    """
    Represents a diff result from a repository backend.

    Subclasses have to provide a backend specific value for
    :attr:`_header_re` and :attr:`_meta_re`.
    """
    _meta_re = None
    _header_re = None

    def __init__(self, raw_diff):
        self.raw = raw_diff

    def chunks(self):
        """
        split the diff in chunks of separate --git a/file b/file chunks
        to make diffs consistent we must prepend with \n, and make sure
        we can detect last chunk as this was also has special rule
        """
        parts = ('\n' + self.raw).split('\ndiff --git')
        header = parts[0]

        if self._meta_re:
            # NOTE(review): match result is currently unused — confirm intent
            match = self._meta_re.match(header)

        body_chunks = parts[1:]
        chunk_count = len(body_chunks)

        return (
            DiffChunk(one_chunk, self, chunk_no == chunk_count)
            for chunk_no, one_chunk in enumerate(body_chunks, start=1))
1668 1671
1669 1672
class DiffChunk(object):
    """One per-file chunk of a :class:`Diff`, split on ``diff --git``."""

    def __init__(self, chunk, diff, last_chunk):
        self._diff = diff

        # since we split by \ndiff --git that part is lost from original diff
        # we need to re-apply it at the end, EXCEPT ! if it's last chunk
        if not last_chunk:
            chunk = chunk + '\n'

        header_match = self._diff._header_re.match(chunk)
        self.header = header_match.groupdict()
        self.diff = chunk[header_match.end():]
        self.raw = chunk
1684 1687
1685 1688
class BasePathPermissionChecker(object):
    """
    Base interface for path-based permission checks. Subclasses decide
    whether a given repository path is accessible.
    """

    @staticmethod
    def create_from_patterns(includes, excludes):
        """
        Factory: picks the cheapest checker for the given glob pattern lists.
        """
        if includes and '*' in includes and not excludes:
            return AllPathPermissionChecker()
        elif excludes and '*' in excludes:
            return NonePathPermissionChecker()
        else:
            return PatternPathPermissionChecker(includes, excludes)

    @property
    def has_full_access(self):
        # Bugfix: was `raise NotImplemented()` — NotImplemented is a
        # constant, not an exception type, so that raised a TypeError.
        raise NotImplementedError()

    def has_access(self, path):
        # Bugfix: same NotImplemented -> NotImplementedError correction.
        raise NotImplementedError()
1703 1706
1704 1707
class AllPathPermissionChecker(BasePathPermissionChecker):
    """Checker that grants access to every path."""

    @property
    def has_full_access(self):
        return True

    def has_access(self, path):
        # every path is accessible, regardless of its value
        return True
1713 1716
1714 1717
class NonePathPermissionChecker(BasePathPermissionChecker):
    """Checker that denies access to every path."""

    @property
    def has_full_access(self):
        return False

    def has_access(self, path):
        # no path is accessible, regardless of its value
        return False
1723 1726
1724 1727
class PatternPathPermissionChecker(BasePathPermissionChecker):
    """Checker driven by glob include/exclude pattern lists; excludes win."""

    def __init__(self, includes, excludes):
        self.includes = includes
        self.excludes = excludes
        # pre-compile the glob patterns once; None/empty lists compile to []
        self.includes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in includes] if includes else []
        self.excludes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in excludes] if excludes else []

    @property
    def has_full_access(self):
        return '*' in self.includes and not self.excludes

    def has_access(self, path):
        # an exclude match denies access before includes are consulted
        if any(regex.match(path) for regex in self.excludes_re):
            return False
        if any(regex.match(path) for regex in self.includes_re):
            return True
        return False
@@ -1,1006 +1,1009 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.vcs import connection, path as vcspath
36 36 from rhodecode.lib.vcs.backends.base import (
37 37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 38 MergeFailureReason, Reference)
39 39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 CommitDoesNotExistError, EmptyRepositoryError,
44 44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45 45
46 46
# Matches a short (12 char) or full (40 char) hex commit id, anchored at
# both ends. FIX: previous pattern ``^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$``
# had misplaced brackets ('[' inside the character class, alternation
# unanchored on the right), so it also matched arbitrary-length strings
# with a 12-char hex prefix.
SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')

log = logging.getLogger(__name__)
50 50
51 51
52 52 class GitRepository(BaseRepository):
53 53 """
54 54 Git repository backend.
55 55 """
56 56 DEFAULT_BRANCH_NAME = 'master'
57 57
58 58 contact = BaseRepository.DEFAULT_CONTACT
59 59
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 update_after_clone=False, with_wire=None, bare=False):
        # Only the path/config/wire settings are stored here; the remote
        # connection itself is created lazily (see ``_remote``) so the
        # connection cache stays friendly to gevent/curl (per changeset note).
        self.path = safe_str(os.path.abspath(repo_path))
        self.config = config if config else self.get_default_config()
        self.with_wire = with_wire

        self._init_repo(create, src_url, update_after_clone, bare)

        # caches
        self._commit_ids = {}

    @LazyProperty
    def _remote(self):
        # Lazily constructed proxy to the remote Git backend; computed once
        # per instance on first access.
        return connection.Git(self.path, self.config, with_wire=self.with_wire)
    @LazyProperty
    def bare(self):
        # True when this is a bare repository (no working copy)
        return self._remote.bare()

    @LazyProperty
    def head(self):
        # commit id that the repository HEAD currently points at
        return self._remote.head()

    @LazyProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order. Being lazy
        attribute allows external tools to inject commit ids from cache.
        """
        commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(commit_ids)
        return commit_ids
90 93
91 94 def _rebuild_cache(self, commit_ids):
92 95 self._commit_ids = dict((commit_id, index)
93 96 for index, commit_id in enumerate(commit_ids))
94 97
    def run_git_command(self, cmd, **opts):
        """
        Runs given ``cmd`` as git command and returns tuple
        (stdout, stderr).

        :param cmd: git command to be executed
        :param opts: env options to pass into Subprocess command
        """
        if not isinstance(cmd, list):
            raise ValueError('cmd must be a list, got %s instead' % type(cmd))

        # ``skip_stderr_log`` is consumed here and not forwarded to the remote
        skip_stderr_log = opts.pop('skip_stderr_log', False)
        out, err = self._remote.run_git_command(cmd, **opts)
        if err and not skip_stderr_log:
            log.debug('Stderr output of git command "%s":\n%s', cmd, err)
        return out, err
111 114
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Sometimes it may happened that git will issue basic
        auth request that can cause whole API to hang when used from python
        or other external calls.

        On failures it'll raise urllib2.HTTPError, exception is also thrown
        when the return code is non 200
        """
        # check first if it's not an url
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # strip scheme prefixes like "git+" from e.g. git+http://
        if '+' in url.split('://', 1)[0]:
            url = url.split('+', 1)[1]

        # Request the _remote to verify the url
        return connection.Git.check_url(url, config.serialize())

    @staticmethod
    def is_valid_repository(path):
        # a non-bare repository keeps its data under a .git directory
        if os.path.isdir(os.path.join(path, '.git')):
            return True
        # check case of bare repository
        try:
            GitRepository(path)
            return True
        except VCSError:
            pass
        return False
144 147
    def _init_repo(self, create, src_url=None, update_after_clone=False,
                   bare=False):
        # Create (optionally by cloning ``src_url``) or validate the
        # repository located at ``self.path``.
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        try:
            if create and src_url:
                GitRepository.check_url(src_url, self.config)
                self.clone(src_url, update_after_clone, bare)
            elif create:
                os.makedirs(self.path, mode=0755)

                if bare:
                    self._remote.init_bare()
                else:
                    self._remote.init()
            else:
                # existing repository: verify the path really is a git repo
                if not self._remote.assert_correct_path():
                    raise RepositoryError(
                        'Path "%s" does not contain a Git repository' %
                        (self.path,))

        # TODO: johbo: check if we have to translate the OSError here
        except OSError as err:
            raise RepositoryError(err)
172 175
    def _get_all_commit_ids(self, filters=None):
        """
        Return all commit ids reachable from branches/tags, oldest first,
        optionally narrowed by ``filters`` (since/until/branch_name).
        """
        # we must check if this repo is not empty, since later command
        # fails if it is. And it's cheaper to ask than throw the subprocess
        # errors
        try:
            self._remote.head()
        except KeyError:
            return []

        rev_filter = ['--branches', '--tags']
        extra_filter = []

        if filters:
            if filters.get('since'):
                extra_filter.append('--since=%s' % (filters['since']))
            if filters.get('until'):
                extra_filter.append('--until=%s' % (filters['until']))
            if filters.get('branch_name'):
                # when a branch is given, limit the walk to that single ref
                rev_filter = ['--tags']
                extra_filter.append(filters['branch_name'])
        rev_filter.extend(extra_filter)

        # if filters.get('start') or filters.get('end'):
        #     # skip is offset, max-count is limit
        #     if filters.get('start'):
        #         extra_filter += ' --skip=%s' % filters['start']
        #     if filters.get('end'):
        #         extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))

        cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
        try:
            output, __ = self.run_git_command(cmd)
        except RepositoryError:
            # Can be raised for empty repositories
            return []
        return output.splitlines()
209 212
210 213 def _get_commit_id(self, commit_id_or_idx):
211 214 def is_null(value):
212 215 return len(value) == commit_id_or_idx.count('0')
213 216
214 217 if self.is_empty():
215 218 raise EmptyRepositoryError("There are no commits yet")
216 219
217 220 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
218 221 return self.commit_ids[-1]
219 222
220 223 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
221 224 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
222 225 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
223 226 try:
224 227 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
225 228 except Exception:
226 229 msg = "Commit %s does not exist for %s" % (
227 230 commit_id_or_idx, self)
228 231 raise CommitDoesNotExistError(msg)
229 232
230 233 elif is_bstr:
231 234 # check full path ref, eg. refs/heads/master
232 235 ref_id = self._refs.get(commit_id_or_idx)
233 236 if ref_id:
234 237 return ref_id
235 238
236 239 # check branch name
237 240 branch_ids = self.branches.values()
238 241 ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
239 242 if ref_id:
240 243 return ref_id
241 244
242 245 # check tag name
243 246 ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
244 247 if ref_id:
245 248 return ref_id
246 249
247 250 if (not SHA_PATTERN.match(commit_id_or_idx) or
248 251 commit_id_or_idx not in self.commit_ids):
249 252 msg = "Commit %s does not exist for %s" % (
250 253 commit_id_or_idx, self)
251 254 raise CommitDoesNotExistError(msg)
252 255
253 256 # Ensure we return full id
254 257 if not SHA_PATTERN.match(str(commit_id_or_idx)):
255 258 raise CommitDoesNotExistError(
256 259 "Given commit id %s not recognized" % commit_id_or_idx)
257 260 return commit_id_or_idx
258 261
259 262 def get_hook_location(self):
260 263 """
261 264 returns absolute path to location where hooks are stored
262 265 """
263 266 loc = os.path.join(self.path, 'hooks')
264 267 if not self.bare:
265 268 loc = os.path.join(self.path, '.git', 'hooks')
266 269 return loc
267 270
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # no commits yet: fall back to filesystem modification time
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)

    def _get_fs_mtime(self):
        # mtime of the index file (or HEAD for bare/index-less repos)
        idx_loc = '' if self.bare else '.git'
        # fallback to filesystem
        in_path = os.path.join(self.path, idx_loc, "index")
        he_path = os.path.join(self.path, idx_loc, "HEAD")
        if os.path.exists(in_path):
            return os.stat(in_path).st_mtime
        else:
            return os.stat(he_path).st_mtime

    @LazyProperty
    def description(self):
        # repository description as unicode, with a default fallback
        description = self._remote.get_description()
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
294 297
    def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
        # Return an OrderedDict of {ref_name: sha} for refs starting with
        # ``prefix``, sorted by name.
        if self.is_empty():
            return OrderedDict()

        result = []
        for ref, sha in self._refs.iteritems():
            if ref.startswith(prefix):
                ref_name = ref
                if strip_prefix:
                    ref_name = ref[len(prefix):]
                result.append((safe_unicode(ref_name), sha))

        def get_name(entry):
            return entry[0]

        return OrderedDict(sorted(result, key=get_name, reverse=reverse))

    def _get_branches(self):
        return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)

    @LazyProperty
    def branches(self):
        return self._get_branches()

    @LazyProperty
    def branches_closed(self):
        # git has no closed branches (mercurial-compatibility placeholder)
        return {}

    @LazyProperty
    def bookmarks(self):
        # git has no bookmarks (mercurial-compatibility placeholder)
        return {}

    @LazyProperty
    def branches_all(self):
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches

    @LazyProperty
    def tags(self):
        return self._get_tags()

    def _get_tags(self):
        return self._get_refs_entries(
            prefix='refs/tags/', strip_prefix=True, reverse=True)
341 344
    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        # TODO: fix this method to apply annotated tags correct with message
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        message = message or "Added tag %s for commit %s" % (
            name, commit.raw_id)
        self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])

        # refresh cached refs/tags so the new tag is visible immediately
        self._refs = self._get_refs()
        self.tags = self._get_tags()
        return commit

    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)
        tagpath = vcspath.join(
            self._remote.get_refs_path(), 'refs', 'tags', name)
        try:
            os.remove(tagpath)
            # refresh cached refs/tags after the removal
            self._refs = self._get_refs()
            self.tags = self._get_tags()
        except OSError as e:
            raise RepositoryError(e.strerror)
388 391
    def _get_refs(self):
        # full {ref_path: sha} mapping fetched from the remote
        return self._remote.get_refs()

    @LazyProperty
    def _refs(self):
        return self._get_refs()

    @property
    def _ref_tree(self):
        # Nested dict view of the refs, keyed by path segment,
        # e.g. {'refs': {'heads': {'master': sha}}}
        node = tree = {}
        for ref, sha in self._refs.iteritems():
            path = ref.split('/')
            for bit in path[:-1]:
                node = node.setdefault(bit, {})
            node[path[-1]] = sha
            # restart from the root for the next ref
            node = tree
        return tree

    def get_remote_ref(self, ref_name):
        # sha that ``ref_name`` points at on the 'origin' remote, or None
        ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
        try:
            return self._refs[ref_key]
        except Exception:
            return
413 416
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """
        Returns `GitCommit` object representing commit from git repository
        at the given `commit_id` or head (most recent commit) if None given.
        """
        if commit_id is not None:
            self._validate_commit_id(commit_id)
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            commit_id = commit_idx
        commit_id = self._get_commit_id(commit_id)
        try:
            # Need to call remote to translate id for tagging scenario
            commit_id = self._remote.get_object(commit_id)["commit_id"]
            idx = self._commit_ids[commit_id]
        except KeyError:
            raise RepositoryError("Cannot get object with id %s" % commit_id)

        return GitCommit(self, commit_id, idx, pre_load=pre_load)
433 436
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None):
        """
        Returns generator of `GitCommit` objects from start to end (both
        are inclusive), in ascending date order.

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :raise BranchDoesNotExistError: If given `branch_name` does not
            exist.
        :raise CommitDoesNotExistError: If commits for given `start` or
          `end` could not be found.

        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        # translate ids to positions in the ordered commit_ids list
        start_raw_id = self._get_commit_id(start_id)
        start_pos = self._commit_ids[start_raw_id] if start_id else None
        end_raw_id = self._get_commit_id(end_id)
        end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # make the slice end inclusive
            end_pos += 1

        filter_ = []
        if branch_name:
            filter_.append({'branch_name': branch_name})
        if start_date and not end_date:
            filter_.append({'since': start_date})
        if end_date and not start_date:
            filter_.append({'until': end_date})
        if start_date and end_date:
            filter_.append({'since': start_date})
            filter_.append({'until': end_date})

        # if start_pos or end_pos:
        #     filter_.append({'start': start_pos})
        #     filter_.append({'end': end_pos})

        if filter_:
            revfilters = {
                'branch_name': branch_name,
                'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
                'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
                'start': start_pos,
                'end': end_pos,
            }
            commit_ids = self._get_all_commit_ids(filters=revfilters)

            # pure python stuff, it's slow due to walker walking whole repo
            # def get_revs(walker):
            #     for walker_entry in walker:
            #         yield walker_entry.commit.id
            # revfilters = {}
            # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
        else:
            commit_ids = self.commit_ids

        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos: end_pos]

        return CollectionGenerator(self, commit_ids, pre_load=pre_load)
517 520
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        ``commit2`` since ``commit1``.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until ``commit2``
        :param commit2: Until which commits changes should be shown.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        flags = [
            '-U%s' % context, '--full-index', '--binary', '-p',
            '-M', '--abbrev=40']
        if ignore_whitespace:
            flags.append('-w')

        if commit1 == self.EMPTY_COMMIT:
            # diff against the empty tree: use 'show' on commit2
            cmd = ['show'] + flags + [commit2.raw_id]
        else:
            cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]

        if path:
            cmd.extend(['--', path])

        stdout, __ = self.run_git_command(cmd)
        # If we used 'show' command, strip first few lines (until actual diff
        # starts)
        if commit1 == self.EMPTY_COMMIT:
            lines = stdout.splitlines()
            x = 0
            for line in lines:
                if line.startswith('diff'):
                    break
                x += 1
            # Append new line just like 'diff' command do
            stdout = '\n'.join(lines[x:]) + '\n'
        return GitDiff(stdout)
565 568
    def strip(self, commit_id, branch_name):
        # Remove ``commit_id`` from history by resetting ``branch_name``
        # to its first parent.
        commit = self.get_commit(commit_id=commit_id)
        if commit.merge:
            raise Exception('Cannot reset to merge commit')

        # parent is going to be the new head now
        commit = commit.parents[0]
        self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)

        # recompute the cached commit list after history has changed
        self.commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(self.commit_ids)

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        # Best common ancestor of two commits, possibly across repositories.
        if commit_id1 == commit_id2:
            return commit_id1

        if self != repo2:
            commits = self._remote.get_missing_revs(
                commit_id1, commit_id2, repo2.path)
            if commits:
                commit = repo2.get_commit(commits[-1])
                if commit.parents:
                    ancestor_id = commit.parents[0].raw_id
                else:
                    ancestor_id = None
            else:
                # no commits from other repo, ancestor_id is the commit_id2
                ancestor_id = commit_id2
        else:
            # same repo: delegate to git merge-base
            output, __ = self.run_git_command(
                ['merge-base', commit_id1, commit_id2])
            ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]

        return ancestor_id
600 603
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        # Commits reachable from ``commit_id2`` but not from ``commit_id1``,
        # oldest first.
        repo1 = self
        ancestor_id = None

        if commit_id1 == commit_id2:
            commits = []
        elif repo1 != repo2:
            missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
                                                        repo2.path)
            commits = [
                repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in reversed(missing_ids)]
        else:
            # same repo: use git log over the commit range
            output, __ = repo1.run_git_command(
                ['log', '--reverse', '--pretty=format: %H', '-s',
                 '%s..%s' % (commit_id1, commit_id2)])
            commits = [
                repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
                for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]

        return commits

    @LazyProperty
    def in_memory_commit(self):
        """
        Returns ``GitInMemoryCommit`` object for this repository.
        """
        return GitInMemoryCommit(self)
629 632
    def clone(self, url, update_after_clone=True, bare=False):
        """
        Tries to clone commits from external location.

        :param update_after_clone: If set to ``False``, git won't checkout
           working directory
        :param bare: If set to ``True``, repository would be cloned into
           *bare* git repository (no working directory at all).
        """
        # init_bare and init expect empty dir created to proceed
        if not os.path.exists(self.path):
            os.mkdir(self.path)

        if bare:
            self._remote.init_bare()
        else:
            self._remote.init()

        # '^{}' marks peeled (dereferenced) tag objects
        deferred = '^{}'
        valid_refs = ('refs/heads', 'refs/tags', 'HEAD')

        return self._remote.clone(
            url, deferred, valid_refs, update_after_clone)

    def pull(self, url, commit_ids=None):
        """
        Tries to pull changes from external location. We use fetch here since
        pull in get does merges and we want to be compatible with hg backend so
        pull == fetch in this case
        """
        self.fetch(url, commit_ids=commit_ids)

    def fetch(self, url, commit_ids=None):
        """
        Tries to fetch changes from external location.
        """
        refs = None

        if commit_ids is not None:
            # only fetch the refs that resolve to the wanted commits
            remote_refs = self._remote.get_remote_refs(url)
            refs = [
                ref for ref in remote_refs if remote_refs[ref] in commit_ids]
        self._remote.fetch(url, refs=refs)

    def push(self, url):
        # push all refs to the given remote url
        refs = None
        self._remote.sync_push(url, refs=refs)
677 680
    def set_refs(self, ref_name, commit_id):
        # point ``ref_name`` at ``commit_id``
        self._remote.set_refs(ref_name, commit_id)

    def remove_ref(self, ref_name):
        self._remote.remove_ref(ref_name)

    def _update_server_info(self):
        """
        runs gits update-server-info command in this repo instance
        """
        self._remote.update_server_info()
689 692
    def _current_branch(self):
        """
        Return the name of the current branch.

        It only works for non bare repositories (i.e. repositories with a
        working copy)
        """
        if self.bare:
            raise RepositoryError('Bare git repos do not have active branches')

        if self.is_empty():
            return None

        stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
        return stdout.strip()

    def _checkout(self, branch_name, create=False, force=False):
        """
        Checkout a branch in the working directory.

        It tries to create the branch if create is True, failing if the branch
        already exists.

        It only works for non bare repositories (i.e. repositories with a
        working copy)
        """
        if self.bare:
            raise RepositoryError('Cannot checkout branches in a bare git repo')

        cmd = ['checkout']
        if force:
            cmd.append('-f')
        if create:
            cmd.append('-b')
        cmd.append(branch_name)
        self.run_git_command(cmd, fail_on_stderr=False)

    def _identify(self):
        """
        Return the current state of the working directory.
        """
        if self.bare:
            raise RepositoryError('Bare git repos do not have active branches')

        if self.is_empty():
            return None

        stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
        return stdout.strip()
739 742
    def _local_clone(self, clone_path, branch_name, source_branch=None):
        """
        Create a local clone of the current repo.
        """
        # N.B.(skreft): the --branch option is required as otherwise the shallow
        # clone will only fetch the active branch.
        cmd = ['clone', '--branch', branch_name,
               self.path, os.path.abspath(clone_path)]

        self.run_git_command(cmd, fail_on_stderr=False)

        # if we get the different source branch, make sure we also fetch it for
        # merge conditions
        if source_branch and source_branch != branch_name:
            # check if the ref exists.
            shadow_repo = GitRepository(os.path.abspath(clone_path))
            if shadow_repo.get_remote_ref(source_branch):
                cmd = ['fetch', self.path, source_branch]
                self.run_git_command(cmd, fail_on_stderr=False)

    def _local_fetch(self, repository_path, branch_name, use_origin=False):
        """
        Fetch a branch from a local repository.
        """
        repository_path = os.path.abspath(repository_path)
        if repository_path == self.path:
            raise ValueError('Cannot fetch from the same repository')

        if use_origin:
            # refspec that updates the local branch from the remote one
            branch_name = '+{branch}:refs/heads/{branch}'.format(
                branch=branch_name)

        cmd = ['fetch', '--no-tags', '--update-head-ok',
               repository_path, branch_name]
        self.run_git_command(cmd, fail_on_stderr=False)

    def _local_reset(self, branch_name):
        # hard-reset the working copy to ``branch_name``
        branch_name = '{}'.format(branch_name)
        cmd = ['reset', '--hard', branch_name]
        self.run_git_command(cmd, fail_on_stderr=False)
780 783
    def _last_fetch_heads(self):
        """
        Return the last fetched heads that need merging.

        The algorithm is defined at
        https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
        """
        if not self.bare:
            fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
        else:
            fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')

        heads = []
        with open(fetch_heads_path) as f:
            for line in f:
                if '	not-for-merge	' in line:
                    continue
                # keep only the sha, dropping everything after the first tab
                line = re.sub('	.*', '', line, flags=re.DOTALL)
                heads.append(line)

        return heads

    def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
        # shadow repos are plain git repositories (hooks are not enabled here)
        return GitRepository(shadow_repository_path)
805 808
    def _local_pull(self, repository_path, branch_name, ff_only=True):
        """
        Pull a branch from a local repository.
        """
        if self.bare:
            raise RepositoryError('Cannot pull into a bare git repository')
        # N.B.(skreft): The --ff-only option is to make sure this is a
        # fast-forward (i.e., we are only pulling new changes and there are no
        # conflicts with our current branch)
        # Additionally, that option needs to go before --no-tags, otherwise git
        # pull complains about it being an unknown flag.
        cmd = ['pull']
        if ff_only:
            cmd.append('--ff-only')
        cmd.extend(['--no-tags', repository_path, branch_name])
        self.run_git_command(cmd, fail_on_stderr=False)
822 825
823 826 def _local_merge(self, merge_message, user_name, user_email, heads):
824 827 """
825 828 Merge the given head into the checked out branch.
826 829
827 830 It will force a merge commit.
828 831
829 832 Currently it raises an error if the repo is empty, as it is not possible
830 833 to create a merge commit in an empty repo.
831 834
832 835 :param merge_message: The message to use for the merge commit.
833 836 :param heads: the heads to merge.
834 837 """
835 838 if self.bare:
836 839 raise RepositoryError('Cannot merge into a bare git repository')
837 840
838 841 if not heads:
839 842 return
840 843
841 844 if self.is_empty():
842 845 # TODO(skreft): do somehting more robust in this case.
843 846 raise RepositoryError(
844 847 'Do not know how to merge into empty repositories yet')
845 848
846 849 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
847 850 # commit message. We also specify the user who is doing the merge.
848 851 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
849 852 '-c', 'user.email=%s' % safe_str(user_email),
850 853 'merge', '--no-ff', '-m', safe_str(merge_message)]
851 854 cmd.extend(heads)
852 855 try:
853 856 output = self.run_git_command(cmd, fail_on_stderr=False)
854 857 except RepositoryError:
855 858 # Cleanup any merge leftovers
856 859 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
857 860 raise
858 861
    def _local_push(
            self, source_branch, repository_path, target_branch,
            enable_hooks=False, rc_scm_data=None):
        """
        Push the source_branch to the given repository and target_branch.

        Currently it if the target_branch is not master and the target repo is
        empty, the push will work, but then GitRepository won't be able to find
        the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
        pointing to master, which does not exist).

        It does not run the hooks in the target repo.
        """
        # TODO(skreft): deal with the case in which the target repo is empty,
        # and the target_branch is not master.
        target_repo = GitRepository(repository_path)
        if (not target_repo.bare and
                target_repo._current_branch() == target_branch):
            # Git prevents pushing to the checked out branch, so simulate it by
            # pulling into the target repository.
            target_repo._local_pull(self.path, source_branch)
        else:
            cmd = ['push', os.path.abspath(repository_path),
                   '%s:%s' % (source_branch, target_branch)]
            gitenv = {}
            if rc_scm_data:
                gitenv.update({'RC_SCM_DATA': rc_scm_data})

            if not enable_hooks:
                # signal hooks on the remote side to skip execution
                gitenv['RC_SKIP_HOOKS'] = '1'
            self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
890 893
891 894 def _get_new_pr_branch(self, source_branch, target_branch):
892 895 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
893 896 pr_branches = []
894 897 for branch in self.branches:
895 898 if branch.startswith(prefix):
896 899 pr_branches.append(int(branch[len(prefix):]))
897 900
898 901 if not pr_branches:
899 902 branch_id = 0
900 903 else:
901 904 branch_id = max(pr_branches) + 1
902 905
903 906 return '%s%d' % (prefix, branch_id)
904 907
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        """
        Create the shadow repository for this merge workspace if it does not
        exist yet, and return its filesystem path.
        """
        shadow_repository_path = self._get_shadow_repository_path(
            repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            # clone lazily on first use; later merges reuse the same shadow
            self._local_clone(
                shadow_repository_path, target_ref.name, source_ref.name)
            log.debug(
                'Prepared shadow repository in %s', shadow_repository_path)

        return shadow_repository_path
916 919
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` of ``source_repo`` into ``target_ref`` of this
        repository inside a shadow clone and return a ``MergeResponse``.

        On ``dry_run`` the merge is attempted but never pushed back.

        NOTE(review): ``use_rebase`` and ``close_branch`` are accepted but
        not inspected in this Git implementation - confirm against callers.
        """
        # refuse to merge if the target head moved since the caller saw it
        if target_ref.commit_id != self.branches[target_ref.name]:
            log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
                        target_ref.commit_id, self.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self._get_shadow_instance(shadow_repository_path)

        # checkout source, if it's different. Otherwise we could not
        # fetch proper commits for merge testing
        if source_ref.name != target_ref.name:
            if shadow_repo.get_remote_ref(source_ref.name):
                shadow_repo._checkout(source_ref.name, force=True)

        # checkout target, and fetch changes
        shadow_repo._checkout(target_ref.name, force=True)

        # fetch/reset pull the target, in case it is changed
        # this handles even force changes
        shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
        shadow_repo._local_reset(target_ref.name)

        # Need to reload repo to invalidate the cache, or otherwise we cannot
        # retrieve the last target commit.
        shadow_repo = self._get_shadow_instance(shadow_repository_path)
        if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
            log.warning('Shadow Target ref %s commit mismatch %s vs %s',
                        target_ref, target_ref.commit_id,
                        shadow_repo.branches[target_ref.name])
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)

        # calculate new branch
        pr_branch = shadow_repo._get_new_pr_branch(
            source_ref.name, target_ref.name)
        log.debug('using pull-request merge branch: `%s`', pr_branch)
        # checkout to temp branch, and fetch changes
        shadow_repo._checkout(pr_branch, create=True)
        try:
            shadow_repo._local_fetch(source_repo.path, source_ref.name)
        except RepositoryError:
            log.exception('Failure when doing local fetch on git shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF)

        merge_ref = None
        merge_failure_reason = MergeFailureReason.NONE
        try:
            shadow_repo._local_merge(merge_message, merger_name, merger_email,
                                     [source_ref.commit_id])
            merge_possible = True

            # Need to reload repo to invalidate the cache, or otherwise we
            # cannot retrieve the merge commit.
            shadow_repo = GitRepository(shadow_repository_path)
            merge_commit_id = shadow_repo.branches[pr_branch]

            # Set a reference pointing to the merge commit. This reference may
            # be used to easily identify the last successful merge commit in
            # the shadow repository.
            shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
            merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
        except RepositoryError:
            log.exception('Failure when doing local merge on git shadow repo')
            merge_possible = False
            merge_failure_reason = MergeFailureReason.MERGE_FAILED

        if merge_possible and not dry_run:
            try:
                # push the merged branch back into the real target repository
                shadow_repo._local_push(
                    pr_branch, self.path, target_ref.name, enable_hooks=True,
                    rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
                merge_succeeded = True
            except RepositoryError:
                log.exception(
                    'Failure when doing local push on git shadow repo')
                merge_succeeded = False
                merge_failure_reason = MergeFailureReason.PUSH_FAILED
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref,
            merge_failure_reason)
@@ -1,915 +1,917 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.vcs import connection, exceptions
36 36 from rhodecode.lib.vcs.backends.base import (
37 37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 38 MergeFailureReason, Reference, BasePathPermissionChecker)
39 39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 45 from rhodecode.lib.vcs.compat import configparser
46 46
47 47 hexlify = binascii.hexlify
48 48 nullid = "\0" * 20
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class MercurialRepository(BaseRepository):
54 54 """
55 55 Mercurial repository backend
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'default'
58 58
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 update_after_clone=False, with_wire=None):
        """
        Raises RepositoryError if repository could not be find at the given
        ``repo_path``.

        :param repo_path: local path of the repository
        :param config: config object containing the repo configuration
        :param create=False: if set to True, would try to create repository if
           it does not exist rather than raising exception
        :param src_url=None: would try to clone repository from given location
        :param update_after_clone=False: sets update of working copy after
          making a clone
        :param with_wire=None: extra wire-protocol options forwarded to the
          lazily created ``_remote`` connection
        """

        self.path = safe_str(os.path.abspath(repo_path))
        # mercurial since 4.4.X requires certain configuration to be present
        # because sometimes we init the repos with config we need to meet
        # special requirements
        self.config = config if config else self.get_default_config(
            default=[('extensions', 'largefiles', '1')])
        # stored so the lazy ``_remote`` property can open the connection on
        # first use instead of eagerly here (cache/connection friendliness)
        self.with_wire = with_wire

        self._init_repo(create, src_url, update_after_clone)

        # caches
        self._commit_ids = {}
88 86
    @LazyProperty
    def _remote(self):
        # created lazily (and cached by LazyProperty) so constructing the
        # repository object does not open a vcsserver connection by itself
        return connection.Hg(self.path, self.config, with_wire=self.with_wire)
90
    @LazyProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order. Being lazy
        attribute allows external tools to inject shas from cache.
        """
        commit_ids = self._get_all_commit_ids()
        # keep the commit id -> index mapping in sync with the id list
        self._rebuild_cache(commit_ids)
        return commit_ids
98 100
99 101 def _rebuild_cache(self, commit_ids):
100 102 self._commit_ids = dict((commit_id, index)
101 103 for index, commit_id in enumerate(commit_ids))
102 104
    @LazyProperty
    def branches(self):
        # active, non-closed branches only (see _get_branches defaults)
        return self._get_branches()
106 108
    @LazyProperty
    def branches_closed(self):
        # closed branches only; active ones are excluded
        return self._get_branches(active=False, closed=True)
110 112
111 113 @LazyProperty
112 114 def branches_all(self):
113 115 all_branches = {}
114 116 all_branches.update(self.branches)
115 117 all_branches.update(self.branches_closed)
116 118 return all_branches
117 119
    def _get_branches(self, active=True, closed=False):
        """
        Gets branches for this repository
        Returns only not closed active branches by default

        :param active: return also active branches
        :param closed: return also closed branches

        :return: OrderedDict of branch name -> hex commit id, sorted by
            branch name ascending; empty dict for an empty repository.
        """
        if self.is_empty():
            return {}

        def get_name(ctx):
            # sort key: the name part of the (name, hex-id) tuple
            return ctx[0]

        _branches = [(safe_unicode(n), hexlify(h),) for n, h in
                     self._remote.branches(active, closed).items()]

        return OrderedDict(sorted(_branches, key=get_name, reverse=False))
137 139
    @LazyProperty
    def tags(self):
        """
        Gets tags for this repository
        """
        # OrderedDict of tag name -> hex commit id (see _get_tags)
        return self._get_tags()
144 146
    def _get_tags(self):
        # Returns OrderedDict of tag name -> hex commit id, sorted by tag
        # name descending; empty dict for an empty repository.
        if self.is_empty():
            return {}

        def get_name(ctx):
            # sort key: the name part of the (name, hex-id) tuple
            return ctx[0]

        _tags = [(safe_unicode(n), hexlify(h),) for n, h in
                 self._remote.tags().items()]

        return OrderedDict(sorted(_tags, key=get_name, reverse=True))
156 158
    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        # 'local' may be passed via kwargs; defaults to a repo-wide tag
        local = kwargs.setdefault('local', False)

        if message is None:
            message = "Added tag %s for commit %s" % (name, commit.short_id)

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(
            name, commit.raw_id, message, local, user, date, tz)
        # tagging creates a new commit; drop stale vcsserver caches
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags
        self.tags = self._get_tags()
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)
189 191
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given `name`.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)
        if message is None:
            message = "Removed tag %s" % name
        local = False

        date, tz = date_to_timestamp_plus_offset(date)

        # pointing the tag at nullid is how the removal is recorded
        self._remote.tag(name, nullid, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()
        # refresh the cached tag map after removal
        self.tags = self._get_tags()
212 214
    @LazyProperty
    def bookmarks(self):
        """
        Gets bookmarks for this repository
        """
        # OrderedDict of bookmark name -> hex commit id (see _get_bookmarks)
        return self._get_bookmarks()
219 221
    def _get_bookmarks(self):
        # Returns OrderedDict of bookmark name -> hex commit id, sorted by
        # name ascending; empty dict for an empty repository.
        if self.is_empty():
            return {}

        def get_name(ctx):
            # sort key: the name part of the (name, hex-id) tuple
            return ctx[0]

        _bookmarks = [
            (safe_unicode(n), hexlify(h)) for n, h in
            self._remote.bookmarks().items()]

        return OrderedDict(sorted(_bookmarks, key=get_name))
232 234
    def _get_all_commit_ids(self):
        # only commits in the 'visible' phase set (no hidden/obsolete ones)
        return self._remote.get_all_commit_ids('visible')
235 237
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: Old path of the file; only values equal to ``path``
          are supported here, anything else raises ``ValueError``.
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            file_filter = [self.path, path]
        else:
            # no path given - diff the whole repository
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_git=True, opt_ignorews=ignore_whitespace,
            context=context)
        return MercurialDiff(diff)
266 268
    def strip(self, commit_id, branch=None):
        """
        Strip ``commit_id`` (and its descendants) from the repository.

        NOTE(review): ``branch`` is accepted but not used in this method -
        confirm against callers.
        """
        self._remote.strip(commit_id, update=False, backup="none")

        self._remote.invalidate_vcs_cache()
        # history changed - rebuild the id list and the id -> index cache
        self.commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(self.commit_ids)
273 275
    def verify(self):
        """Run repository verification on the remote and return its result."""
        verify = self._remote.verify()

        self._remote.invalidate_vcs_cache()
        return verify
279 281
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return the raw id of the common ancestor of the two commits
        (resolved against ``repo2``), or None when no ancestor exists.
        """
        if commit_id1 == commit_id2:
            return commit_id1

        ancestors = self._remote.revs_from_revspec(
            "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
            other_path=repo2.path)
        return repo2[ancestors[0]].raw_id if ancestors else None
288 290
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Return the commits of ``repo2`` reachable from ``commit_id2`` but
        not from ``commit_id1``.

        :param merge: when True use an ancestors-difference revset (merge
            preview), otherwise the plain ``id(..)..id(..)`` revision range.
        """
        if commit_id1 == commit_id2:
            commits = []
        else:
            if merge:
                indexes = self._remote.revs_from_revspec(
                    "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
                    commit_id2, commit_id1, commit_id1, other_path=repo2.path)
            else:
                indexes = self._remote.revs_from_revspec(
                    "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
                    commit_id1, other_path=repo2.path)

            commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
                       for idx in indexes]

        return commits
306 308
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Sometimes it may happened that mercurial will issue basic
        auth request that can cause whole API to hang when used from python
        or other external calls.

        On failures it'll raise urllib2.HTTPError, exception is also thrown
        when the return code is non 200
        """
        # check first if it's not an local url; local paths are always
        # considered valid without contacting the remote
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # Request the _remote to verify the url
        return connection.Hg.check_url(url, config.serialize())
324 326
325 327 @staticmethod
326 328 def is_valid_repository(path):
327 329 return os.path.isdir(os.path.join(path, '.hg'))
328 330
    def _init_repo(self, create, src_url=None, update_after_clone=False):
        """
        Function will check for mercurial repository in given path. If there
        is no repository in that path it will raise an exception unless
        `create` parameter is set to True - in that case repository would
        be created.

        If `src_url` is given, would try to clone repository from the
        location at given clone_point. Additionally it'll make update to
        working copy accordingly to `update_after_clone` flag.

        :raises RepositoryError: when ``create`` is requested but the path
            already exists.
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if src_url:
            url = str(self._get_url(src_url))
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, update_after_clone)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            os.makedirs(self.path, mode=0755)

            self._remote.localrepository(create)
358 360
    @LazyProperty
    def in_memory_commit(self):
        # fresh in-memory commit object bound to this repository
        return MercurialInMemoryCommit(self)
362 364
    @LazyProperty
    def description(self):
        # read from the repo's [web] config section (untrusted config is
        # allowed); falls back to the class default when unset
        description = self._remote.get_config_value(
            'web', 'description', untrusted=True)
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
368 370
    @LazyProperty
    def contact(self):
        # prefer [web] contact, fall back to [ui] username, then the default
        contact = (
            self._remote.get_config_value("web", "contact") or
            self._remote.get_config_value("ui", "username"))
        return safe_unicode(contact or self.DEFAULT_CONTACT)
375 377
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # no commits (e.g. empty repository) - fall back to fs mtime
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
387 389
388 390 def _get_fs_mtime(self):
389 391 # fallback to filesystem
390 392 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
391 393 st_path = os.path.join(self.path, '.hg', "store")
392 394 if os.path.exists(cl_path):
393 395 return os.stat(cl_path).st_mtime
394 396 else:
395 397 return os.stat(st_path).st_mtime
396 398
    def _get_url(self, url):
        """
        Returns normalized url. If schema is not given, would fall
        to filesystem
        (``file:///``) schema.
        """
        url = url.encode('utf8')
        # 'default' is mercurial's symbolic name for the configured default
        # path and must not be rewritten into a file url
        if url != 'default' and '://' not in url:
            url = "file:" + urllib.pathname2url(url)
        return url
407 409
408 410 def get_hook_location(self):
409 411 """
410 412 returns absolute path to location where hooks are stored
411 413 """
412 414 return os.path.join(self.path, '.hg', '.hgrc')
413 415
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        :raises EmptyRepositoryError: when the repository has no commits.
        :raises CommitDoesNotExistError: when the remote lookup fails too.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                # not in the local cache - fall through to the remote lookup
                pass
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                id_ = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    # normalize negative indexes to their positive form
                    commit_idx += len(self.commit_ids)
                return MercurialCommit(
                    self, id_, commit_idx, pre_load=pre_load)
            except IndexError:
                commit_id = commit_idx
        else:
            # no arguments given - resolve the repository tip
            commit_id = "tip"

        if isinstance(commit_id, unicode):
            commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            msg = "Commit %s does not exist for %s" % (
                commit_id, self)
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
453 455
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
          ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        # NOTE(review): branch_ancestors is never set to True below, so the
        # ancestors(branch(...)) filter branch is currently unreachable
        branch_ancestors = False
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # end is inclusive - extend the python slice bound by one
            end_pos += 1

        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            # combine all revset fragments and let the remote resolve them
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
536 538
    def pull(self, url, commit_ids=None):
        """
        Tries to pull changes from external location.

        :param commit_ids: Optional. Can be set to a list of commit ids
           which shall be pulled from the other repository.
        """
        url = self._get_url(url)
        self._remote.pull(url, commit_ids=commit_ids)
        # pulled commits change history - drop stale vcsserver caches
        self._remote.invalidate_vcs_cache()
547 549
    def push(self, url):
        """Push outgoing changes to the repository at ``url``."""
        url = self._get_url(url)
        self._remote.sync_push(url)
551 553
    def _local_clone(self, clone_path):
        """
        Create a local clone of the current repo.
        """
        # hooks=False on purpose: workspace/shadow clones must not trigger
        # the hook machinery of the source repository
        self._remote.clone(self.path, clone_path, update_after_clone=True,
                           hooks=False)
558 560
    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.

        :param clean: passed through to the remote update call
            (presumably discards local changes, like ``hg update --clean``
            - confirm against the vcsserver implementation).
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)
565 567
566 568 def _identify(self):
567 569 """
568 570 Return the current state of the working directory.
569 571 """
570 572 return self._remote.identify().strip().rstrip('+')
571 573
572 574 def _heads(self, branch=None):
573 575 """
574 576 Return the commit ids of the repository heads.
575 577 """
576 578 return self._remote.heads(branch=branch).strip().split(' ')
577 579
    def _ancestor(self, revision1, revision2):
        """
        Return the common ancestor of the two revisions.
        """
        # delegated entirely to the vcsserver remote
        return self._remote.ancestor(revision1, revision2)
583 585
    def _local_push(
            self, revision, repository_path, push_branches=False,
            enable_hooks=False):
        """
        Push the given revision to the specified repository.

        :param push_branches: allow to create branches in the target repo.
        :param enable_hooks: forwarded as the remote's ``hooks`` flag; when
            False the push runs without hooks.
        """
        self._remote.push(
            [revision], repository_path, hooks=enable_hooks,
            push_branches=push_branches)
595 597
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        NOTE(review): ``dry_run`` is accepted but not inspected in this
        method - confirm against callers.
        """
        self._update(target_ref.commit_id)

        ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if ancestor == source_ref.commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref.commit_id, False

        elif ancestor == target_ref.commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref.commit_id, True

        if use_rebase:
            try:
                # bookmark marks the rebased head so we can update to it below
                bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                                target_ref.commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref.commit_id, dest=target_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name)
                return self._identify(), True
            except RepositoryError:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                raise
        else:
            try:
                self._remote.merge(source_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError:
                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                raise
651 653
    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_revision

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.

        NOTE(review): ``target_ref`` is accepted but not used in this
        method - confirm against callers.
        """
        self._update(source_ref.commit_id)
        message = close_message or "Closing branch: `{}`".format(source_ref.name)
        try:
            # hg records branch closing as a commit with close_branch=True
            self._remote.commit(
                message=safe_str(message),
                username=safe_str('%s <%s>' % (user_name, user_email)),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise
673 675
674 676 def _is_the_same_branch(self, target_ref, source_ref):
675 677 return (
676 678 self._get_branch_name(target_ref) ==
677 679 self._get_branch_name(source_ref))
678 680
679 681 def _get_branch_name(self, ref):
680 682 if ref.type == 'branch':
681 683 return ref.name
682 684 return self._remote.ctx_branch(ref.commit_id)
683 685
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
        """
        Create the shadow clone for this merge workspace if it does not
        exist yet, and return its filesystem path.
        """
        shadow_repository_path = self._get_shadow_repository_path(
            repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            # clone lazily on first use; later merges reuse the same shadow
            self._local_clone(shadow_repository_path)
            log.debug(
                'Prepared shadow repository in %s', shadow_repository_path)

        return shadow_repository_path
694 696
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` into ``target_ref`` inside a shadow repository
        and, unless ``dry_run`` is set, push the result back to this
        repository.  Returns a :class:`MergeResponse` describing whether the
        merge is possible, whether it succeeded, the resulting reference and
        a failure reason when applicable.
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # The target commit must be a current head, otherwise the push back
        # would create additional heads in the target repository.
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)

        try:
            if (target_ref.type == 'branch' and
                    len(self._heads(target_ref.name)) != 1):
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF)

        # All work happens in a dedicated shadow clone, never in this repo.
        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self._get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)
        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF)

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            # Closing commits the branch-close changeset in the shadow repo;
            # the flag tells us later whether it must also be pushed.
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception(
                    'Failure when doing close branch on hg shadow repo')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:
            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError:
                log.exception('Failure when doing local merge on hg shadow repo')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    # A second shadow instance with hooks enabled, so the push
                    # back into origin triggers the usual hook machinery.
                    shadow_repo_with_hooks = self._get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository.')
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
828 830
829 831 def _get_shadow_instance(
830 832 self, shadow_repository_path, enable_hooks=False):
831 833 config = self.config.copy()
832 834 if not enable_hooks:
833 835 config.clear_section('hooks')
834 836 return MercurialRepository(shadow_repository_path, config)
835 837
836 838 def _validate_pull_reference(self, reference):
837 839 if not (reference.name in self.bookmarks or
838 840 reference.name in self.branches or
839 841 self.get_commit(reference.commit_id)):
840 842 raise CommitDoesNotExistError(
841 843 'Unknown branch, bookmark or commit id')
842 844
843 845 def _local_pull(self, repository_path, reference):
844 846 """
845 847 Fetch a branch, bookmark or commit from a local repository.
846 848 """
847 849 repository_path = os.path.abspath(repository_path)
848 850 if repository_path == self.path:
849 851 raise ValueError('Cannot pull from the same repository')
850 852
851 853 reference_type_to_option_name = {
852 854 'book': 'bookmark',
853 855 'branch': 'branch',
854 856 }
855 857 option_name = reference_type_to_option_name.get(
856 858 reference.type, 'revision')
857 859
858 860 if option_name == 'revision':
859 861 ref = reference.commit_id
860 862 else:
861 863 ref = reference.name
862 864
863 865 options = {option_name: [ref]}
864 866 self._remote.pull_cmd(repository_path, hooks=False, **options)
865 867 self._remote.invalidate_vcs_cache()
866 868
867 869 def bookmark(self, bookmark, revision=None):
868 870 if isinstance(bookmark, unicode):
869 871 bookmark = safe_str(bookmark)
870 872 self._remote.bookmark(bookmark, revision=revision)
871 873 self._remote.invalidate_vcs_cache()
872 874
    def get_path_permissions(self, username):
        # Path-based ACLs are read from an optional ``.hg/hgacl`` file inside
        # the repository. Returns a path-permission checker built from the
        # include/exclude patterns, or None when no ACL file exists.
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
            # Look up ``<username><suffix>`` in the [narrowhgacl] section,
            # falling back to ``default<suffix>``. NOTE: ``hgacl`` is bound
            # by the enclosing scope below, before this closure is called.
            svalue = None
            try:
                svalue = hgacl.get('narrowhgacl', username + suffix)
            except configparser.NoOptionError:
                try:
                    svalue = hgacl.get('narrowhgacl', 'default' + suffix)
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            # The repository root is always part of the pattern list.
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                # A plain directory name (no glob characters) implicitly
                # covers everything below it.
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                # Any parse/IO problem is surfaced as a repository
                # requirement error rather than silently ignoring the ACL.
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None
909 911
910 912
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """Commit collection generator whose ids are numeric commit indexes
    rather than commit hashes."""

    def _commit_factory(self, commit_id):
        # Mercurial collections are driven by numeric indexes, so resolve
        # through ``commit_idx`` instead of ``commit_id``.
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,341 +1,343 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SVN repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import date_astimestamp
33 33 from rhodecode.lib.utils import safe_str, safe_unicode
34 34 from rhodecode.lib.vcs import connection, path as vcspath
35 35 from rhodecode.lib.vcs.backends import base
36 36 from rhodecode.lib.vcs.backends.svn.commit import (
37 37 SubversionCommit, _date_from_svn_properties)
38 38 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
39 39 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
40 40 from rhodecode.lib.vcs.conf import settings
41 41 from rhodecode.lib.vcs.exceptions import (
42 42 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
43 43 VCSError, NodeDoesNotExistError)
44 44
45 45
46 46 log = logging.getLogger(__name__)
47 47
48 48
class SubversionRepository(base.BaseRepository):
    """
    Subversion backend implementation

    .. important::

        It is very important to distinguish the commit index and the commit id
        which is assigned by Subversion. The first one is always handled as an
        `int` by this implementation. The commit id assigned by Subversion on
        the other side will always be a `str`.

        There is a specific trap since the first commit will have the index
        ``0`` but the svn id will be ``"1"``.

    """

    # Note: Subversion does not really have a default branch name.
    DEFAULT_BRANCH_NAME = None

    contact = base.BaseRepository.DEFAULT_CONTACT
    description = base.BaseRepository.DEFAULT_DESCRIPTION

    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 **kwargs):
        self.path = safe_str(os.path.abspath(repo_path))
        self.config = config if config else self.get_default_config()

        self._init_repo(create, src_url)

    @LazyProperty
    def _remote(self):
        # Created lazily so cached repository objects do not open a
        # vcsserver connection unless they actually need one.
        return connection.Svn(self.path, self.config)

    def _init_repo(self, create, src_url):
        """Create the repository on disk (optionally importing ``src_url``)
        or validate that an existing one is present at ``self.path``."""
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if create:
            self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
            if src_url:
                src_url = _sanitize_url(src_url)
                self._remote.import_remote_repository(src_url)
        else:
            self._check_path()

    @LazyProperty
    def commit_ids(self):
        # Subversion revisions are sequential integers starting at 1; the
        # commit ids are their string representations.
        head = self._remote.lookup(None)
        return [str(r) for r in xrange(1, head + 1)]

    @LazyProperty
    def branches(self):
        return self._tags_or_branches('vcs_svn_branch')

    @LazyProperty
    def branches_closed(self):
        # Subversion has no concept of closed branches.
        return {}

    @LazyProperty
    def bookmarks(self):
        # Subversion has no concept of bookmarks.
        return {}

    @LazyProperty
    def branches_all(self):
        # TODO: johbo: Implement proper branch support
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches

    @LazyProperty
    def tags(self):
        return self._tags_or_branches('vcs_svn_tag')

    def _tags_or_branches(self, config_section):
        """Find directories acting as branches/tags based on the glob
        patterns configured in *config_section*."""
        found_items = {}

        if self.is_empty():
            return {}

        for pattern in self._patterns_from_section(config_section):
            pattern = vcspath.sanitize(pattern)
            tip = self.get_commit()
            try:
                if pattern.endswith('*'):
                    basedir = tip.get_node(vcspath.dirname(pattern))
                    directories = basedir.dirs
                else:
                    directories = (tip.get_node(pattern), )
            except NodeDoesNotExistError:
                # A configured pattern that matches nothing is simply skipped.
                continue
            found_items.update(
                (safe_unicode(n.path),
                 self.commit_ids[-1])
                for n in directories)

        def get_name(item):
            return item[0]

        return OrderedDict(sorted(found_items.items(), key=get_name))

    def _patterns_from_section(self, section):
        return (pattern for key, pattern in self.config.items(section))

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        if self != repo2:
            raise ValueError(
                "Subversion does not support getting common ancestor of"
                " different repositories.")

        # With linear history the ancestor is simply the older revision.
        if int(commit_id1) < int(commit_id2):
            return commit_id1
        return commit_id2

    def verify(self):
        verify = self._remote.verify()

        self._remote.invalidate_vcs_cache()
        return verify

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        # TODO: johbo: Implement better comparison, this is a very naive
        # version which does not allow to compare branches, tags or folders
        # at all.
        if repo2 != self:
            raise ValueError(
                "Subversion does not support comparison of of different "
                "repositories.")

        if commit_id1 == commit_id2:
            return []

        commit_idx1 = self._get_commit_idx(commit_id1)
        commit_idx2 = self._get_commit_idx(commit_id2)

        commits = [
            self.get_commit(commit_idx=idx)
            for idx in range(commit_idx1 + 1, commit_idx2 + 1)]

        return commits

    def _get_commit_idx(self, commit_id):
        """Translate a commit id into the 0-based commit index, raising
        ``CommitDoesNotExistError`` for out-of-range revisions."""
        try:
            svn_rev = int(commit_id)
        except (ValueError, TypeError):
            # Not a plain revision number (e.g. 'HEAD' or None); let the
            # vcsserver resolve it. Narrowed from a bare `except:` which
            # also swallowed KeyboardInterrupt/SystemExit.
            # TODO: johbo: this might be only one case, HEAD, check this
            svn_rev = self._remote.lookup(commit_id)
        commit_idx = svn_rev - 1
        if commit_idx >= len(self.commit_ids):
            raise CommitDoesNotExistError(
                "Commit at index %s does not exist." % (commit_idx, ))
        return commit_idx

    @staticmethod
    def check_url(url, config):
        """
        Check if `url` is a valid source to import a Subversion repository.
        """
        # convert to URL if it's a local directory
        if os.path.isdir(url):
            url = 'file://' + urllib.pathname2url(url)
        return connection.Svn.check_url(url, config.serialize())

    @staticmethod
    def is_valid_repository(path):
        try:
            SubversionRepository(path)
            return True
        except VCSError:
            pass
        return False

    def _check_path(self):
        if not os.path.exists(self.path):
            raise VCSError('Path "%s" does not exist!' % (self.path, ))
        if not self._remote.is_path_valid_repository(self.path):
            raise VCSError(
                'Path "%s" does not contain a Subversion repository' %
                (self.path, ))

    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        # Subversion always has a first commit which has id "0" and contains
        # what we are looking for.
        last_id = len(self.commit_ids)
        properties = self._remote.revision_properties(last_id)
        return _date_from_svn_properties(properties)

    @LazyProperty
    def in_memory_commit(self):
        return SubversionInMemoryCommit(self)

    def get_hook_location(self):
        """
        returns absolute path to location where hooks are stored
        """
        return os.path.join(self.path, 'hooks')

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """Return a :class:`SubversionCommit` for the given id or index."""
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        if commit_id is not None:
            self._validate_commit_id(commit_id)
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                commit_id = self.commit_ids[commit_idx]
            except IndexError:
                raise CommitDoesNotExistError

        commit_id = self._sanitize_commit_id(commit_id)
        commit = SubversionCommit(repository=self, commit_id=commit_id)
        return commit

    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None):
        """Return a lazy collection of commits, optionally restricted by
        id range, date range or branch (path) name."""
        if self.is_empty():
            raise EmptyRepositoryError("There are no commit_ids yet")
        self._validate_branch_name(branch_name)

        if start_id is not None:
            self._validate_commit_id(start_id)
        if end_id is not None:
            self._validate_commit_id(end_id)

        start_raw_id = self._sanitize_commit_id(start_id)
        start_pos = self.commit_ids.index(start_raw_id) if start_id else None
        end_raw_id = self._sanitize_commit_id(end_id)
        end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))
        if end_pos is not None:
            # Make the slice boundary inclusive of the end commit.
            end_pos += 1

        # Date based filtering
        if start_date or end_date:
            start_raw_id, end_raw_id = self._remote.lookup_interval(
                date_astimestamp(start_date) if start_date else None,
                date_astimestamp(end_date) if end_date else None)
            start_pos = start_raw_id - 1
            end_pos = end_raw_id

        commit_ids = self.commit_ids

        # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
        if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
            svn_rev = long(self.commit_ids[-1])
            commit_ids = self._remote.node_history(
                path=branch_name, revision=svn_rev, limit=None)
            commit_ids = [str(i) for i in reversed(commit_ids)]

        if start_pos or end_pos:
            commit_ids = commit_ids[start_pos:end_pos]
        return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)

    def _sanitize_commit_id(self, commit_id):
        """Normalize ``commit_id`` to a plain revision string, resolving
        symbolic names like HEAD/tip/None via the vcsserver."""
        if commit_id and commit_id.isdigit():
            if int(commit_id) <= len(self.commit_ids):
                return commit_id
            else:
                raise CommitDoesNotExistError(
                    "Commit %s does not exist." % (commit_id, ))
        if commit_id not in [
                None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
            raise CommitDoesNotExistError(
                "Commit id %s not understood." % (commit_id, ))
        svn_rev = self._remote.lookup('HEAD')
        return str(svn_rev)

    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """Return a :class:`SubversionDiff` between two commits, optionally
        limited to *path* (and *path1* on the first commit's side)."""
        self._validate_diff_commits(commit1, commit2)
        svn_rev1 = long(commit1.raw_id)
        svn_rev2 = long(commit2.raw_id)
        diff = self._remote.diff(
            svn_rev1, svn_rev2, path1=path1, path2=path,
            ignore_whitespace=ignore_whitespace, context=context)
        return SubversionDiff(diff)
336 338
337 339
338 340 def _sanitize_url(url):
339 341 if '://' not in url:
340 342 url = 'file://' + urllib.pathname2url(url)
341 343 return url
@@ -1,246 +1,253 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 This serves as a drop in replacement for pycurl. It implements the pycurl Curl
23 23 class in a way that is compatible with gevent.
24 24 """
25 25
26 26
27 27 import logging
28 28 import gevent
29 29 import pycurl
30 import greenlet
30 31
31 32 # Import everything from pycurl.
32 33 # This allows us to use this module as a drop in replacement of pycurl.
33 34 from pycurl import * # noqa
34 35
35 36 from gevent import core
36 37 from gevent.hub import Waiter
37 38
38 39
39 40 log = logging.getLogger(__name__)
40 41
41 42
class GeventCurlMulti(object):
    """
    Wrapper around pycurl.CurlMulti that integrates it into gevent's event
    loop.

    Parts of this class are a modified version of code copied from the Tornado
    Web Server project which is licensed under the Apache License, Version 2.0
    (the "License"). To be more specific the code originates from this file:
    https://github.com/tornadoweb/tornado/blob/stable/tornado/curl_httpclient.py

    This is the original license header of the origin:

        Copyright 2009 Facebook

        Licensed under the Apache License, Version 2.0 (the "License"); you may
        not use this file except in compliance with the License. You may obtain
        a copy of the License at

            http://www.apache.org/licenses/LICENSE-2.0

        Unless required by applicable law or agreed to in writing, software
        distributed under the License is distributed on an "AS IS" BASIS,
        WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
        implied. See the License for the specific language governing
        permissions and limitations under the License.
    """

    def __init__(self, loop=None):
        # Maps file descriptors to the gevent io watchers registered for
        # them on libcurl's behalf.
        self._watchers = {}
        self._timeout = None
        self.loop = loop or gevent.get_hub().loop

        # Setup curl's multi instance.
        self._curl_multi = pycurl.CurlMulti()
        # libcurl calls these back to request timer scheduling and socket
        # (un)registration; we translate them into gevent loop primitives.
        self.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout)
        self.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket)

    def __getattr__(self, item):
        """
        The pycurl.CurlMulti class is final and we cannot subclass it.
        Therefore we are wrapping it and forward everything to it here.
        """
        return getattr(self._curl_multi, item)

    def add_handle(self, curl):
        """
        Add handle variant that also takes care about the initial invocation of
        socket action method. This is done by setting an immediate timeout.
        """
        result = self._curl_multi.add_handle(curl)
        # A zero timeout fires _handle_timeout on the next loop iteration,
        # which kicks off the transfer via socket_action.
        self._set_timeout(0)
        return result

    def _handle_socket(self, event, fd, multi, data):
        """
        Called by libcurl when it wants to change the file descriptors it cares
        about.
        """
        event_map = {
            pycurl.POLL_NONE: core.NONE,
            pycurl.POLL_IN: core.READ,
            pycurl.POLL_OUT: core.WRITE,
            pycurl.POLL_INOUT: core.READ | core.WRITE
        }

        if event == pycurl.POLL_REMOVE:
            watcher = self._watchers.pop(fd, None)
            if watcher is not None:
                watcher.stop()
        else:
            gloop_event = event_map[event]
            watcher = self._watchers.get(fd)
            if watcher is None:
                watcher = self.loop.io(fd, gloop_event)
                watcher.start(self._handle_events, fd, pass_events=True)
                self._watchers[fd] = watcher
            else:
                # Only restart the existing watcher when the requested event
                # mask actually changed.
                if watcher.events != gloop_event:
                    watcher.stop()
                    watcher.events = gloop_event
                    watcher.start(self._handle_events, fd, pass_events=True)

    def _set_timeout(self, msecs):
        """
        Called by libcurl to schedule a timeout.
        """
        # Replace any previously scheduled timer; libcurl expects a single
        # active timeout. msecs is converted to the seconds gevent expects.
        if self._timeout is not None:
            self._timeout.stop()
        self._timeout = self.loop.timer(msecs/1000.0)
        self._timeout.start(self._handle_timeout)

    def _handle_events(self, events, fd):
        # Translate gevent readiness flags into libcurl select bits and loop
        # socket_action until libcurl stops asking for another call.
        action = 0
        if events & core.READ:
            action |= pycurl.CSELECT_IN
        if events & core.WRITE:
            action |= pycurl.CSELECT_OUT
        while True:
            try:
                ret, num_handles = self._curl_multi.socket_action(fd, action)
            except pycurl.error as e:
                ret = e.args[0]
            if ret != pycurl.E_CALL_MULTI_PERFORM:
                break
        self._finish_pending_requests()

    def _handle_timeout(self):
        """
        Called by IOLoop when the requested timeout has passed.
        """
        if self._timeout is not None:
            self._timeout.stop()
            self._timeout = None
        while True:
            try:
                ret, num_handles = self._curl_multi.socket_action(
                    pycurl.SOCKET_TIMEOUT, 0)
            except pycurl.error as e:
                ret = e.args[0]
            if ret != pycurl.E_CALL_MULTI_PERFORM:
                break
        self._finish_pending_requests()

        # In theory, we shouldn't have to do this because curl will call
        # _set_timeout whenever the timeout changes. However, sometimes after
        # _handle_timeout we will need to reschedule immediately even though
        # nothing has changed from curl's perspective. This is because when
        # socket_action is called with SOCKET_TIMEOUT, libcurl decides
        # internally which timeouts need to be processed by using a monotonic
        # clock (where available) while tornado uses python's time.time() to
        # decide when timeouts have occurred. When those clocks disagree on
        # elapsed time (as they will whenever there is an NTP adjustment),
        # tornado might call _handle_timeout before libcurl is ready. After
        # each timeout, resync the scheduled timeout with libcurl's current
        # state.
        new_timeout = self._curl_multi.timeout()
        if new_timeout >= 0:
            self._set_timeout(new_timeout)

    def _finish_pending_requests(self):
        """
        Process any requests that were completed by the last call to
        multi.socket_action.
        """
        while True:
            num_q, ok_list, err_list = self._curl_multi.info_read()
            for curl in ok_list:
                # Resume the greenlet blocked in GeventCurl.perform().
                curl.waiter.switch(None)
            for curl, errnum, errmsg in err_list:
                # Raise inside the waiting greenlet.
                curl.waiter.throw(Exception('%s %s' % (errnum, errmsg)))
            if num_q == 0:
                break
194 195
195 196
class GeventCurl(object):
    """
    Gevent compatible implementation of the pycurl.Curl class. Essentially a
    wrapper around pycurl.Curl with a customized perform method. It uses the
    GeventCurlMulti class to implement a blocking API to libcurl's "easy"
    interface.
    """

    # Reference to the GeventCurlMulti instance.
    _multi_instance = None

    def __init__(self):
        self._curl = pycurl.Curl()

    def __getattr__(self, item):
        """
        The pycurl.Curl class is final and we cannot subclass it. Therefore we
        are wrapping it and forward everything to it here.
        """
        return getattr(self._curl, item)

    @property
    def _multi(self):
        """
        Lazy property that returns the GeventCurlMulti instance. The value is
        cached as a class attribute. Therefore only one instance per process
        exists.
        """
        if GeventCurl._multi_instance is None:
            GeventCurl._multi_instance = GeventCurlMulti()
        return GeventCurl._multi_instance

    def perform(self):
        """
        This perform method is compatible with gevent because it uses gevent
        synchronization mechanisms to wait for the request to finish.
        """
        # A still-present waiter means another greenlet is in the middle of a
        # perform() on this same curl object; sharing the handle would
        # corrupt both transfers, so fail loudly instead.
        if getattr(self._curl, 'waiter', None) is not None:
            current = greenlet.getcurrent()
            msg = 'This curl object is already used by another greenlet, {}, \n' \
                  'this is {}'.format(self._curl.waiter, current)
            raise Exception(msg)

        # The waiter is resumed (or thrown into) by
        # GeventCurlMulti._finish_pending_requests when the transfer ends.
        waiter = self._curl.waiter = Waiter()
        try:
            self._multi.add_handle(self._curl)
            try:
                return waiter.get()
            finally:
                self._multi.remove_handle(self._curl)
        finally:
            # Always clear the waiter so the handle can be reused.
            del self._curl.waiter


# Curl is originally imported from pycurl. At this point we override it with
# our custom implementation.
Curl = GeventCurl
@@ -1,4511 +1,4514 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Database Models for RhodeCode Enterprise
23 23 """
24 24
25 25 import re
26 26 import os
27 27 import time
28 28 import hashlib
29 29 import logging
30 30 import datetime
31 31 import warnings
32 32 import ipaddress
33 33 import functools
34 34 import traceback
35 35 import collections
36 36
37 37 from sqlalchemy import (
38 38 or_, and_, not_, func, TypeDecorator, event,
39 39 Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column,
40 40 Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary,
41 41 Text, Float, PickleType)
42 42 from sqlalchemy.sql.expression import true, false
43 43 from sqlalchemy.sql.functions import coalesce, count # noqa
44 44 from sqlalchemy.orm import (
45 45 relationship, joinedload, class_mapper, validates, aliased)
46 46 from sqlalchemy.ext.declarative import declared_attr
47 47 from sqlalchemy.ext.hybrid import hybrid_property
48 48 from sqlalchemy.exc import IntegrityError # noqa
49 49 from sqlalchemy.dialects.mysql import LONGTEXT
50 50 from zope.cachedescriptors.property import Lazy as LazyProperty
51 51
52 52 from pyramid.threadlocal import get_current_request
53 53
54 54 from rhodecode.translation import _
55 55 from rhodecode.lib.vcs import get_vcs_instance
56 56 from rhodecode.lib.vcs.backends.base import EmptyCommit, Reference
57 57 from rhodecode.lib.utils2 import (
58 58 str2bool, safe_str, get_commit_safe, safe_unicode, sha1_safe,
59 59 time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict,
60 60 glob2re, StrictAttributeDict, cleaned_uri)
61 61 from rhodecode.lib.jsonalchemy import MutationObj, MutationList, JsonType, \
62 62 JsonRaw
63 63 from rhodecode.lib.ext_json import json
64 64 from rhodecode.lib.caching_query import FromCache
65 65 from rhodecode.lib.encrypt import AESCipher
66 66
67 67 from rhodecode.model.meta import Base, Session
68 68
# Separator used when composing repository / group paths into URLs.
URL_SEP = '/'
log = logging.getLogger(__name__)

# =============================================================================
# BASE CLASSES
# =============================================================================

# this is propagated from .ini file rhodecode.encrypted_values.secret or
# beaker.session.secret if first is not set.
# and initialized at environment.py
ENCRYPTION_KEY = None

# used to sort permissions by types, '#' used here is not allowed to be in
# usernames, and it's very early in sorted string.printable table.
PERMISSION_TYPE_SORT = {
    'admin': '####',
    'write': '###',
    'read': '##',
    'none': '#',
}
89 89
90 90
def display_user_sort(obj):
    """
    Sort function used to sort permissions in .permissions() function of
    Repository, RepoGroup, UserGroup. Also it put the default user in front
    of all other resources
    """

    # The default user always wins: '#####' sorts before every other prefix.
    if obj.username == User.DEFAULT_USER:
        return '#####'
    permission_level = obj.permission.split('.')[-1]
    return PERMISSION_TYPE_SORT.get(permission_level, '') + obj.username
102 102
103 103
def display_user_group_sort(obj):
    """
    Sort function used to sort permissions in .permissions() function of
    Repository, RepoGroup, UserGroup. Also it put the default user in front
    of all other resources
    """

    permission_level = obj.permission.split('.')[-1]
    return PERMISSION_TYPE_SORT.get(permission_level, '') + obj.users_group_name
113 113
114 114
def _hash_key(k):
    # Stable sha1-based digest used to build safe cache keys (see the
    # FromCache usages below); keeps arbitrary user input out of cache names.
    return sha1_safe(k)
117 117
118 118
def in_filter_generator(qry, items, limit=500):
    """
    Splits IN() into multiple with OR
    e.g.::
        cnt = Repository.query().filter(
            or_(
                *in_filter_generator(Repository.repo_id, range(100000))
            )).count()

    :param qry: a column/attribute exposing ``.in_()`` (SQLAlchemy column)
    :param items: sequence of values to match against
    :param limit: maximum number of values per generated IN() clause
    :return: list of ``qry.in_(...)`` expressions covering all of ``items``
    """
    if not items:
        # empty list will cause empty query which might cause security issues
        # this can lead to hidden unpleasant results
        items = [-1]

    parts = []
    # Iterate with an explicit offset instead of the Python-2-only xrange()
    # so this helper works identically on Python 2 and Python 3.
    offset = 0
    total = len(items)
    while offset < total:
        parts.append(
            qry.in_(items[offset: offset + limit])
        )
        offset += limit

    return parts
140 140
141 141
# Common __table_args__ dict merged into every model's table-args tuple:
# InnoDB/utf8 for MySQL and autoincrement behavior for SQLite.
base_table_args = {
    'extend_existing': True,
    'mysql_engine': 'InnoDB',
    'mysql_charset': 'utf8',
    'sqlite_autoincrement': True
}
148 148
149 149
class EncryptedTextValue(TypeDecorator):
    """
    Special column for encrypted long text data, use like::

        value = Column("encrypted_value", EncryptedTextValue(), nullable=False)

    This column is intelligent so if value is in unencrypted form it return
    unencrypted form, but on save it always encrypts
    """
    impl = Text

    def process_bind_param(self, value, dialect):
        # Called on the way INTO the database: always store encrypted,
        # using the 'enc$aes_hmac$<payload>' envelope format.
        if not value:
            return value
        if value.startswith('enc$aes$') or value.startswith('enc$aes_hmac$'):
            # protect against double encrypting if someone manually starts
            # doing
            raise ValueError('value needs to be in unencrypted format, ie. '
                             'not starting with enc$aes')
        return 'enc$aes_hmac$%s' % AESCipher(
            ENCRYPTION_KEY, hmac=True).encrypt(value)

    def process_result_value(self, value, dialect):
        # Called on the way OUT of the database: transparently decrypt
        # values in the 'enc$<mode>$<payload>' envelope, pass through
        # anything else unchanged (legacy unencrypted rows).
        import rhodecode

        if not value:
            return value

        parts = value.split('$', 3)
        if not len(parts) == 3:
            # probably not encrypted values
            return value
        else:
            if parts[0] != 'enc':
                # parts ok but without our header ?
                return value
            # strict mode makes HMAC verification failures raise instead of
            # silently returning garbage; controlled from the .ini config
            enc_strict_mode = str2bool(rhodecode.CONFIG.get(
                'rhodecode.encrypted_values.strict') or True)
            # at that stage we know it's our encryption
            if parts[1] == 'aes':
                decrypted_data = AESCipher(ENCRYPTION_KEY).decrypt(parts[2])
            elif parts[1] == 'aes_hmac':
                decrypted_data = AESCipher(
                    ENCRYPTION_KEY, hmac=True,
                    strict_verification=enc_strict_mode).decrypt(parts[2])
            else:
                raise ValueError(
                    'Encryption type part is wrong, must be `aes` '
                    'or `aes_hmac`, got `%s` instead' % (parts[1]))
            return decrypted_data
200 200
201 201
class BaseModel(object):
    """
    Base Model for all classes
    """

    @classmethod
    def _get_keys(cls):
        """return column names for this model """
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        return dict with keys and values corresponding
        to this model data """

        d = {}
        for k in self._get_keys():
            d[k] = getattr(self, k)

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.iteritems():
                d[k] = val
        return d

    def get_appstruct(self):
        """return list with keys and values tuples corresponding
        to this model data """

        lst = []
        for k in self._get_keys():
            lst.append((k, getattr(self, k),))
        return lst

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""

        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        """Return a new SQLAlchemy query bound to this model."""
        return Session().query(cls)

    @classmethod
    def get(cls, id_):
        """Fetch by primary key; returns None for falsy ids or misses."""
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch by primary key or raise pyramid's HTTPNotFound."""
        from pyramid.httpexceptions import HTTPNotFound

        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            raise HTTPNotFound()

        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        """Return every row of this model."""
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        """Mark the row with the given primary key deleted in the session."""
        obj = cls.query().get(id_)
        Session().delete(obj)

    @classmethod
    def identity_cache(cls, session, attr_name, value):
        # Look the object up in the session's identity map (objects already
        # loaded in this session) instead of issuing a new query.
        exist_in_session = []
        for (item_cls, pkey), instance in session.identity_map.items():
            if cls == item_cls and getattr(instance, attr_name) == value:
                exist_in_session.append(instance)
        if exist_in_session:
            if len(exist_in_session) == 1:
                return exist_in_session[0]
            log.exception(
                'multiple objects with attr %s and '
                'value %s found with same name: %r',
                attr_name, value, exist_in_session)

    def __repr__(self):
        if hasattr(self, '__unicode__'):
            # python repr needs to return str
            try:
                return safe_str(self.__unicode__())
            except UnicodeDecodeError:
                pass
        return '<DB:%s>' % (self.__class__.__name__)
306 306
307 307
class RhodeCodeSetting(Base, BaseModel):
    """Global application setting stored as a typed name/value pair."""

    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        base_table_args
    )

    # Converters applied when reading app_settings_value back out, keyed by
    # the (first part of the) stored type name.
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_unicode,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
    GLOBAL_CONF_KEY = 'app_settings'

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # values are always persisted as unicode (see the setter below)
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        _type = self.app_settings_type
        if _type:
            # type may be e.g. 'unicode.encrypted'; converter keys on the base
            _type = self.app_settings_type.split('.')[0]
        # decode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            v = safe_unicode(cipher.process_result_value(v, None))

        converter = self.SETTINGS_TYPES.get(_type) or \
            self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        val = safe_unicode(val)
        # encode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            val = safe_unicode(cipher.process_bind_param(val, None))
        self._app_settings_value = val

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # only the base type (before any '.encrypted' suffix) is validated
        if val.split('.')[0] not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
386 386
387 387
class RhodeCodeUi(Base, BaseModel):
    """Global VCS 'ui' configuration entries (hooks, svn patterns, etc.)."""

    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        base_table_args
    )

    HOOK_REPO_SIZE = 'changegroup.repo_size'
    # HG
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
    HOOK_PUSH = 'changegroup.push_logger'
    HOOK_PUSH_KEY = 'pushkey.key_push'

    # TODO: johbo: Unify way how hooks are configured for git and hg,
    # git part is currently hardcoded.

    # SVN PATTERNS
    SVN_BRANCH_ID = 'vcs_svn_branch'
    SVN_TAG_ID = 'vcs_svn_tag'

    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.__class__.__name__, self.ui_section,
                                    self.ui_key, self.ui_value)
426 426
427 427
class RepoRhodeCodeSetting(Base, BaseModel):
    """Per-repository override of a RhodeCodeSetting-style name/value pair."""

    __tablename__ = 'repo_rhodecode_settings'
    __table_args__ = (
        UniqueConstraint(
            'app_settings_name', 'repository_id',
            name='uq_repo_rhodecode_setting_name_repo_id'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    app_settings_id = Column(
        "app_settings_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    app_settings_name = Column(
        "app_settings_name", String(255), nullable=True, unique=None,
        default=None)
    _app_settings_value = Column(
        "app_settings_value", String(4096), nullable=True, unique=None,
        default=None)
    _app_settings_type = Column(
        "app_settings_type", String(255), nullable=True, unique=None,
        default=None)

    repository = relationship('Repository')

    def __init__(self, repository_id, key='', val='', type='unicode'):
        self.repository_id = repository_id
        self.app_settings_name = key
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # values are always persisted as unicode (see the setter below)
        assert type(val) == unicode
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        type_ = self.app_settings_type
        # converters are shared with the global settings model; note that
        # unlike RhodeCodeSetting, repo settings do not support encryption
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_unicode(val)

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        if val not in SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __unicode__(self):
        return u"<%s('%s:%s:%s[%s]')>" % (
            self.__class__.__name__, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
501 501
502 502
class RepoRhodeCodeUi(Base, BaseModel):
    """Per-repository override of a RhodeCodeUi 'ui' configuration entry."""

    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository')

    def __repr__(self):
        return '<%s[%s:%s]%s=>%s]>' % (
            self.__class__.__name__, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
533 533
534 534
class User(Base, BaseModel):
    __tablename__ = 'users'
    __table_args__ = (
        UniqueConstraint('username'), UniqueConstraint('email'),
        Index('u_username_idx', 'username'),
        Index('u_email_idx', 'email'),
        base_table_args
    )

    DEFAULT_USER = 'default'
    DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org'
    DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}'

    user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    password = Column("password", String(255), nullable=True, unique=None, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    admin = Column("admin", Boolean(), nullable=True, unique=None, default=False)
    # NOTE: python attribute `name` maps to DB column "firstname"
    name = Column("firstname", String(255), nullable=True, unique=None, default=None)
    lastname = Column("lastname", String(255), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=None, default=None)
    last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None)
    last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None)

    extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None)
    extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None)
    _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _user_data = Column("user_data", LargeBinary(), nullable=True)  # JSON data

    user_log = relationship('UserLog')
    user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all')

    repositories = relationship('Repository')
    repository_groups = relationship('RepoGroup')
    user_groups = relationship('UserGroup')

    user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all')
    followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all')

    repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all')
    repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all')
    user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all')

    group_member = relationship('UserGroupMember', cascade='all')

    notifications = relationship('UserNotification', cascade='all')
    # notifications assigned to this user
    user_created_notifications = relationship('Notification', cascade='all')
    # comments created by this user
    user_comments = relationship('ChangesetComment', cascade='all')
    # user profile extra info
    user_emails = relationship('UserEmailMap', cascade='all')
    user_ip_map = relationship('UserIpMap', cascade='all')
    user_auth_tokens = relationship('UserApiKeys', cascade='all')
    user_ssh_keys = relationship('UserSshKeys', cascade='all')

    # gists
    user_gists = relationship('Gist', cascade='all')
    # user pull requests
    user_pull_requests = relationship('PullRequest', cascade='all')
    # external identities
    extenal_identities = relationship(
        'ExternalIdentity',
        primaryjoin="User.user_id==ExternalIdentity.local_user_id",
        cascade='all')
    # review rules
    user_review_rules = relationship('RepoReviewRuleUser', cascade='all')
604 604
605 605 def __unicode__(self):
606 606 return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
607 607 self.user_id, self.username)
608 608
    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # emails are normalized to lowercase on write; empty values become None
        self._email = val.lower() if val else None

    @hybrid_property
    def first_name(self):
        # HTML-escaped view of the `name` ("firstname") column
        from rhodecode.lib import helpers as h
        if self.name:
            return h.escape(self.name)
        return self.name

    @hybrid_property
    def last_name(self):
        # HTML-escaped view of the `lastname` column
        from rhodecode.lib import helpers as h
        if self.lastname:
            return h.escape(self.lastname)
        return self.lastname

    @hybrid_property
    def api_key(self):
        """
        Fetch if exist an auth-token with role ALL connected to this user
        """
        user_auth_token = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first()
        if user_auth_token:
            user_auth_token = user_auth_token.api_key

        return user_auth_token

    @api_key.setter
    def api_key(self, val):
        # don't allow to set API key this is deprecated for now
        self._api_key = None

    @property
    def reviewer_pull_requests(self):
        # pull requests where this user is registered as a reviewer
        return PullRequestReviewers.query() \
            .options(joinedload(PullRequestReviewers.pull_request)) \
            .filter(PullRequestReviewers.user_id == self.user_id) \
            .all()

    @property
    def firstname(self):
        # alias for future
        return self.name

    @property
    def emails(self):
        # primary email first, then all extra emails from UserEmailMap
        other = UserEmailMap.query()\
            .filter(UserEmailMap.user == self) \
            .order_by(UserEmailMap.email_id.asc()) \
            .all()
        return [self.email] + [x.email for x in other]

    @property
    def auth_tokens(self):
        # plain token strings of all of this user's auth tokens
        auth_tokens = self.get_auth_tokens()
        return [x.api_key for x in auth_tokens]

    def get_auth_tokens(self):
        # all UserApiKeys rows for this user, oldest first
        return UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .order_by(UserApiKeys.user_api_key_id.asc())\
            .all()
681 681
    @LazyProperty
    def feed_token(self):
        # cached-per-instance shortcut for get_feed_token()
        return self.get_feed_token()

    def get_feed_token(self, cache=True):
        """
        Return this user's first FEED-role auth token, or the sentinel
        string 'NO_FEED_TOKEN_AVAILABLE' when none exists.
        """
        feed_tokens = UserApiKeys.query()\
            .filter(UserApiKeys.user == self)\
            .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED)
        if cache:
            feed_tokens = feed_tokens.options(
                FromCache("sql_cache_short", "get_user_feed_token_%s" % self.user_id))

        feed_tokens = feed_tokens.all()
        if feed_tokens:
            return feed_tokens[0].api_key
        return 'NO_FEED_TOKEN_AVAILABLE'
698 698
    @classmethod
    def get(cls, user_id, cache=False):
        """Fetch a user by id, optionally through the short SQL cache."""
        if not user_id:
            return

        user = cls.query()
        if cache:
            user = user.options(
                FromCache("sql_cache_short", "get_users_%s" % user_id))
        return user.get(user_id)

    @classmethod
    def extra_valid_auth_tokens(cls, user, role=None):
        """
        Return the user's non-expired auth tokens, optionally restricted to
        a role (tokens with ROLE_ALL always qualify).
        """
        tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if role:
            tokens = tokens.filter(or_(UserApiKeys.role == role,
                                       UserApiKeys.role == UserApiKeys.ROLE_ALL))
        return tokens.all()
719 719
    def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None):
        """
        Check ``auth_token`` against this user's valid (non-expired) tokens.

        :param auth_token: plain token string supplied by the caller
        :param roles: optional list of acceptable token roles; ROLE_ALL is
            always accepted in addition
        :param scope_repo_id: calling repository id, matched against tokens
            that carry a repo scope
        :return: True if a plain or hashed token matches, else False
        """
        from rhodecode.lib import auth

        log.debug('Trying to authenticate user: %s via auth-token, '
                  'and roles: %s', self, roles)

        if not auth_token:
            return False

        crypto_backend = auth.crypto_backend()

        roles = (roles or []) + [UserApiKeys.ROLE_ALL]
        tokens_q = UserApiKeys.query()\
            .filter(UserApiKeys.user_id == self.user_id)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))

        tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles))

        # partition candidate tokens: hashed tokens need an expensive
        # crypto check, plain ones a simple membership test
        plain_tokens = []
        hash_tokens = []

        for token in tokens_q.all():
            # verify scope first
            if token.repo_id:
                # token has a scope, we need to verify it
                if scope_repo_id != token.repo_id:
                    log.debug(
                        'Scope mismatch: token has a set repo scope: %s, '
                        'and calling scope is:%s, skipping further checks',
                        token.repo, scope_repo_id)
                    # token has a scope, and it doesn't match, skip token
                    continue

            if token.api_key.startswith(crypto_backend.ENC_PREF):
                hash_tokens.append(token.api_key)
            else:
                plain_tokens.append(token.api_key)

        # cheap check first
        is_plain_match = auth_token in plain_tokens
        if is_plain_match:
            return True

        for hashed in hash_tokens:
            # TODO(marcink): this is expensive to calculate, but most secure
            match = crypto_backend.hash_check(auth_token, hashed)
            if match:
                return True

        return False
770 770
    @property
    def ip_addresses(self):
        # whitelisted IP addresses configured for this user
        ret = UserIpMap.query().filter(UserIpMap.user == self).all()
        return [x.ip_addr for x in ret]

    @property
    def username_and_name(self):
        # e.g. "marcin (Marcin K)"
        return '%s (%s %s)' % (self.username, self.first_name, self.last_name)
779 779
780 780 @property
781 781 def username_or_name_or_email(self):
782 782 full_name = self.full_name if self.full_name is not ' ' else None
783 783 return self.username or full_name or self.email
784 784
    @property
    def full_name(self):
        # "<first> <last>"; note this is ' ' when both parts are empty
        return '%s %s' % (self.first_name, self.last_name)

    @property
    def full_name_or_username(self):
        # full name only when BOTH parts are set, otherwise the username
        return ('%s %s' % (self.first_name, self.last_name)
                if (self.first_name and self.last_name) else self.username)

    @property
    def full_contact(self):
        # "<first> <last> <email>" suitable for commit-author style display
        return '%s %s <%s>' % (self.first_name, self.last_name, self.email)

    @property
    def short_contact(self):
        return '%s %s' % (self.first_name, self.last_name)

    @property
    def is_admin(self):
        return self.admin
805 805
    def AuthUser(self, **kwargs):
        """
        Returns instance of AuthUser for this user
        """
        from rhodecode.lib.auth import AuthUser
        return AuthUser(user_id=self.user_id, username=self.username, **kwargs)

    @hybrid_property
    def user_data(self):
        # JSON blob stored in the `user_data` column, decoded to a dict;
        # returns {} for empty or undecodable values
        if not self._user_data:
            return {}

        try:
            return json.loads(self._user_data)
        except TypeError:
            return {}

    @user_data.setter
    def user_data(self, val):
        if not isinstance(val, dict):
            raise Exception('user_data must be dict, got %s' % type(val))
        try:
            self._user_data = json.dumps(val)
        except Exception:
            # serialization failure is logged but deliberately not raised
            log.error(traceback.format_exc())
831 831
    @classmethod
    def get_by_username(cls, username, case_insensitive=False,
                        cache=False, identity_cache=False):
        """
        Fetch a user by username.

        :param case_insensitive: compare usernames with lower()
        :param cache: use the short SQL cache
        :param identity_cache: try the session identity map first (only
            honored together with ``cache``)
        """
        session = Session()

        if case_insensitive:
            q = cls.query().filter(
                func.lower(cls.username) == func.lower(username))
        else:
            q = cls.query().filter(cls.username == username)

        if cache:
            if identity_cache:
                val = cls.identity_cache(session, 'username', username)
                if val:
                    return val
            else:
                # hash the username so arbitrary input is safe as a cache key
                cache_key = "get_user_by_name_%s" % _hash_key(username)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()
854 854
    @classmethod
    def get_by_auth_token(cls, auth_token, cache=False):
        """Resolve a non-expired auth token to its owning user, or None."""
        q = UserApiKeys.query()\
            .filter(UserApiKeys.api_key == auth_token)\
            .filter(or_(UserApiKeys.expires == -1,
                        UserApiKeys.expires >= time.time()))
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_auth_token_%s" % auth_token))

        match = q.first()
        if match:
            return match.user
868 868
    @classmethod
    def get_by_email(cls, email, case_insensitive=False, cache=False):
        """
        Fetch a user by email; falls back to the extra-emails map
        (UserEmailMap) when no user owns it as a primary email.
        """
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.email) == func.lower(email))

        else:
            q = cls.query().filter(cls.email == email)

        email_key = _hash_key(email)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_email_key_%s" % email_key))

        ret = q.scalar()
        if ret is None:
            q = UserEmailMap.query()
            # try fetching in alternate email map
            if case_insensitive:
                q = q.filter(func.lower(UserEmailMap.email) == func.lower(email))
            else:
                q = q.filter(UserEmailMap.email == email)
            q = q.options(joinedload(UserEmailMap.user))
            if cache:
                q = q.options(
                    FromCache("sql_cache_short", "get_email_map_key_%s" % email_key))
            ret = getattr(q.scalar(), 'user', None)

        return ret
898 898
899 899 @classmethod
900 900 def get_from_cs_author(cls, author):
901 901 """
902 902 Tries to get User objects out of commit author string
903 903
904 904 :param author:
905 905 """
906 906 from rhodecode.lib.helpers import email, author_name
907 907 # Valid email in the attribute passed, see if they're in the system
908 908 _email = email(author)
909 909 if _email:
910 910 user = cls.get_by_email(_email, case_insensitive=True)
911 911 if user:
912 912 return user
913 913 # Maybe we can match by username?
914 914 _author = author_name(author)
915 915 user = cls.get_by_username(_author, case_insensitive=True)
916 916 if user:
917 917 return user
918 918
919 919 def update_userdata(self, **kwargs):
920 920 usr = self
921 921 old = usr.user_data
922 922 old.update(**kwargs)
923 923 usr.user_data = old
924 924 Session().add(usr)
925 925 log.debug('updated userdata with ', kwargs)
926 926
927 927 def update_lastlogin(self):
928 928 """Update user lastlogin"""
929 929 self.last_login = datetime.datetime.now()
930 930 Session().add(self)
931 931 log.debug('updated user %s lastlogin', self.username)
932 932
933 933 def update_password(self, new_password):
934 934 from rhodecode.lib.auth import get_crypt_password
935 935
936 936 self.password = get_crypt_password(new_password)
937 937 Session().add(self)
938 938
939 939 @classmethod
940 940 def get_first_super_admin(cls):
941 941 user = User.query().filter(User.admin == true()).first()
942 942 if user is None:
943 943 raise Exception('FATAL: Missing administrative account!')
944 944 return user
945 945
946 946 @classmethod
947 947 def get_all_super_admins(cls):
948 948 """
949 949 Returns all admin accounts sorted by username
950 950 """
951 951 return User.query().filter(User.admin == true())\
952 952 .order_by(User.username.asc()).all()
953 953
954 954 @classmethod
955 955 def get_default_user(cls, cache=False, refresh=False):
956 956 user = User.get_by_username(User.DEFAULT_USER, cache=cache)
957 957 if user is None:
958 958 raise Exception('FATAL: Missing default account!')
959 959 if refresh:
960 960 # The default user might be based on outdated state which
961 961 # has been loaded from the cache.
962 962 # A call to refresh() ensures that the
963 963 # latest state from the database is used.
964 964 Session().refresh(user)
965 965 return user
966 966
967 967 def _get_default_perms(self, user, suffix=''):
968 968 from rhodecode.model.permission import PermissionModel
969 969 return PermissionModel().get_default_perms(user.user_perms, suffix)
970 970
    def get_default_perms(self, suffix=''):
        """Return this user's default permissions (see ``_get_default_perms``)."""
        return self._get_default_perms(self, suffix)
973 973
974 974 def get_api_data(self, include_secrets=False, details='full'):
975 975 """
976 976 Common function for generating user related data for API
977 977
978 978 :param include_secrets: By default secrets in the API data will be replaced
979 979 by a placeholder value to prevent exposing this data by accident. In case
980 980 this data shall be exposed, set this flag to ``True``.
981 981
982 982 :param details: details can be 'basic|full' basic gives only a subset of
983 983 the available user information that includes user_id, name and emails.
984 984 """
985 985 user = self
986 986 user_data = self.user_data
987 987 data = {
988 988 'user_id': user.user_id,
989 989 'username': user.username,
990 990 'firstname': user.name,
991 991 'lastname': user.lastname,
992 992 'email': user.email,
993 993 'emails': user.emails,
994 994 }
995 995 if details == 'basic':
996 996 return data
997 997
998 998 auth_token_length = 40
999 999 auth_token_replacement = '*' * auth_token_length
1000 1000
1001 1001 extras = {
1002 1002 'auth_tokens': [auth_token_replacement],
1003 1003 'active': user.active,
1004 1004 'admin': user.admin,
1005 1005 'extern_type': user.extern_type,
1006 1006 'extern_name': user.extern_name,
1007 1007 'last_login': user.last_login,
1008 1008 'last_activity': user.last_activity,
1009 1009 'ip_addresses': user.ip_addresses,
1010 1010 'language': user_data.get('language')
1011 1011 }
1012 1012 data.update(extras)
1013 1013
1014 1014 if include_secrets:
1015 1015 data['auth_tokens'] = user.auth_tokens
1016 1016 return data
1017 1017
1018 1018 def __json__(self):
1019 1019 data = {
1020 1020 'full_name': self.full_name,
1021 1021 'full_name_or_username': self.full_name_or_username,
1022 1022 'short_contact': self.short_contact,
1023 1023 'full_contact': self.full_contact,
1024 1024 }
1025 1025 data.update(self.get_api_data())
1026 1026 return data
1027 1027
1028 1028
class UserApiKeys(Base, BaseModel):
    """
    Authentication tokens (API keys) issued for a user.

    Each token carries a role restricting what it may be used for, an
    expiration timestamp (``-1`` means never expires), and an optional
    scope limiting it to one repository or repository group.
    """
    __tablename__ = 'user_api_keys'
    __table_args__ = (
        Index('uak_api_key_idx', 'api_key', unique=True),
        Index('uak_api_key_expires_idx', 'api_key', 'expires'),
        base_table_args
    )
    __mapper_args__ = {}

    # ApiKey role
    ROLE_ALL = 'token_role_all'
    ROLE_HTTP = 'token_role_http'
    ROLE_VCS = 'token_role_vcs'
    ROLE_API = 'token_role_api'
    ROLE_FEED = 'token_role_feed'
    ROLE_PASSWORD_RESET = 'token_password_reset'

    # NOTE(review): ROLE_PASSWORD_RESET is not part of ROLES — presumably
    # intentional (not a user-assignable role); confirm before changing.
    ROLES = [ROLE_ALL, ROLE_HTTP, ROLE_VCS, ROLE_API, ROLE_FEED]

    user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    api_key = Column("api_key", String(255), nullable=False, unique=True)
    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    # expires is a unix timestamp; -1 means the token never expires
    expires = Column('expires', Float(53), nullable=False)
    role = Column('role', String(255), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # scope columns
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    user = relationship('User', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s')>" % (self.__class__.__name__, self.role)

    def __json__(self):
        """JSON-serializable representation of this token."""
        data = {
            'auth_token': self.api_key,
            'role': self.role,
            'scope': self.scope_humanized,
            'expired': self.expired
        }
        return data

    def get_api_data(self, include_secrets=False):
        """
        Return token data for the API; the token value is obfuscated
        unless ``include_secrets`` is True.
        """
        data = self.__json__()
        if include_secrets:
            return data
        else:
            data['auth_token'] = self.token_obfuscated
            return data

    @hybrid_property
    def description_safe(self):
        """HTML-escaped description, safe for template rendering."""
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @property
    def expired(self):
        """True once past the expiration timestamp; -1 means never expires."""
        if self.expires == -1:
            return False
        return time.time() > self.expires

    @classmethod
    def _get_role_name(cls, role):
        # map role constant to a translated human-readable label;
        # unknown roles fall through unchanged
        return {
            cls.ROLE_ALL: _('all'),
            cls.ROLE_HTTP: _('http/web interface'),
            cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'),
            cls.ROLE_API: _('api calls'),
            cls.ROLE_FEED: _('feed access'),
        }.get(role, role)

    @property
    def role_humanized(self):
        """Human-readable label for this token's role."""
        return self._get_role_name(self.role)

    def _get_scope(self):
        # a repo scope wins over a repo-group scope; no scope means global
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            return repr(self.repo_group) + ' (recursive)'
        return 'global'

    @property
    def scope_humanized(self):
        """Human-readable description of this token's scope."""
        return self._get_scope()

    @property
    def token_obfuscated(self):
        """First 4 characters of the token plus a mask; None if no token."""
        if self.api_key:
            return self.api_key[:4] + "****"
1129 1129
1130 1130
class UserEmailMap(Base, BaseModel):
    """
    Additional (alternate) email addresses mapped to a user.

    An alternate email must not collide with any user's primary email;
    this is enforced by the ``validate_email`` validator.
    """
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        UniqueConstraint('email'),
        base_table_args
    )
    __mapper_args__ = {}

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined')

    @validates('_email')
    def validate_email(self, key, email):
        """
        Reject emails already used as some user's primary address.

        :raises AttributeError: if ``email`` exists in the users table
        """
        # check if this email is not main one
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            # BUGFIX: corrected the error-message grammar
            # ("is present is user table" -> "is present in user table")
            raise AttributeError('email %s is present in user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # emails are normalized to lower-case on write
        self._email = val.lower() if val else None
1160 1160
1161 1161
class UserIpMap(Base, BaseModel):
    """
    Whitelisted IP addresses (or CIDR ranges) bound to a user.
    """
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        base_table_args
    )
    __mapper_args__ = {}

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined')

    @hybrid_property
    def description_safe(self):
        """HTML-escaped description, safe for template rendering."""
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @classmethod
    def _get_ip_range(cls, ip_addr):
        """Return ``[network_address, broadcast_address]`` for ``ip_addr``."""
        # strict=False accepts host addresses with host bits set
        net = ipaddress.ip_network(safe_unicode(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        """JSON-serializable representation: the address and its range."""
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __unicode__(self):
        return u"<%s('user_id:%s=>%s')>" % (self.__class__.__name__,
                                            self.user_id, self.ip_addr)
1196 1196
1197 1197
class UserSshKeys(Base, BaseModel):
    """
    SSH public keys registered for a user, identified by fingerprint.
    """
    __tablename__ = 'user_ssh_keys'
    __table_args__ = (
        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),

        UniqueConstraint('ssh_key_fingerprint'),

        base_table_args
    )
    __mapper_args__ = {}

    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
    ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)

    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    # accessed_on stays None until the key is first used
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)

    user = relationship('User', lazy='joined')

    def __json__(self):
        """JSON-serializable representation (no key material exposed)."""
        data = {
            'ssh_fingerprint': self.ssh_key_fingerprint,
            'description': self.description,
            'created_on': self.created_on
        }
        return data

    def get_api_data(self):
        """Return the API representation of this key."""
        data = self.__json__()
        return data
1232 1232
1233 1233
class UserLog(Base, BaseModel):
    """
    Audit-log entry recording a user action, optionally tied to a
    repository. User/repository names are denormalized so entries survive
    deletion of the referenced rows (FKs use ``ON DELETE SET NULL``).
    """
    __tablename__ = 'user_logs'
    __table_args__ = (
        base_table_args,
    )

    # schema versions of the serialized action/user data payloads
    VERSION_1 = 'v1'
    VERSION_2 = 'v2'
    VERSIONS = [VERSION_1, VERSION_2]

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    version = Column("version", String(255), nullable=True, default=VERSION_1)
    user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (
            self.__class__.__name__, self.repository_name, self.action)

    def __json__(self):
        """JSON-serializable representation of this log entry."""
        return {
            'user_id': self.user_id,
            'username': self.username,
            'repository_id': self.repository_id,
            'repository_name': self.repository_name,
            'user_ip': self.user_ip,
            'action_date': self.action_date,
            'action': self.action,
        }

    @hybrid_property
    def entry_id(self):
        """Alias for the primary key."""
        return self.user_log_id

    @property
    def action_as_day(self):
        """The action's date truncated to day resolution."""
        return datetime.date(*self.action_date.timetuple()[:3])

    user = relationship('User')
    repository = relationship('Repository', cascade='')
1282 1282
1283 1283
class UserGroup(Base, BaseModel):
    """
    A named collection of users that can be granted permissions as a unit.
    """
    __tablename__ = 'users_groups'
    __table_args__ = (
        base_table_args,
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data

    members = relationship('UserGroupMember', cascade="all, delete, delete-orphan", lazy="joined")
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all')
    # NOTE(review): the trailing space in 'UserGroupUserGroupToPerm ' is kept
    # as-is; SQLAlchemy evaluates the string so it still resolves, but it
    # should be cleaned up in a dedicated change.
    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm ', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all')

    user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all')
    user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id")

    @classmethod
    def _load_group_data(cls, column):
        """Decode the raw JSON ``group_data`` column into a dict."""
        if not column:
            return {}

        try:
            return json.loads(column) or {}
        except TypeError:
            return {}

    @hybrid_property
    def description_safe(self):
        """HTML-escaped description, safe for template rendering."""
        from rhodecode.lib import helpers as h
        return h.escape(self.user_group_description)

    @hybrid_property
    def group_data(self):
        return self._load_group_data(self._group_data)

    @group_data.expression
    def group_data(self, **kwargs):
        return self._group_data

    @group_data.setter
    def group_data(self, val):
        # serialization failures are logged, not raised
        try:
            self._group_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @classmethod
    def _load_sync(cls, group_data):
        # the external sync source is stored under 'extern_type'
        if group_data:
            return group_data.get('extern_type')

    @property
    def sync(self):
        """External sync source of this group, or None."""
        return self._load_sync(self.group_data)

    def __unicode__(self):
        return u"<%s('id:%s:%s')>" % (self.__class__.__name__,
                                      self.users_group_id,
                                      self.users_group_name)

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """Fetch a user group by name, optionally cached/case-insensitive."""
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.users_group_name) ==
                                   func.lower(group_name))

        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            q = q.options(
                FromCache("sql_cache_short", "get_group_%s" % _hash_key(group_name)))
        return q.scalar()

    @classmethod
    def get(cls, user_group_id, cache=False):
        """Fetch a user group by primary key, optionally cached."""
        if not user_group_id:
            return

        user_group = cls.query()
        if cache:
            user_group = user_group.options(
                FromCache("sql_cache_short", "get_users_group_%s" % user_group_id))
        return user_group.get(user_group_id)

    def permissions(self, with_admins=True, with_owner=True):
        """
        Return permission rows for this user group as AttributeDicts,
        optionally prepending super-admin and owner rows.

        :param with_admins: include rows for all super-admin accounts
        :param with_owner: include a row for the group owner
        """
        q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
        q = q.options(joinedload(UserUserGroupToPerm.user_group),
                      joinedload(UserUserGroupToPerm.user),
                      joinedload(UserUserGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        _admin_perm = 'usergroup.admin'
        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                # if this admin is also owner, don't double the record
                # BUGFIX: guard against empty owner_row (with_owner=False),
                # which previously raised IndexError on owner_row[0]
                if owner_row and usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        return super_admin_rows + owner_row + perm_rows

    def permission_user_groups(self):
        """Return permission rows for user groups granted access to this one."""
        q = UserGroupUserGroupToPerm.query().filter(UserGroupUserGroupToPerm.target_user_group == self)
        q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
                      joinedload(UserGroupUserGroupToPerm.target_user_group),
                      joinedload(UserGroupUserGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            usr = AttributeDict(_user_group.user_group.get_dict())
            usr.permission = _user_group.permission.permission_name
            perm_rows.append(usr)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def _get_default_perms(self, user_group, suffix=''):
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)

    def get_default_perms(self, suffix=''):
        """Return this group's default permissions."""
        return self._get_default_perms(self, suffix)

    def get_api_data(self, with_group_members=True, include_secrets=False):
        """
        Build the user-group dict exposed over the API.

        :param with_group_members: include per-member user data
        :param include_secrets: See :meth:`User.get_api_data`, this parameter is
            basically forwarded.
        """
        user_group = self
        data = {
            'users_group_id': user_group.users_group_id,
            'group_name': user_group.users_group_name,
            'group_description': user_group.user_group_description,
            'active': user_group.users_group_active,
            'owner': user_group.user.username,
            'sync': user_group.sync,
            'owner_email': user_group.user.email,
        }

        if with_group_members:
            users = []
            for user in user_group.members:
                user = user.user
                users.append(user.get_api_data(include_secrets=include_secrets))
            data['users'] = users

        return data
1469 1469
1470 1470
class UserGroupMember(Base, BaseModel):
    """
    Association row linking a user to a user group.
    """
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        base_table_args,
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined')
    users_group = relationship('UserGroup')

    def __init__(self, gr_id='', u_id=''):
        # convenience constructor: group id and user id of the membership
        self.users_group_id = gr_id
        self.user_id = u_id
1487 1487
1488 1488
class RepositoryField(Base, BaseModel):
    """
    Arbitrary extra key/value metadata field attached to a repository.
    """
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        base_table_args,
    )

    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository')

    @property
    def field_key_prefixed(self):
        """Field key with the form prefix applied."""
        # CONSISTENCY FIX: use PREFIX instead of a hard-coded 'ex_' so this
        # stays in sync with un_prefix_key() if the prefix ever changes
        return self.PREFIX + self.field_key

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the form prefix from ``key`` if present."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Return the field row for ``key`` on ``repo``, or None."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
1525 1525
1526 1526
class Repository(Base, BaseModel):
    """
    A source-code repository (git/hg/svn) managed by RhodeCode, together
    with its permissions, fork/group relations and cached commit data.
    """
    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        base_table_args,
    )
    # templates used to render clone URLs shown to users
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
    DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'

    # lifecycle states stored in repo_state
    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    # sources that can place a lock on the repository
    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    NAME_SEP = URL_SEP

    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    # backing columns for the repo_name hybrid property below
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    _repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    # remote URIs are stored encrypted at rest
    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    push_uri = Column(
        "push_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    # backing column for the landing_rev hybrid property ('<type>:<rev>')
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    # backing column for the locked hybrid property ('user_id:time:reason')
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    # backing column for the changeset_cache hybrid property
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True)  # JSON data

    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    user = relationship('User', lazy='joined')
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship(
        'UserRepoToPerm', cascade='all',
        order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship(
        'UserFollowing',
        primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id',
        cascade='all')
    extra_fields = relationship(
        'RepositoryField', cascade="all, delete, delete-orphan")
    logs = relationship('UserLog')
    comments = relationship(
        'ChangesetComment', cascade="all, delete, delete-orphan")
    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete, delete-orphan")
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete, delete-orphan")
    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
    integrations = relationship('Integration',
                                cascade="all, delete, delete-orphan")

    # auth tokens scoped to this single repository
    scoped_tokens = relationship('UserApiKeys', cascade="all")
1632 1632
1633 1633 def __unicode__(self):
1634 1634 return u"<%s('%s:%s')>" % (self.__class__.__name__, self.repo_id,
1635 1635 safe_unicode(self.repo_name))
1636 1636
    @hybrid_property
    def description_safe(self):
        """HTML-escaped repository description, safe for templates."""
        from rhodecode.lib import helpers as h
        return h.escape(self.description)
1641 1641
1642 1642 @hybrid_property
1643 1643 def landing_rev(self):
1644 1644 # always should return [rev_type, rev]
1645 1645 if self._landing_revision:
1646 1646 _rev_info = self._landing_revision.split(':')
1647 1647 if len(_rev_info) < 2:
1648 1648 _rev_info.insert(0, 'rev')
1649 1649 return [_rev_info[0], _rev_info[1]]
1650 1650 return [None, None]
1651 1651
    @landing_rev.setter
    def landing_rev(self, val):
        """Set the landing revision from a ``<rev_type>:<rev>`` string."""
        if ':' not in val:
            raise ValueError('value must be delimited with `:` and consist '
                             'of <rev_type>:<rev>, got %s instead' % val)
        self._landing_revision = val
1658 1658
1659 1659 @hybrid_property
1660 1660 def locked(self):
1661 1661 if self._locked:
1662 1662 user_id, timelocked, reason = self._locked.split(':')
1663 1663 lock_values = int(user_id), timelocked, reason
1664 1664 else:
1665 1665 lock_values = [None, None, None]
1666 1666 return lock_values
1667 1667
1668 1668 @locked.setter
1669 1669 def locked(self, val):
1670 1670 if val and isinstance(val, (list, tuple)):
1671 1671 self._locked = ':'.join(map(str, val))
1672 1672 else:
1673 1673 self._locked = None
1674 1674
1675 1675 @hybrid_property
1676 1676 def changeset_cache(self):
1677 1677 from rhodecode.lib.vcs.backends.base import EmptyCommit
1678 1678 dummy = EmptyCommit().__json__()
1679 1679 if not self._changeset_cache:
1680 1680 return dummy
1681 1681 try:
1682 1682 return json.loads(self._changeset_cache)
1683 1683 except TypeError:
1684 1684 return dummy
1685 1685 except Exception:
1686 1686 log.error(traceback.format_exc())
1687 1687 return dummy
1688 1688
1689 1689 @changeset_cache.setter
1690 1690 def changeset_cache(self, val):
1691 1691 try:
1692 1692 self._changeset_cache = json.dumps(val)
1693 1693 except Exception:
1694 1694 log.error(traceback.format_exc())
1695 1695
1696 1696 @hybrid_property
1697 1697 def repo_name(self):
1698 1698 return self._repo_name
1699 1699
1700 1700 @repo_name.setter
1701 1701 def repo_name(self, value):
1702 1702 self._repo_name = value
1703 1703 self._repo_name_hash = hashlib.sha1(safe_str(value)).hexdigest()
1704 1704
1705 1705 @classmethod
1706 1706 def normalize_repo_name(cls, repo_name):
1707 1707 """
1708 1708 Normalizes os specific repo_name to the format internally stored inside
1709 1709 database using URL_SEP
1710 1710
1711 1711 :param cls:
1712 1712 :param repo_name:
1713 1713 """
1714 1714 return cls.NAME_SEP.join(repo_name.split(os.sep))
1715 1715
    @classmethod
    def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
        """
        Fetch a Repository row by exact name.

        :param repo_name: repository name as stored in the DB
        :param cache: when True, use a cache layer for the lookup
        :param identity_cache: with cache=True, prefer the SQLAlchemy
            identity-map based cache over the dogpile SQL query cache
        :return: Repository instance or None
        """
        session = Session()
        q = session.query(cls).filter(cls.repo_name == repo_name)

        if cache:
            if identity_cache:
                # try session identity map first; falls through to plain
                # query below when nothing is cached
                val = cls.identity_cache(session, 'repo_name', repo_name)
                if val:
                    return val
            else:
                cache_key = "get_repo_by_name_%s" % _hash_key(repo_name)
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()
1732 1732
1733 1733 @classmethod
1734 1734 def get_by_id_or_repo_name(cls, repoid):
1735 1735 if isinstance(repoid, (int, long)):
1736 1736 try:
1737 1737 repo = cls.get(repoid)
1738 1738 except ValueError:
1739 1739 repo = None
1740 1740 else:
1741 1741 repo = cls.get_by_repo_name(repoid)
1742 1742 return repo
1743 1743
1744 1744 @classmethod
1745 1745 def get_by_full_path(cls, repo_full_path):
1746 1746 repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
1747 1747 repo_name = cls.normalize_repo_name(repo_name)
1748 1748 return cls.get_by_repo_name(repo_name.strip(URL_SEP))
1749 1749
1750 1750 @classmethod
1751 1751 def get_repo_forks(cls, repo_id):
1752 1752 return cls.query().filter(Repository.fork_id == repo_id)
1753 1753
    @classmethod
    def base_path(cls):
        """
        Returns base path when all repos are stored

        :param cls:
        :return: filesystem base path under which repositories live
        """
        # the base path is stored as the RhodeCodeUi row keyed by NAME_SEP;
        # cached via dogpile since it changes essentially never
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == cls.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value
1765 1765
1766 1766 @classmethod
1767 1767 def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
1768 1768 case_insensitive=True):
1769 1769 q = Repository.query()
1770 1770
1771 1771 if not isinstance(user_id, Optional):
1772 1772 q = q.filter(Repository.user_id == user_id)
1773 1773
1774 1774 if not isinstance(group_id, Optional):
1775 1775 q = q.filter(Repository.group_id == group_id)
1776 1776
1777 1777 if case_insensitive:
1778 1778 q = q.order_by(func.lower(Repository.repo_name))
1779 1779 else:
1780 1780 q = q.order_by(Repository.repo_name)
1781 1781 return q.all()
1782 1782
1783 1783 @property
1784 1784 def forks(self):
1785 1785 """
1786 1786 Return forks of this repo
1787 1787 """
1788 1788 return Repository.get_repo_forks(self.repo_id)
1789 1789
1790 1790 @property
1791 1791 def parent(self):
1792 1792 """
1793 1793 Returns fork parent
1794 1794 """
1795 1795 return self.fork
1796 1796
1797 1797 @property
1798 1798 def just_name(self):
1799 1799 return self.repo_name.split(self.NAME_SEP)[-1]
1800 1800
1801 1801 @property
1802 1802 def groups_with_parents(self):
1803 1803 groups = []
1804 1804 if self.group is None:
1805 1805 return groups
1806 1806
1807 1807 cur_gr = self.group
1808 1808 groups.insert(0, cur_gr)
1809 1809 while 1:
1810 1810 gr = getattr(cur_gr, 'parent_group', None)
1811 1811 cur_gr = cur_gr.parent_group
1812 1812 if gr is None:
1813 1813 break
1814 1814 groups.insert(0, gr)
1815 1815
1816 1816 return groups
1817 1817
1818 1818 @property
1819 1819 def groups_and_repo(self):
1820 1820 return self.groups_with_parents, self
1821 1821
    @LazyProperty
    def repo_path(self):
        """
        Returns base full path for that repository means where it actually
        exists on a filesystem
        """
        # same lookup (and same dogpile cache key) as Repository.base_path;
        # LazyProperty memoizes it per instance
        q = Session().query(RhodeCodeUi).filter(
            RhodeCodeUi.ui_key == self.NAME_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return q.one().ui_value
1832 1832
1833 1833 @property
1834 1834 def repo_full_path(self):
1835 1835 p = [self.repo_path]
1836 1836 # we need to split the name by / since this is how we store the
1837 1837 # names in the database, but that eventually needs to be converted
1838 1838 # into a valid system path
1839 1839 p += self.repo_name.split(self.NAME_SEP)
1840 1840 return os.path.join(*map(safe_unicode, p))
1841 1841
    @property
    def cache_keys(self):
        """
        Returns associated cache keys for that repo

        :return: list of CacheKey rows registered under this repo's
            invalidation namespace, ordered by key
        """
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        return CacheKey.query()\
            .filter(CacheKey.cache_args == invalidation_namespace)\
            .order_by(CacheKey.cache_key)\
            .all()
1853 1853
1854 1854 @property
1855 1855 def cached_diffs_relative_dir(self):
1856 1856 """
1857 1857 Return a relative to the repository store path of cached diffs
1858 1858 used for safe display for users, who shouldn't know the absolute store
1859 1859 path
1860 1860 """
1861 1861 return os.path.join(
1862 1862 os.path.dirname(self.repo_name),
1863 1863 self.cached_diffs_dir.split(os.path.sep)[-1])
1864 1864
1865 1865 @property
1866 1866 def cached_diffs_dir(self):
1867 1867 path = self.repo_full_path
1868 1868 return os.path.join(
1869 1869 os.path.dirname(path),
1870 1870 '.__shadow_diff_cache_repo_{}'.format(self.repo_id))
1871 1871
1872 1872 def cached_diffs(self):
1873 1873 diff_cache_dir = self.cached_diffs_dir
1874 1874 if os.path.isdir(diff_cache_dir):
1875 1875 return os.listdir(diff_cache_dir)
1876 1876 return []
1877 1877
1878 1878 def shadow_repos(self):
1879 1879 shadow_repos_pattern = '.__shadow_repo_{}'.format(self.repo_id)
1880 1880 return [
1881 1881 x for x in os.listdir(os.path.dirname(self.repo_full_path))
1882 1882 if x.startswith(shadow_repos_pattern)]
1883 1883
1884 1884 def get_new_name(self, repo_name):
1885 1885 """
1886 1886 returns new full repository name based on assigned group and new new
1887 1887
1888 1888 :param group_name:
1889 1889 """
1890 1890 path_prefix = self.group.full_path_splitted if self.group else []
1891 1891 return self.NAME_SEP.join(path_prefix + [repo_name])
1892 1892
    @property
    def _config(self):
        """
        Returns db based config object.
        """
        from rhodecode.lib.utils import make_db_config
        # clear_session=False: caller owns the session lifecycle
        return make_db_config(clear_session=False, repo=self)
1900 1900
1901 1901 def permissions(self, with_admins=True, with_owner=True):
1902 1902 q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self)
1903 1903 q = q.options(joinedload(UserRepoToPerm.repository),
1904 1904 joinedload(UserRepoToPerm.user),
1905 1905 joinedload(UserRepoToPerm.permission),)
1906 1906
1907 1907 # get owners and admins and permissions. We do a trick of re-writing
1908 1908 # objects from sqlalchemy to named-tuples due to sqlalchemy session
1909 1909 # has a global reference and changing one object propagates to all
1910 1910 # others. This means if admin is also an owner admin_row that change
1911 1911 # would propagate to both objects
1912 1912 perm_rows = []
1913 1913 for _usr in q.all():
1914 1914 usr = AttributeDict(_usr.user.get_dict())
1915 1915 usr.permission = _usr.permission.permission_name
1916 1916 perm_rows.append(usr)
1917 1917
1918 1918 # filter the perm rows by 'default' first and then sort them by
1919 1919 # admin,write,read,none permissions sorted again alphabetically in
1920 1920 # each group
1921 1921 perm_rows = sorted(perm_rows, key=display_user_sort)
1922 1922
1923 1923 _admin_perm = 'repository.admin'
1924 1924 owner_row = []
1925 1925 if with_owner:
1926 1926 usr = AttributeDict(self.user.get_dict())
1927 1927 usr.owner_row = True
1928 1928 usr.permission = _admin_perm
1929 1929 owner_row.append(usr)
1930 1930
1931 1931 super_admin_rows = []
1932 1932 if with_admins:
1933 1933 for usr in User.get_all_super_admins():
1934 1934 # if this admin is also owner, don't double the record
1935 1935 if usr.user_id == owner_row[0].user_id:
1936 1936 owner_row[0].admin_row = True
1937 1937 else:
1938 1938 usr = AttributeDict(usr.get_dict())
1939 1939 usr.admin_row = True
1940 1940 usr.permission = _admin_perm
1941 1941 super_admin_rows.append(usr)
1942 1942
1943 1943 return super_admin_rows + owner_row + perm_rows
1944 1944
1945 1945 def permission_user_groups(self):
1946 1946 q = UserGroupRepoToPerm.query().filter(
1947 1947 UserGroupRepoToPerm.repository == self)
1948 1948 q = q.options(joinedload(UserGroupRepoToPerm.repository),
1949 1949 joinedload(UserGroupRepoToPerm.users_group),
1950 1950 joinedload(UserGroupRepoToPerm.permission),)
1951 1951
1952 1952 perm_rows = []
1953 1953 for _user_group in q.all():
1954 1954 usr = AttributeDict(_user_group.users_group.get_dict())
1955 1955 usr.permission = _user_group.permission.permission_name
1956 1956 perm_rows.append(usr)
1957 1957
1958 1958 perm_rows = sorted(perm_rows, key=display_user_group_sort)
1959 1959 return perm_rows
1960 1960
    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data

        :param include_secrets: See :meth:`User.get_api_data`.

        :return: dict of repository attributes for the JSON-RPC API,
            extended with custom fields when those are enabled
        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move this methods on models level.
        from rhodecode.model.settings import SettingsModel
        from rhodecode.model.repo import RepoModel

        repo = self
        # locked is [user_id, lock_time, reason] (None-filled when unlocked)
        _user_id, _time, _reason = self.locked

        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'push_uri': repo.push_uri or '',
            'url': RepoModel().get_url(self),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description_safe,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'fork_of_id': repo.fork.repo_id if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        if repository_fields:
            # expose the repo's custom extra fields under their prefixed keys
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data
2009 2009
2010 2010 @classmethod
2011 2011 def lock(cls, repo, user_id, lock_time=None, lock_reason=None):
2012 2012 if not lock_time:
2013 2013 lock_time = time.time()
2014 2014 if not lock_reason:
2015 2015 lock_reason = cls.LOCK_AUTOMATIC
2016 2016 repo.locked = [user_id, lock_time, lock_reason]
2017 2017 Session().add(repo)
2018 2018 Session().commit()
2019 2019
2020 2020 @classmethod
2021 2021 def unlock(cls, repo):
2022 2022 repo.locked = None
2023 2023 Session().add(repo)
2024 2024 Session().commit()
2025 2025
2026 2026 @classmethod
2027 2027 def getlock(cls, repo):
2028 2028 return repo.locked
2029 2029
2030 2030 def is_user_lock(self, user_id):
2031 2031 if self.lock[0]:
2032 2032 lock_user_id = safe_int(self.lock[0])
2033 2033 user_id = safe_int(user_id)
2034 2034 # both are ints, and they are equal
2035 2035 return all([lock_user_id, user_id]) and lock_user_id == user_id
2036 2036
2037 2037 return False
2038 2038
    def get_locking_state(self, action, user_id, only_when_enabled=True):
        """
        Checks locking on this repository, if locking is enabled and lock is
        present returns a tuple of make_lock, locked, locked_by.
        make_lock can have 3 states None (do nothing) True, make lock
        False release lock, This value is later propagated to hooks, which
        do the locking. Think about this as signals passed to hooks what to do.

        :param action: 'push' or 'pull'
        :param user_id: id of the user performing the action
        :param only_when_enabled: when False, evaluate even if repo-level
            locking is disabled
        :raises ValueError: on any other action value
        """
        # TODO: johbo: This is part of the business logic and should be moved
        # into the RepositoryModel.

        if action not in ('push', 'pull'):
            raise ValueError("Invalid action value: %s" % repr(action))

        # defines if locked error should be thrown to user
        currently_locked = False
        # defines if new lock should be made, tri-state
        make_lock = None
        repo = self
        user = User.get(user_id)

        # [user_id, lock_time, reason] triple (None-filled when unlocked)
        lock_info = repo.locked

        if repo and (repo.enable_locking or not only_when_enabled):
            if action == 'push':
                # check if it's already locked !, if it is compare users
                locked_by_user_id = lock_info[0]
                if user.user_id == locked_by_user_id:
                    log.debug(
                        'Got `push` action from user %s, now unlocking', user)
                    # unlock if we have push from user who locked
                    make_lock = False
                else:
                    # we're not the same user who locked, ban with
                    # code defined in settings (default is 423 HTTP Locked) !
                    # NOTE(review): logs the acting user, not the lock
                    # owner — presumably intentional; confirm
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
            elif action == 'pull':
                # [0] user [1] date
                if lock_info[0] and lock_info[1]:
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
                else:
                    log.debug('Setting lock on repo %s by %s', repo, user)
                    make_lock = True

        else:
            log.debug('Repository %s do not have locking enabled', repo)

        log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
                  make_lock, currently_locked, lock_info)

        from rhodecode.lib.auth import HasRepoPermissionAny
        perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
        if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
            # if we don't have at least write permission we cannot make a lock
            log.debug('lock state reset back to FALSE due to lack '
                      'of at least read permission')
            make_lock = False

        return make_lock, currently_locked, lock_info
2101 2101
2102 2102 @property
2103 2103 def last_db_change(self):
2104 2104 return self.updated_on
2105 2105
2106 2106 @property
2107 2107 def clone_uri_hidden(self):
2108 2108 clone_uri = self.clone_uri
2109 2109 if clone_uri:
2110 2110 import urlobject
2111 2111 url_obj = urlobject.URLObject(cleaned_uri(clone_uri))
2112 2112 if url_obj.password:
2113 2113 clone_uri = url_obj.with_password('*****')
2114 2114 return clone_uri
2115 2115
2116 2116 @property
2117 2117 def push_uri_hidden(self):
2118 2118 push_uri = self.push_uri
2119 2119 if push_uri:
2120 2120 import urlobject
2121 2121 url_obj = urlobject.URLObject(cleaned_uri(push_uri))
2122 2122 if url_obj.password:
2123 2123 push_uri = url_obj.with_password('*****')
2124 2124 return push_uri
2125 2125
2126 2126 def clone_url(self, **override):
2127 2127 from rhodecode.model.settings import SettingsModel
2128 2128
2129 2129 uri_tmpl = None
2130 2130 if 'with_id' in override:
2131 2131 uri_tmpl = self.DEFAULT_CLONE_URI_ID
2132 2132 del override['with_id']
2133 2133
2134 2134 if 'uri_tmpl' in override:
2135 2135 uri_tmpl = override['uri_tmpl']
2136 2136 del override['uri_tmpl']
2137 2137
2138 2138 ssh = False
2139 2139 if 'ssh' in override:
2140 2140 ssh = True
2141 2141 del override['ssh']
2142 2142
2143 2143 # we didn't override our tmpl from **overrides
2144 2144 if not uri_tmpl:
2145 2145 rc_config = SettingsModel().get_all_settings(cache=True)
2146 2146 if ssh:
2147 2147 uri_tmpl = rc_config.get(
2148 2148 'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH
2149 2149 else:
2150 2150 uri_tmpl = rc_config.get(
2151 2151 'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI
2152 2152
2153 2153 request = get_current_request()
2154 2154 return get_clone_url(request=request,
2155 2155 uri_tmpl=uri_tmpl,
2156 2156 repo_name=self.repo_name,
2157 2157 repo_id=self.repo_id, **override)
2158 2158
2159 2159 def set_state(self, state):
2160 2160 self.repo_state = state
2161 2161 Session().add(self)
2162 2162 #==========================================================================
2163 2163 # SCM PROPERTIES
2164 2164 #==========================================================================
2165 2165
2166 2166 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
2167 2167 return get_commit_safe(
2168 2168 self.scm_instance(), commit_id, commit_idx, pre_load=pre_load)
2169 2169
2170 2170 def get_changeset(self, rev=None, pre_load=None):
2171 2171 warnings.warn("Use get_commit", DeprecationWarning)
2172 2172 commit_id = None
2173 2173 commit_idx = None
2174 2174 if isinstance(rev, basestring):
2175 2175 commit_id = rev
2176 2176 else:
2177 2177 commit_idx = rev
2178 2178 return self.get_commit(commit_id=commit_id, commit_idx=commit_idx,
2179 2179 pre_load=pre_load)
2180 2180
2181 2181 def get_landing_commit(self):
2182 2182 """
2183 2183 Returns landing commit, or if that doesn't exist returns the tip
2184 2184 """
2185 2185 _rev_type, _rev = self.landing_rev
2186 2186 commit = self.get_commit(_rev)
2187 2187 if isinstance(commit, EmptyCommit):
2188 2188 return self.get_commit()
2189 2189 return commit
2190 2190
    def update_commit_cache(self, cs_cache=None, config=None):
        """
        Update cache of last changeset for repository, keys should be::

            short_id
            raw_id
            revision
            parents
            message
            date
            author

        :param cs_cache: pre-computed commit data dict (or BaseChangeset);
            when None, the tip is fetched from a non-cached scm instance
        :param config: optional vcs config passed to scm_instance
        """
        from rhodecode.lib.vcs.backends.base import BaseChangeset
        if cs_cache is None:
            # use no-cache version here
            scm_repo = self.scm_instance(cache=False, config=config)
            if scm_repo:
                cs_cache = scm_repo.get_commit(
                    pre_load=["author", "date", "message", "parents"])
            else:
                cs_cache = EmptyCommit()

        if isinstance(cs_cache, BaseChangeset):
            cs_cache = cs_cache.__json__()

        def is_outdated(new_cs_cache):
            # outdated when either the commit hash or its index changed
            if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or
                new_cs_cache['revision'] != self.changeset_cache['revision']):
                return True
            return False

        # check if we have maybe already latest cached revision
        if is_outdated(cs_cache) or not self.changeset_cache:
            _default = datetime.datetime.utcnow()
            last_change = cs_cache.get('date') or _default
            if self.updated_on and self.updated_on > last_change:
                # we check if last update is newer than the new value
                # if yes, we use the current timestamp instead. Imagine you get
                # old commit pushed 1y ago, we'd set last update 1y to ago.
                last_change = _default
            log.debug('updated repo %s with new cs cache %s',
                      self.repo_name, cs_cache)
            self.updated_on = last_change
            self.changeset_cache = cs_cache
            Session().add(self)
            Session().commit()
        else:
            log.debug('Skipping update_commit_cache for repo:`%s` '
                      'commit already with latest changes', self.repo_name)
2242 2242
2243 2243 @property
2244 2244 def tip(self):
2245 2245 return self.get_commit('tip')
2246 2246
2247 2247 @property
2248 2248 def author(self):
2249 2249 return self.tip.author
2250 2250
2251 2251 @property
2252 2252 def last_change(self):
2253 2253 return self.scm_instance().last_change
2254 2254
2255 2255 def get_comments(self, revisions=None):
2256 2256 """
2257 2257 Returns comments for this repository grouped by revisions
2258 2258
2259 2259 :param revisions: filter query by revisions only
2260 2260 """
2261 2261 cmts = ChangesetComment.query()\
2262 2262 .filter(ChangesetComment.repo == self)
2263 2263 if revisions:
2264 2264 cmts = cmts.filter(ChangesetComment.revision.in_(revisions))
2265 2265 grouped = collections.defaultdict(list)
2266 2266 for cmt in cmts.all():
2267 2267 grouped[cmt.revision].append(cmt)
2268 2268 return grouped
2269 2269
    def statuses(self, revisions=None):
        """
        Returns statuses for this repository

        :param revisions: list of revisions to get statuses for
        :return: dict of revision -> [status, status_label, pr_id, pr_repo]
        """
        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)

        if revisions:
            # Try doing the filtering in chunks to avoid hitting limits
            size = 500
            status_results = []
            for chunk in xrange(0, len(revisions), size):
                status_results += statuses.filter(
                    ChangesetStatus.revision.in_(
                        revisions[chunk: chunk+size])
                ).all()
        else:
            status_results = statuses.all()

        grouped = {}

        # maybe we have open new pullrequest without a status?
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.target_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        # real statuses overwrite the implicit under-review entries above
        for stat in status_results:
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.target_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped
2311 2311
    # ==========================================================================
    # SCM CACHE INSTANCE
    # ==========================================================================

    def scm_instance(self, **kwargs):
        """
        Return a (possibly cached) vcs backend instance for this repository.

        :param config: when given, bypasses the cache (used by repo2dbmapper)
        :param cache: tri-state; None defers to the global vcs_full_cache flag
        """
        import rhodecode

        # Passing a config will not hit the cache currently only used
        # for repo2dbmapper
        config = kwargs.pop('config', None)
        cache = kwargs.pop('cache', None)
        full_cache = str2bool(rhodecode.CONFIG.get('vcs_full_cache'))
        # if cache is NOT defined use default global, else we have a full
        # control over cache behaviour
        if cache is None and full_cache and not config:
            return self._get_instance_cached()
        return self._get_instance(cache=bool(cache), config=config)
2329 2329
    def _get_instance_cached(self):
        """
        Return the vcs instance via the dogpile long-term cache, honouring
        the repo's invalidation namespace.
        """
        from rhodecode.lib import rc_cache

        cache_namespace_uid = 'cache_repo_instance.{}'.format(self.repo_id)
        invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
            repo_id=self.repo_id)
        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def get_instance_cached(repo_id):
            # repo_id is only the cache key; the instance comes from self
            return self._get_instance()

        # we must use thread scoped cache here,
        # because each thread of gevent needs it's own connection and cache
        inv_context_manager = rc_cache.InvalidationContext(
            uid=cache_namespace_uid, invalidation_namespace=invalidation_namespace,
            thread_scoped=True)
        with inv_context_manager as invalidation_context:
            args = (self.repo_id,)
            # re-compute and store cache if we get invalidate signal
            if invalidation_context.should_invalidate():
                instance = get_instance_cached.refresh(*args)
            else:
                instance = get_instance_cached(*args)

        log.debug(
            'Repo instance fetched in %.3fs', inv_context_manager.compute_time)
        return instance
2355 2358
2356 2359 def _get_instance(self, cache=True, config=None):
2357 2360 config = config or self._config
2358 2361 custom_wire = {
2359 2362 'cache': cache # controls the vcs.remote cache
2360 2363 }
2361 2364 repo = get_vcs_instance(
2362 2365 repo_path=safe_str(self.repo_full_path),
2363 2366 config=config,
2364 2367 with_wire=custom_wire,
2365 2368 create=False,
2366 2369 _vcs_alias=self.repo_type)
2367 2370
2368 2371 return repo
2369 2372
2370 2373 def __json__(self):
2371 2374 return {'landing_rev': self.landing_rev}
2372 2375
2373 2376 def get_dict(self):
2374 2377
2375 2378 # Since we transformed `repo_name` to a hybrid property, we need to
2376 2379 # keep compatibility with the code which uses `repo_name` field.
2377 2380
2378 2381 result = super(Repository, self).get_dict()
2379 2382 result['repo_name'] = result.pop('_repo_name', None)
2380 2383 return result
2381 2384
2382 2385
class RepoGroup(Base, BaseModel):
    """Nested grouping (folder) of repositories; groups form a tree via
    group_parent_id."""
    __tablename__ = 'groups'
    __table_args__ = (
        UniqueConstraint('group_name', 'group_parent_id'),
        CheckConstraint('group_id != group_parent_id'),
        base_table_args,
    )
    __mapper_args__ = {'order_by': 'group_name'}

    CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    # marks a user's personal repository group
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all')
    # self-referential relation building the group tree
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User')
    integrations = relationship('Integration',
                                cascade="all, delete, delete-orphan")
2410 2413
2411 2414 def __init__(self, group_name='', parent_group=None):
2412 2415 self.group_name = group_name
2413 2416 self.parent_group = parent_group
2414 2417
2415 2418 def __unicode__(self):
2416 2419 return u"<%s('id:%s:%s')>" % (
2417 2420 self.__class__.__name__, self.group_id, self.group_name)
2418 2421
2419 2422 @hybrid_property
2420 2423 def description_safe(self):
2421 2424 from rhodecode.lib import helpers as h
2422 2425 return h.escape(self.group_description)
2423 2426
2424 2427 @classmethod
2425 2428 def _generate_choice(cls, repo_group):
2426 2429 from webhelpers.html import literal as _literal
2427 2430 _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k))
2428 2431 return repo_group.group_id, _name(repo_group.full_path_splitted)
2429 2432
2430 2433 @classmethod
2431 2434 def groups_choices(cls, groups=None, show_empty_group=True):
2432 2435 if not groups:
2433 2436 groups = cls.query().all()
2434 2437
2435 2438 repo_groups = []
2436 2439 if show_empty_group:
2437 2440 repo_groups = [(-1, u'-- %s --' % _('No parent'))]
2438 2441
2439 2442 repo_groups.extend([cls._generate_choice(x) for x in groups])
2440 2443
2441 2444 repo_groups = sorted(
2442 2445 repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0])
2443 2446 return repo_groups
2444 2447
2445 2448 @classmethod
2446 2449 def url_sep(cls):
2447 2450 return URL_SEP
2448 2451
    @classmethod
    def get_by_group_name(cls, group_name, cache=False, case_insensitive=False):
        """
        Fetch a RepoGroup row by name.

        :param group_name: full group name (with parent path)
        :param cache: when True, wrap the query in the dogpile SQL cache
        :param case_insensitive: compare names case-insensitively
        :return: RepoGroup instance or None
        """
        if case_insensitive:
            gr = cls.query().filter(func.lower(cls.group_name)
                                    == func.lower(group_name))
        else:
            gr = cls.query().filter(cls.group_name == group_name)
        if cache:
            name_key = _hash_key(group_name)
            gr = gr.options(
                FromCache("sql_cache_short", "get_group_%s" % name_key))
        return gr.scalar()
2461 2464
2462 2465 @classmethod
2463 2466 def get_user_personal_repo_group(cls, user_id):
2464 2467 user = User.get(user_id)
2465 2468 if user.username == User.DEFAULT_USER:
2466 2469 return None
2467 2470
2468 2471 return cls.query()\
2469 2472 .filter(cls.personal == true()) \
2470 2473 .filter(cls.user == user).scalar()
2471 2474
2472 2475 @classmethod
2473 2476 def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None),
2474 2477 case_insensitive=True):
2475 2478 q = RepoGroup.query()
2476 2479
2477 2480 if not isinstance(user_id, Optional):
2478 2481 q = q.filter(RepoGroup.user_id == user_id)
2479 2482
2480 2483 if not isinstance(group_id, Optional):
2481 2484 q = q.filter(RepoGroup.group_parent_id == group_id)
2482 2485
2483 2486 if case_insensitive:
2484 2487 q = q.order_by(func.lower(RepoGroup.group_name))
2485 2488 else:
2486 2489 q = q.order_by(RepoGroup.group_name)
2487 2490 return q.all()
2488 2491
2489 2492 @property
2490 2493 def parents(self):
2491 2494 parents_recursion_limit = 10
2492 2495 groups = []
2493 2496 if self.parent_group is None:
2494 2497 return groups
2495 2498 cur_gr = self.parent_group
2496 2499 groups.insert(0, cur_gr)
2497 2500 cnt = 0
2498 2501 while 1:
2499 2502 cnt += 1
2500 2503 gr = getattr(cur_gr, 'parent_group', None)
2501 2504 cur_gr = cur_gr.parent_group
2502 2505 if gr is None:
2503 2506 break
2504 2507 if cnt == parents_recursion_limit:
2505 2508 # this will prevent accidental infinit loops
2506 2509 log.error(('more than %s parents found for group %s, stopping '
2507 2510 'recursive parent fetching' % (parents_recursion_limit, self)))
2508 2511 break
2509 2512
2510 2513 groups.insert(0, gr)
2511 2514 return groups
2512 2515
    @property
    def last_db_change(self):
        # timestamp of the most recent database modification of this group
        return self.updated_on
2516 2519
    @property
    def children(self):
        # query yielding the direct (non-recursive) child groups of this group
        return RepoGroup.query().filter(RepoGroup.parent_group == self)
2520 2523
2521 2524 @property
2522 2525 def name(self):
2523 2526 return self.group_name.split(RepoGroup.url_sep())[-1]
2524 2527
    @property
    def full_path(self):
        # the stored group name already is the full path
        return self.group_name
2528 2531
    @property
    def full_path_splitted(self):
        # full path broken into its individual group-name segments
        return self.group_name.split(RepoGroup.url_sep())
2532 2535
    @property
    def repositories(self):
        # query of repositories directly inside this group, ordered by name
        return Repository.query()\
            .filter(Repository.group == self)\
            .order_by(Repository.repo_name)
2538 2541
2539 2542 @property
2540 2543 def repositories_recursive_count(self):
2541 2544 cnt = self.repositories.count()
2542 2545
2543 2546 def children_count(group):
2544 2547 cnt = 0
2545 2548 for child in group.children:
2546 2549 cnt += child.repositories.count()
2547 2550 cnt += children_count(child)
2548 2551 return cnt
2549 2552
2550 2553 return cnt + children_count(self)
2551 2554
2552 2555 def _recursive_objects(self, include_repos=True):
2553 2556 all_ = []
2554 2557
2555 2558 def _get_members(root_gr):
2556 2559 if include_repos:
2557 2560 for r in root_gr.repositories:
2558 2561 all_.append(r)
2559 2562 childs = root_gr.children.all()
2560 2563 if childs:
2561 2564 for gr in childs:
2562 2565 all_.append(gr)
2563 2566 _get_members(gr)
2564 2567
2565 2568 _get_members(self)
2566 2569 return [self] + all_
2567 2570
    def recursive_groups_and_repos(self):
        """
        Recursively return all descendant groups, together with the
        repositories contained in those groups (and this group itself).
        """
        return self._recursive_objects()
2573 2576
    def recursive_groups(self):
        """
        Returns all children groups for this group including children of
        children, without any repositories.
        """
        return self._recursive_objects(include_repos=False)
2579 2582
2580 2583 def get_new_name(self, group_name):
2581 2584 """
2582 2585 returns new full group name based on parent and new name
2583 2586
2584 2587 :param group_name:
2585 2588 """
2586 2589 path_prefix = (self.parent_group.full_path_splitted if
2587 2590 self.parent_group else [])
2588 2591 return RepoGroup.url_sep().join(path_prefix + [group_name])
2589 2592
2590 2593 def permissions(self, with_admins=True, with_owner=True):
2591 2594 q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self)
2592 2595 q = q.options(joinedload(UserRepoGroupToPerm.group),
2593 2596 joinedload(UserRepoGroupToPerm.user),
2594 2597 joinedload(UserRepoGroupToPerm.permission),)
2595 2598
2596 2599 # get owners and admins and permissions. We do a trick of re-writing
2597 2600 # objects from sqlalchemy to named-tuples due to sqlalchemy session
2598 2601 # has a global reference and changing one object propagates to all
2599 2602 # others. This means if admin is also an owner admin_row that change
2600 2603 # would propagate to both objects
2601 2604 perm_rows = []
2602 2605 for _usr in q.all():
2603 2606 usr = AttributeDict(_usr.user.get_dict())
2604 2607 usr.permission = _usr.permission.permission_name
2605 2608 perm_rows.append(usr)
2606 2609
2607 2610 # filter the perm rows by 'default' first and then sort them by
2608 2611 # admin,write,read,none permissions sorted again alphabetically in
2609 2612 # each group
2610 2613 perm_rows = sorted(perm_rows, key=display_user_sort)
2611 2614
2612 2615 _admin_perm = 'group.admin'
2613 2616 owner_row = []
2614 2617 if with_owner:
2615 2618 usr = AttributeDict(self.user.get_dict())
2616 2619 usr.owner_row = True
2617 2620 usr.permission = _admin_perm
2618 2621 owner_row.append(usr)
2619 2622
2620 2623 super_admin_rows = []
2621 2624 if with_admins:
2622 2625 for usr in User.get_all_super_admins():
2623 2626 # if this admin is also owner, don't double the record
2624 2627 if usr.user_id == owner_row[0].user_id:
2625 2628 owner_row[0].admin_row = True
2626 2629 else:
2627 2630 usr = AttributeDict(usr.get_dict())
2628 2631 usr.admin_row = True
2629 2632 usr.permission = _admin_perm
2630 2633 super_admin_rows.append(usr)
2631 2634
2632 2635 return super_admin_rows + owner_row + perm_rows
2633 2636
2634 2637 def permission_user_groups(self):
2635 2638 q = UserGroupRepoGroupToPerm.query().filter(UserGroupRepoGroupToPerm.group == self)
2636 2639 q = q.options(joinedload(UserGroupRepoGroupToPerm.group),
2637 2640 joinedload(UserGroupRepoGroupToPerm.users_group),
2638 2641 joinedload(UserGroupRepoGroupToPerm.permission),)
2639 2642
2640 2643 perm_rows = []
2641 2644 for _user_group in q.all():
2642 2645 usr = AttributeDict(_user_group.users_group.get_dict())
2643 2646 usr.permission = _user_group.permission.permission_name
2644 2647 perm_rows.append(usr)
2645 2648
2646 2649 perm_rows = sorted(perm_rows, key=display_user_group_sort)
2647 2650 return perm_rows
2648 2651
2649 2652 def get_api_data(self):
2650 2653 """
2651 2654 Common function for generating api data
2652 2655
2653 2656 """
2654 2657 group = self
2655 2658 data = {
2656 2659 'group_id': group.group_id,
2657 2660 'group_name': group.group_name,
2658 2661 'group_description': group.description_safe,
2659 2662 'parent_group': group.parent_group.group_name if group.parent_group else None,
2660 2663 'repositories': [x.repo_name for x in group.repositories],
2661 2664 'owner': group.user.username,
2662 2665 }
2663 2666 return data
2664 2667
2665 2668
class Permission(Base, BaseModel):
    """
    Catalog of all permission names known to the system, plus classmethod
    helpers that resolve the effective permissions of a user (directly or
    via active user-group membership) on repos, repo groups and user groups.
    """
    __tablename__ = 'permissions'
    __table_args__ = (
        Index('p_perm_name_idx', 'permission_name'),
        base_table_args,
    )

    # all (permission_name, translated description) pairs seeded into the DB
    PERMS = [
        ('hg.admin', _('RhodeCode Super Administrator')),

        ('repository.none', _('Repository no access')),
        ('repository.read', _('Repository read access')),
        ('repository.write', _('Repository write access')),
        ('repository.admin', _('Repository admin access')),

        ('group.none', _('Repository group no access')),
        ('group.read', _('Repository group read access')),
        ('group.write', _('Repository group write access')),
        ('group.admin', _('Repository group admin access')),

        ('usergroup.none', _('User group no access')),
        ('usergroup.read', _('User group read access')),
        ('usergroup.write', _('User group write access')),
        ('usergroup.admin', _('User group admin access')),

        ('hg.repogroup.create.false', _('Repository Group creation disabled')),
        ('hg.repogroup.create.true', _('Repository Group creation enabled')),

        ('hg.usergroup.create.false', _('User Group creation disabled')),
        ('hg.usergroup.create.true', _('User Group creation enabled')),

        ('hg.create.none', _('Repository creation disabled')),
        ('hg.create.repository', _('Repository creation enabled')),
        ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')),
        ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')),

        ('hg.fork.none', _('Repository forking disabled')),
        ('hg.fork.repository', _('Repository forking enabled')),

        ('hg.register.none', _('Registration disabled')),
        ('hg.register.manual_activate', _('User Registration with manual account activation')),
        ('hg.register.auto_activate', _('User Registration with automatic account activation')),

        ('hg.password_reset.enabled', _('Password reset enabled')),
        ('hg.password_reset.hidden', _('Password reset hidden')),
        ('hg.password_reset.disabled', _('Password reset disabled')),

        ('hg.extern_activate.manual', _('Manual activation of external account')),
        ('hg.extern_activate.auto', _('Automatic activation of external account')),

        ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')),
        ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')),
    ]

    # definition of system default permissions for DEFAULT user
    DEFAULT_USER_PERMISSIONS = [
        'repository.read',
        'group.read',
        'usergroup.read',
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.password_reset.enabled',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # defines which permissions are more important higher the more important
    # Weight defines which permissions are more important.
    # The higher number the more important.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)

    def __unicode__(self):
        return u"<%s('%s:%s')>" % (
            self.__class__.__name__, self.permission_id, self.permission_name
        )

    @classmethod
    def get_by_key(cls, key):
        """Return the Permission row matching *key*, or None."""
        return cls.query().filter(cls.permission_name == key).scalar()

    @classmethod
    def get_default_repo_perms(cls, user_id, repo_id=None):
        """
        Per-user repository permissions granted directly to *user_id*,
        optionally narrowed to a single repository.
        """
        q = Session().query(UserRepoToPerm, Repository, Permission)\
            .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\
            .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\
            .filter(UserRepoToPerm.user_id == user_id)
        if repo_id:
            q = q.filter(UserRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None):
        """
        Repository permissions *user_id* inherits through membership in
        active user groups, optionally narrowed to a single repository.
        """
        q = Session().query(UserGroupRepoToPerm, Repository, Permission)\
            .join(
                Permission,
                UserGroupRepoToPerm.permission_id == Permission.permission_id)\
            .join(
                Repository,
                UserGroupRepoToPerm.repository_id == Repository.repo_id)\
            .join(
                UserGroup,
                UserGroupRepoToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_id:
            q = q.filter(UserGroupRepoToPerm.repository_id == repo_id)
        return q.all()

    @classmethod
    def get_default_group_perms(cls, user_id, repo_group_id=None):
        """
        Repo-group permissions granted directly to *user_id*, optionally
        narrowed to a single repo group.
        """
        q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\
            .join((Permission, UserRepoGroupToPerm.permission_id == Permission.permission_id))\
            .join((RepoGroup, UserRepoGroupToPerm.group_id == RepoGroup.group_id))\
            .filter(UserRepoGroupToPerm.user_id == user_id)
        if repo_group_id:
            q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_group_perms_from_user_group(
            cls, user_id, repo_group_id=None):
        """
        Repo-group permissions *user_id* inherits through membership in
        active user groups, optionally narrowed to a single repo group.
        """
        q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\
            .join(
                Permission,
                UserGroupRepoGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                RepoGroup,
                UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\
            .join(
                UserGroup,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupRepoGroupToPerm.users_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if repo_group_id:
            q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms(cls, user_id, user_group_id=None):
        """
        User-group permissions granted directly to *user_id*, optionally
        narrowed to a single user group.
        """
        q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\
            .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\
            .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\
            .filter(UserUserGroupToPerm.user_id == user_id)
        if user_group_id:
            q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id)
        return q.all()

    @classmethod
    def get_default_user_group_perms_from_user_group(
            cls, user_id, user_group_id=None):
        """
        User-group permissions *user_id* inherits through membership in
        active user groups, optionally narrowed to one target user group.
        """
        # alias needed because UserGroup appears twice: as the permission
        # target and as the group granting membership
        TargetUserGroup = aliased(UserGroup, name='target_user_group')
        q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\
            .join(
                Permission,
                UserGroupUserGroupToPerm.permission_id ==
                Permission.permission_id)\
            .join(
                TargetUserGroup,
                UserGroupUserGroupToPerm.target_user_group_id ==
                TargetUserGroup.users_group_id)\
            .join(
                UserGroup,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroup.users_group_id)\
            .join(
                UserGroupMember,
                UserGroupUserGroupToPerm.user_group_id ==
                UserGroupMember.users_group_id)\
            .filter(
                UserGroupMember.user_id == user_id,
                UserGroup.users_group_active == true())
        if user_group_id:
            q = q.filter(
                UserGroupUserGroupToPerm.user_group_id == user_group_id)

        return q.all()
2889 2892
2890 2893
class UserRepoToPerm(Base, BaseModel):
    """Association table: a single user's permission on a repository."""
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        base_table_args
    )

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    repository = relationship('Repository')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository, permission):
        """Add a new user->repository permission row to the session."""
        n = cls()
        n.user = user
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.repository)
2918 2921
2919 2922
class UserUserGroupToPerm(Base, BaseModel):
    """Association table: a single user's permission on a user group."""
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        base_table_args
    )

    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    user_group = relationship('UserGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        """Add a new user->user-group permission row to the session."""
        n = cls()
        n.user = user
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.user_group)
2947 2950
2948 2951
class UserToPerm(Base, BaseModel):
    """Association table: a global (system-wide) permission of a user."""
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        base_table_args
    )

    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    # eagerly joined: permission is read whenever the row is accessed
    permission = relationship('Permission', lazy='joined')

    def __unicode__(self):
        return u'<%s => %s >' % (self.user, self.permission)
2965 2968
2966 2969
class UserGroupRepoToPerm(Base, BaseModel):
    """Association table: a user group's permission on a repository."""
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    repository = relationship('Repository')

    @classmethod
    def create(cls, users_group, repository, permission):
        """Add a new user-group->repository permission row to the session."""
        n = cls()
        n.users_group = users_group
        n.repository = repository
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoToPerm:%s => %s >' % (self.users_group, self.repository)
2994 2997
2995 2998
class UserGroupUserGroupToPerm(Base, BaseModel):
    """
    Association table: one user group's permission on another (target)
    user group. A group cannot hold a permission on itself (see the
    CheckConstraint below).
    """
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        CheckConstraint('target_user_group_id != user_group_id'),
        base_table_args
    )

    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    # explicit primaryjoins: both FKs point at the same table, so
    # SQLAlchemy cannot infer which column backs which relationship
    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        """Add a new user-group->user-group permission row to the session."""
        n = cls()
        n.target_user_group = target_user_group
        n.user_group = user_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupUserGroup:%s => %s >' % (self.target_user_group, self.user_group)
3024 3027
3025 3028
class UserGroupToPerm(Base, BaseModel):
    """Association table: a global (system-wide) permission of a user group."""
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
3039 3042
3040 3043
class UserRepoGroupToPerm(Base, BaseModel):
    """Association table: a single user's permission on a repository group."""
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        base_table_args
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User')
    group = relationship('RepoGroup')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        """Add a new user->repo-group permission row to the session."""
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n
3065 3068
3066 3069
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """Association table: a user group's permission on a repository group."""
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'group_id'),
        base_table_args
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup')
    permission = relationship('Permission')
    group = relationship('RepoGroup')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        """Add a new user-group->repo-group permission row to the session."""
        n = cls()
        n.users_group = user_group
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __unicode__(self):
        return u'<UserGroupRepoGroupToPerm:%s => %s >' % (self.users_group, self.group)
3094 3097
3095 3098
class Statistics(Base, BaseModel):
    """Per-repository commit/language statistics, stored as serialized blobs."""
    __tablename__ = 'statistics'
    __table_args__ = (
        base_table_args
    )

    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # one statistics row per repository (unique FK)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    # last revision the statistics were computed for
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)#JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)#JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)#JSON data

    repository = relationship('Repository', single_parent=True)
3110 3113
3111 3114
class UserFollowing(Base, BaseModel):
    """
    A user following either a repository or another user. Exactly one of
    ``follows_repo_id`` / ``follows_user_id`` is expected to be set
    (both columns are nullable).
    """
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        base_table_args
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    # explicit primaryjoin needed: two FKs reference the users table
    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query of all followings that target the given repository."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
3134 3137
3135 3138
class CacheKey(Base, BaseModel):
    """
    Database-backed registry of cache keys used for cross-process cache
    invalidation: rows are marked inactive (or deleted) to signal that a
    cached value must be recomputed.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        base_table_args,
    )

    CACHE_TYPE_FEED = 'FEED'
    CACHE_TYPE_README = 'README'
    # namespaces used to register process/thread aware caches
    REPO_INVALIDATION_NAMESPACE = 'repo_cache:{repo_id}'
    SETTINGS_INVALIDATION_NAMESPACE = 'system_settings'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    # False means the cached value is stale and must be regenerated
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args=''):
        self.cache_key = cache_key
        self.cache_args = cache_args
        # new keys start inactive until the cache value is (re)computed
        self.cache_active = False

    def __unicode__(self):
        return u"<%s('%s:%s[%s]')>" % (
            self.__class__.__name__,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        # split cache_key around cache_args into (prefix, repo_name, suffix)
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def set_invalidate(cls, cache_uid, delete=False):
        """
        Mark all caches of a repo as invalid in the database.

        :param cache_uid: value matched against the ``cache_args`` column
        :param delete: remove the rows instead of flagging them inactive
        """

        try:
            qry = Session().query(cls).filter(cls.cache_args == cache_uid)
            if delete:
                qry.delete()
                log.debug('cache objects deleted for cache args %s',
                          safe_str(cache_uid))
            else:
                qry.update({"cache_active": False})
                log.debug('cache objects marked as invalid for cache args %s',
                          safe_str(cache_uid))

            Session().commit()
        except Exception:
            # best-effort: log and roll back so a failed invalidation does
            # not poison the session
            log.exception(
                'Cache key invalidation failed for cache args %s',
                safe_str(cache_uid))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """Return the CacheKey row for *cache_key*, or None if not registered."""
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None
3226 3229
3227 3230
class ChangesetComment(Base, BaseModel):
    """
    A comment made either on a commit (referenced by ``revision``) or on a
    pull request (referenced by ``pull_request_id``). When ``line_no`` and
    ``f_path`` are set the comment is inline on a file diff.
    """
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        base_table_args,
    )

    # value stored in ``display_state`` for comments that no longer apply
    # to the latest PR version (see ``outdated``)
    COMMENT_OUTDATED = u'comment_outdated'
    COMMENT_TYPE_NOTE = u'note'
    COMMENT_TYPE_TODO = u'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    # self-referential link: a TODO comment can be resolved by another comment
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)
    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, backref='resolved_by')
    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    status_change = relationship('ChangesetStatus', cascade="all, delete, delete-orphan", lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')
    pull_request_version = relationship('PullRequestVersion')

    @classmethod
    def get_users(cls, revision=None, pull_request_id=None):
        """
        Returns user associated with this ChangesetComment. ie those
        who actually commented

        :param cls:
        :param revision: commit hash; takes precedence over pull_request_id
        :param pull_request_id: only used when revision is not given
        """
        q = Session().query(User)\
            .join(ChangesetComment.author)
        if revision:
            q = q.filter(cls.revision == revision)
        elif pull_request_id:
            q = q.filter(cls.pull_request_id == pull_request_id)
        return q.all()

    @classmethod
    def get_index_from_version(cls, pr_version, versions):
        """
        Return the 1-based position of ``pr_version`` within ``versions``,
        or None when it is not present.
        """
        num_versions = [x.pull_request_version_id for x in versions]
        try:
            return num_versions.index(pr_version) +1
        except (IndexError, ValueError):
            # NOTE: list.index raises only ValueError; IndexError is
            # unreachable here but kept for defensive parity
            return

    @property
    def outdated(self):
        # True once the comment has been marked outdated by a PR update
        return self.display_state == self.COMMENT_OUTDATED

    def outdated_at_version(self, version):
        """
        Checks if comment is outdated for given pull request version
        """
        return self.outdated and self.pull_request_version_id != version

    def older_than_version(self, version):
        """
        Checks if comment is made from previous version than given
        """
        if version is None:
            # comments bound to any version are "older" than the live PR
            return self.pull_request_version_id is not None

        return self.pull_request_version_id < version

    @property
    def resolved(self):
        # first resolving comment, or None when the TODO is still open
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        # inline comments carry both a file path and a line marker
        return self.line_no and self.f_path

    def get_index_version(self, versions):
        """Return this comment's 1-based version index within ``versions``."""
        return self.get_index_from_version(
            self.pull_request_version_id, versions)

    def __repr__(self):
        if self.comment_id:
            return '<DB:Comment #%s>' % self.comment_id
        else:
            return '<DB:Comment at %#x>' % id(self)

    def get_api_data(self):
        """Return a dict of comment attributes for the JSON-RPC API."""
        comment = self
        data = {
            'comment_id': comment.comment_id,
            'comment_type': comment.comment_type,
            'comment_text': comment.text,
            'comment_status': comment.status_change,
            'comment_f_path': comment.f_path,
            'comment_lineno': comment.line_no,
            'comment_author': comment.author,
            'comment_created_on': comment.created_on
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3348 3351
3349 3352
class ChangesetStatus(Base, BaseModel):
    """
    A review status (approved/rejected/...) set by a user on a commit or on
    a pull request. ``version`` increments when the same revision is
    reviewed again, hence the (repo_id, revision, version) unique constraint.
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        base_table_args
    )

    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    # (value, translated label) pairs; order is the display order
    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='joined')
    repo = relationship('Repository')
    comment = relationship('ChangesetComment', lazy='joined')
    pull_request = relationship('PullRequest', lazy='joined')

    def __unicode__(self):
        return u"<%s('%s[v%s]:%s')>" % (
            self.__class__.__name__,
            self.status, self.version, self.author
        )

    @classmethod
    def get_status_lbl(cls, value):
        """Return the translated label for a status value, or None."""
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        """Return a dict of status attributes for the JSON-RPC API."""
        status = self
        data = {
            'status_id': status.changeset_status_id,
            'status': status.status,
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
3412 3415
3413 3416
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.

    Mixed into both ``PullRequest`` and ``PullRequestVersion``;
    ``declared_attr`` is used for columns/relationships that must be
    created per concrete subclass.
    """

    # .status values
    STATUS_NEW = u'new'
    STATUS_OPEN = u'open'
    STATUS_CLOSED = u'closed'

    title = Column('title', Unicode(255), nullable=True)
    description = Column(
        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
        nullable=True)
    description_renderer = Column('description_renderer', Unicode(64), nullable=True)

    # new/open/closed status of pull request (not approve/reject/etc)
    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)

    @declared_attr
    def user_id(cls):
        # PR author
        return Column(
            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
            unique=None)

    # 500 revisions max
    # colon-separated list of commit ids, exposed via the ``revisions``
    # hybrid property below
    _revisions = Column(
        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))

    @declared_attr
    def source_repo_id(cls):
        # TODO: dan: rename column to source_repo_id
        return Column(
            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    source_ref = Column('org_ref', Unicode(255), nullable=False)

    @declared_attr
    def target_repo_id(cls):
        # TODO: dan: rename column to target_repo_id
        return Column(
            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    target_ref = Column('other_ref', Unicode(255), nullable=False)
    _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)

    # TODO: dan: rename column to last_merge_source_rev
    _last_merge_source_rev = Column(
        'last_merge_org_rev', String(40), nullable=True)
    # TODO: dan: rename column to last_merge_target_rev
    _last_merge_target_rev = Column(
        'last_merge_other_rev', String(40), nullable=True)
    _last_merge_status = Column('merge_status', Integer(), nullable=True)
    merge_rev = Column('merge_rev', String(40), nullable=True)

    reviewer_data = Column(
        'reviewer_data_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    @property
    def reviewer_data_json(self):
        """JSON-serialized form of the mutable ``reviewer_data`` column."""
        return json.dumps(self.reviewer_data)

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description for safe template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @hybrid_property
    def revisions(self):
        # decode the colon-separated storage format; empty list when unset
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        self._revisions = ':'.join(val)

    @hybrid_property
    def last_merge_status(self):
        return safe_int(self._last_merge_status)

    @last_merge_status.setter
    def last_merge_status(self, val):
        self._last_merge_status = val

    @declared_attr
    def author(cls):
        return relationship('User', lazy='joined')

    @declared_attr
    def source_repo(cls):
        # primaryjoin is built per subclass since the FK column is shared
        return relationship(
            'Repository',
            primaryjoin='%s.source_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def source_ref_parts(self):
        return self.unicode_to_reference(self.source_ref)

    @declared_attr
    def target_repo(cls):
        return relationship(
            'Repository',
            primaryjoin='%s.target_repo_id==Repository.repo_id' % cls.__name__)

    @property
    def target_ref_parts(self):
        return self.unicode_to_reference(self.target_ref)

    @property
    def shadow_merge_ref(self):
        return self.unicode_to_reference(self._shadow_merge_ref)

    @shadow_merge_ref.setter
    def shadow_merge_ref(self, ref):
        self._shadow_merge_ref = self.reference_to_unicode(ref)

    def unicode_to_reference(self, raw):
        """
        Convert a unicode (or string) to a reference object.
        If unicode evaluates to False it returns None.
        """
        if raw:
            # stored form is the colon-joined Reference fields
            refs = raw.split(':')
            return Reference(*refs)
        else:
            return None

    def reference_to_unicode(self, ref):
        """
        Convert a reference object to unicode.
        If reference is None it returns None.
        """
        if ref:
            return u':'.join(ref)
        else:
            return None

    def get_api_data(self, with_merge_state=True):
        """
        Return a dict of pull-request attributes for the JSON-RPC API.

        :param with_merge_state: when False, skip the (potentially
            expensive) merge-status computation and report 'not_available'
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = self
        if with_merge_state:
            merge_status = PullRequestModel().merge_status(pull_request)
            merge_state = {
                'status': merge_status[0],
                'message': safe_unicode(merge_status[1]),
            }
        else:
            merge_state = {'status': 'not_available',
                           'message': 'not_available'}

        merge_data = {
            'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
            'reference': (
                pull_request.shadow_merge_ref._asdict()
                if pull_request.shadow_merge_ref else None),
        }

        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': PullRequestModel().get_url(pull_request),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': merge_state,
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'merge': merge_data,
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'reasons': reasons,
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for obj, reviewer, reasons, mandatory, st in
                pull_request.reviewers_statuses()
            ]
        }

        return data
3625 3628
3626 3629
class PullRequest(Base, _PullRequestBase):
    """
    The live pull request record; historical snapshots are kept as
    ``PullRequestVersion`` rows linked via ``versions``.
    """
    __tablename__ = 'pull_requests'
    __table_args__ = (
        base_table_args,
    )

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)

    def __repr__(self):
        if self.pull_request_id:
            return '<DB:PullRequest #%s>' % self.pull_request_id
        else:
            return '<DB:PullRequest at %#x>' % id(self)

    reviewers = relationship('PullRequestReviewers',
                             cascade="all, delete, delete-orphan")
    statuses = relationship('ChangesetStatus',
                            cascade="all, delete, delete-orphan")
    comments = relationship('ChangesetComment',
                            cascade="all, delete, delete-orphan")
    versions = relationship('PullRequestVersion',
                            cascade="all, delete, delete-orphan",
                            lazy='dynamic')

    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):
        """
        Build a read-only display wrapper around a PR (or a PR version).

        :param pull_request_obj: object whose attributes are snapshotted
        :param org_pull_request_obj: the original/live PR; supplies the
            shadow merge ref and reviewer data
        :param internal_methods: attribute names resolved on the wrapper
            itself instead of the snapshot dict (defaults to ['versions'])
        """

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                return '<DB:PullRequestDisplay #%s>' % self.attrs.get('pull_request_id')

            def versions(self):
                # closure over the wrapped object: live version list
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            @property
            def pull_request_version_id(self):
                # only present when wrapping a PullRequestVersion
                return getattr(pull_request_obj, 'pull_request_version_id', None)

        attrs = StrictAttributeDict(pull_request_obj.get_api_data())

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions

        # merge/reviewer state always comes from the original (live) PR
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)

    def is_closed(self):
        return self.status == self.STATUS_CLOSED

    def __json__(self):
        return {
            'revisions': self.revisions,
        }

    def calculated_review_status(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)

    def reviewers_statuses(self):
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self)

    @property
    def workspace_id(self):
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)

    def get_shadow_repo(self):
        """Return the shadow-merge repo instance, or None when absent on disk."""
        workspace_id = self.workspace_id
        vcs_obj = self.target_repo.scm_instance()
        shadow_repository_path = vcs_obj._get_shadow_repository_path(
            self.target_repo.repo_id, workspace_id)
        if os.path.isdir(shadow_repository_path):
            return vcs_obj._get_shadow_instance(shadow_repository_path)
3743 3746
3744 3747
class PullRequestVersion(Base, _PullRequestBase):
    """
    Immutable snapshot of a pull request taken when it is updated; live
    state (reviewers, versions, status) is delegated to the parent PR.
    """
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        base_table_args,
    )

    pull_request_version_id = Column(
        'pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column(
        'pull_request_id', Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest')

    def __repr__(self):
        if self.pull_request_version_id:
            return '<DB:PullRequestVersion #%s>' % self.pull_request_version_id
        else:
            return '<DB:PullRequestVersion at %#x>' % id(self)

    @property
    def reviewers(self):
        return self.pull_request.reviewers

    @property
    def versions(self):
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()
3781 3784
3782 3785
class PullRequestReviewers(Base, BaseModel):
    """
    Association of a user with a pull request as reviewer, with the
    review reasons and optional voting-rule data that selected them.
    """
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        base_table_args,
    )

    @hybrid_property
    def reasons(self):
        # stored as a mutable JSON list in ``_reasons``; never None for callers
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        val = val or []
        # basestring: this module is Python 2 code
        if any(not isinstance(x, basestring) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')
    pull_request = relationship('PullRequest')

    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer

        Returns None unless ``rule_data`` contains a 'vote_rule' entry.
        """

        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    def __unicode__(self):
        return u"<%s('id:%s')>" % (self.__class__.__name__,
                                   self.pull_requests_reviewers_id)
3840 3843
3841 3844
class Notification(Base, BaseModel):
    """
    A notification message fanned out to one or more users through
    ``UserNotification`` association rows.
    """
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        base_table_args,
    )

    TYPE_CHANGESET_COMMENT = u'cs_comment'
    TYPE_MESSAGE = u'message'
    TYPE_MENTION = u'mention'
    TYPE_REGISTRATION = u'registration'
    TYPE_PULL_REQUEST = u'pull_request'
    TYPE_PULL_REQUEST_COMMENT = u'pull_request_comment'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User')
    notifications_to_users = relationship('UserNotification', lazy='joined',
                                          cascade="all, delete, delete-orphan")

    @property
    def recipients(self):
        # users linked via UserNotification, ordered by user id
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and link it to every recipient.

        :param created_by: User instance that authored the notification
        :param subject: notification subject line
        :param body: notification body text
        :param recipients: iterable of User instances to notify
        :param type_: one of the TYPE_* constants; defaults to TYPE_MESSAGE
        :return: the new (not yet committed) Notification
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        # For each recipient link the created notification to his account
        for u in recipients:
            assoc = UserNotification()
            assoc.user_id = u.user_id
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True
            Session().add(assoc)

        Session().add(notification)

        return notification
3900 3903
3901 3904
class UserNotification(Base, BaseModel):
    """
    Association row linking a user to a notification, carrying the
    per-user read state.
    """
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        base_table_args
    )

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined")
    notification = relationship('Notification', lazy="joined",
                                order_by=lambda: Notification.created_on.desc(),)

    def mark_as_read(self):
        """Flag this user's copy of the notification as read (not committed)."""
        self.read = True
        Session().add(self)
3921 3924
3922 3925
class Gist(Base, BaseModel):
    """
    A code-snippet (gist) record; the actual content lives in a small VCS
    repository under ``base_path()`` named by ``gist_access_id``.
    """
    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        base_table_args
    )

    GIST_PUBLIC = u'public'
    GIST_PRIVATE = u'private'
    DEFAULT_FILENAME = u'gistfile1.txt'

    ACL_LEVEL_PUBLIC = u'acl_public'
    ACL_LEVEL_PRIVATE = u'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User')

    def __repr__(self):
        return '<Gist:[%s]%s>' % (self.gist_type, self.gist_access_id)

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description for safe template rendering
        from rhodecode.lib import helpers as h
        return h.escape(self.gist_description)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch a gist by access id or raise pyramid's HTTPNotFound."""
        from pyramid.httpexceptions import HTTPNotFound

        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        from rhodecode.model.gist import GistModel
        return GistModel().get_url(self)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        # root storage path comes from the RhodeCodeUi settings table
        q = Session().query(RhodeCodeUi)\
            .filter(RhodeCodeUi.ui_key == URL_SEP)
        q = q.options(FromCache("sql_cache_short", "repository_repo_path"))
        return os.path.join(q.one().ui_value, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            'content': None,
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict(
        )
        data.update(self.get_api_data())
        return data
    # SCM functions

    def scm_instance(self, **kwargs):
        """Return the VCS repository instance backing this gist's content."""
        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
        return get_vcs_instance(
            repo_path=safe_str(full_repo_path), create=False)
4018 4021
4019 4022
class ExternalIdentity(Base, BaseModel):
    """
    Maps a local user account to an identity (and its tokens) at an
    external authentication provider.
    """
    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        base_table_args
    )

    external_id = Column('external_id', Unicode(255), default=u'',
                         primary_key=True)
    external_username = Column('external_username', Unicode(1024), default=u'')
    local_user_id = Column('local_user_id', Integer(),
                           ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default=u'',
                           primary_key=True)
    access_token = Column('access_token', String(1024), default=u'')
    alt_token = Column('alt_token', String(1024), default=u'')
    token_secret = Column('token_secret', String(1024), default=u'')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name,
                                    local_user_id=None):
        """
        Fetch a single ExternalIdentity row matching the search params.

        :param external_id: identifier assigned by the external provider
        :param provider_name: name of the external provider
        :param local_user_id: optionally restrict the match to one local user
        :return: ExternalIdentity or None
        """
        lookup = cls.query().filter(
            cls.external_id == external_id,
            cls.provider_name == provider_name)
        if local_user_id:
            lookup = lookup.filter(cls.local_user_id == local_user_id)
        return lookup.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Resolve the local User bound to the given external identity.

        :param external_id: identifier assigned by the external provider
        :param provider_name: name of the external provider
        :return: User or None
        """
        lookup = User.query().filter(
            cls.external_id == external_id,
            cls.provider_name == provider_name,
            User.user_id == cls.local_user_id)
        return lookup.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Query over all identity rows belonging to one local user.

        :param local_user_id: id of the local user account
        :return: query yielding ExternalIdentity rows
        """
        return cls.query().filter(cls.local_user_id == local_user_id)
4082 4085
4083 4086
class Integration(Base, BaseModel):
    """
    A configured integration instance (type identified by the string key in
    `integration_type`), scoped globally, to a repository group, or to a
    single repository. Settings are stored as mutable JSON.
    """
    __tablename__ = 'integrations'
    __table_args__ = (
        base_table_args
    )

    integration_id = Column('integration_id', Integer(), primary_key=True)
    # string key identifying the integration implementation
    integration_type = Column('integration_type', String(255))
    enabled = Column('enabled', Boolean(), nullable=False)
    name = Column('name', String(255), nullable=False)
    # for repo-group (or global) scope: apply only to direct children repos,
    # not recursively
    child_repos_only = Column('child_repos_only', Boolean(), nullable=False,
                              default=False)

    settings = Column(
        'settings_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
    repo_id = Column(
        'repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    repo_group_id = Column(
        'repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @property
    def scope(self):
        # precedence: single repo > repo group > global/root scope
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            if self.child_repos_only:
                return repr(self.repo_group) + ' (child repos only)'
            else:
                return repr(self.repo_group) + ' (recursive)'
        if self.child_repos_only:
            return 'root_repos'
        return 'global'

    def __repr__(self):
        return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
4125 4128
4126 4129
class RepoReviewRuleUser(Base, BaseModel):
    """Single user attached to a `RepoReviewRule`, with per-user flags."""
    __tablename__ = 'repo_review_rules_users'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
    # when True, this reviewer cannot be removed from a pull request
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    user = relationship('User')

    def rule_data(self):
        """Return the per-user rule flags as a plain dict."""
        return {
            'mandatory': self.mandatory
        }
4143 4146
4144 4147
class RepoReviewRuleUserGroup(Base, BaseModel):
    """User group attached to a `RepoReviewRule`, with group-level vote rules."""
    __tablename__ = 'repo_review_rules_users_groups'
    __table_args__ = (
        base_table_args
    )

    # sentinel vote_rule value meaning "every member of the group must vote"
    VOTE_RULE_ALL = -1

    repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    users_group_id = Column("users_group_id", Integer(),ForeignKey('users_groups.users_group_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    # minimum number of votes required from this group, or VOTE_RULE_ALL
    vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
    users_group = relationship('UserGroup')

    def rule_data(self):
        """Return the per-group rule flags as a plain dict."""
        return {
            'mandatory': self.mandatory,
            'vote_rule': self.vote_rule
        }

    @property
    def vote_rule_label(self):
        # None/0 is treated the same as VOTE_RULE_ALL
        if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
            return 'all must vote'
        else:
            return 'min. vote {}'.format(self.vote_rule)
4172 4175
4173 4176
class RepoReviewRule(Base, BaseModel):
    """
    Per-repository reviewer rule: selects which users/user groups must
    review a pull request, based on patterns matched against the source
    branch, target branch and changed file paths.

    Patterns are globs by default (translated via `glob2re`); a pattern
    starting with ``re:`` is treated as a raw regular expression.
    """
    __tablename__ = 'repo_review_rules'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_id = Column(
        'repo_review_rule_id', Integer(), primary_key=True)
    repo_id = Column(
        "repo_id", Integer(), ForeignKey('repositories.repo_id'))
    repo = relationship('Repository', backref='review_rules')

    review_rule_name = Column('review_rule_name', String(255))
    _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob
    _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default=u'*')  # glob

    use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)
    forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
    forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)
    forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)

    rule_users = relationship('RepoReviewRuleUser')
    rule_user_groups = relationship('RepoReviewRuleUserGroup')

    def _validate_pattern(self, value):
        # compile only for its side effect: raises re.error on an invalid
        # pattern; the compiled object is discarded
        re.compile('^' + glob2re(value) + '$')

    @hybrid_property
    def source_branch_pattern(self):
        # empty/None stored value falls back to match-everything
        return self._branch_pattern or '*'

    @source_branch_pattern.setter
    def source_branch_pattern(self, value):
        self._validate_pattern(value)
        self._branch_pattern = value or '*'

    @hybrid_property
    def target_branch_pattern(self):
        return self._target_branch_pattern or '*'

    @target_branch_pattern.setter
    def target_branch_pattern(self, value):
        self._validate_pattern(value)
        self._target_branch_pattern = value or '*'

    @hybrid_property
    def file_pattern(self):
        return self._file_pattern or '*'

    @file_pattern.setter
    def file_pattern(self, value):
        self._validate_pattern(value)
        self._file_pattern = value or '*'

    def matches(self, source_branch, target_branch, files_changed):
        """
        Check if this review rule matches a branch/files in a pull request

        :param source_branch: source branch name for the commit
        :param target_branch: target branch name for the commit
        :param files_changed: list of file paths changed in the pull request
        :return: True when both the branch patterns and the file pattern match
        """

        source_branch = source_branch or ''
        target_branch = target_branch or ''
        files_changed = files_changed or []

        branch_matches = True
        # branch patterns are only evaluated when at least one branch name
        # is given; otherwise the rule matches on files alone
        if source_branch or target_branch:
            if self.source_branch_pattern == '*':
                source_branch_match = True
            else:
                if self.source_branch_pattern.startswith('re:'):
                    # explicit regex, used as-is after stripping the prefix
                    source_pattern = self.source_branch_pattern[3:]
                else:
                    # glob pattern, anchored to match the full branch name
                    source_pattern = '^' + glob2re(self.source_branch_pattern) + '$'
                source_branch_regex = re.compile(source_pattern)
                source_branch_match = bool(source_branch_regex.search(source_branch))
            if self.target_branch_pattern == '*':
                target_branch_match = True
            else:
                if self.target_branch_pattern.startswith('re:'):
                    target_pattern = self.target_branch_pattern[3:]
                else:
                    target_pattern = '^' + glob2re(self.target_branch_pattern) + '$'
                target_branch_regex = re.compile(target_pattern)
                target_branch_match = bool(target_branch_regex.search(target_branch))

            branch_matches = source_branch_match and target_branch_match

        files_matches = True
        if self.file_pattern != '*':
            files_matches = False
            if self.file_pattern.startswith('re:'):
                file_pattern = self.file_pattern[3:]
            else:
                # note: intentionally NOT anchored, unlike the branch globs
                file_pattern = glob2re(self.file_pattern)
            file_regex = re.compile(file_pattern)
            for filename in files_changed:
                # a single matching file is enough
                if file_regex.search(filename):
                    files_matches = True
                    break

        return branch_matches and files_matches

    @property
    def review_users(self):
        """ Returns the users which this rule applies to """

        # keyed by username; insertion order preserved so directly attached
        # users take precedence over group members
        users = collections.OrderedDict()

        for rule_user in self.rule_users:
            if rule_user.user.active:
                # NOTE(review): this compares a User object against the
                # username keys of `users`, so it is always True; duplicate
                # rule_users end up handled by key overwrite below — confirm
                # whether `rule_user.user.username not in users` was intended
                if rule_user.user not in users:
                    users[rule_user.user.username] = {
                        'user': rule_user.user,
                        'source': 'user',
                        'source_data': {},
                        'data': rule_user.rule_data()
                    }

        for rule_user_group in self.rule_user_groups:
            source_data = {
                'user_group_id': rule_user_group.users_group.users_group_id,
                'name': rule_user_group.users_group.users_group_name,
                'members': len(rule_user_group.users_group.members)
            }
            for member in rule_user_group.users_group.members:
                if member.user.active:
                    key = member.user.username
                    if key in users:
                        # skip this member as we have him already
                        # this prevents from override the "first" matched
                        # users with duplicates in multiple groups
                        continue

                    users[key] = {
                        'user': member.user,
                        'source': 'user_group',
                        'source_data': source_data,
                        'data': rule_user_group.rule_data()
                    }

        return users

    def user_group_vote_rule(self):
        """Return the list of user-group bindings carrying vote rules."""
        rules = []
        if self.rule_user_groups:
            for user_group in self.rule_user_groups:
                rules.append(user_group)
        return rules

    def __repr__(self):
        return '<RepoReviewerRule(id=%r, repo=%r)>' % (
            self.repo_review_rule_id, self.repo)
4330 4333
4331 4334
class ScheduleEntry(Base, BaseModel):
    """
    Persistent definition of a scheduled task: what to run (dotted task
    path plus JSON-stored args/kwargs) and when to run it (a crontab,
    timedelta or integer schedule definition).
    """
    __tablename__ = 'schedule_entries'
    __table_args__ = (
        UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
        UniqueConstraint('task_uid', name='s_task_uid_idx'),
        base_table_args,
    )

    # allowed values for the `schedule_type` property
    schedule_types = ['crontab', 'timedelta', 'integer']
    schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)

    schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
    schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
    schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)

    _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
    schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))

    schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
    schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)

    # task
    # task_uid is derived from (dot notation, args, kwargs); kept in sync by
    # the before_insert/before_update event listeners below
    task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
    task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
    task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
    task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)

    @hybrid_property
    def schedule_type(self):
        return self._schedule_type

    @schedule_type.setter
    def schedule_type(self, val):
        """
        Set the schedule type after validating it.

        :param val: one of `schedule_types`
        :raises ValueError: if `val` is not an allowed schedule type
        """
        if val not in self.schedule_types:
            # BUG FIX: the format arguments were swapped (the rejected value
            # was shown as the allowed set, and the *old* value was reported
            # instead of `val`); also fixed the "on of" typo
            raise ValueError('Value must be one of `{}` and got `{}`'.format(
                self.schedule_types, val))

        self._schedule_type = val

    @classmethod
    def get_uid(cls, obj):
        """
        Build a deterministic sha1 uid from the task dotted path, args and
        kwargs of `obj`, decoding JsonRaw payloads first (falling back to
        empty args/kwargs on invalid JSON).
        """
        args = obj.task_args
        kwargs = obj.task_kwargs
        if isinstance(args, JsonRaw):
            try:
                args = json.loads(args)
            except ValueError:
                args = tuple()

        if isinstance(kwargs, JsonRaw):
            try:
                kwargs = json.loads(kwargs)
            except ValueError:
                kwargs = dict()

        dot_notation = obj.task_dot_notation
        # NOTE(review): `sorted(dot_notation)` sorts the *characters* of the
        # dotted-path string — looks unintended, but it is deterministic and
        # stored uids depend on it, so it is kept as-is
        val = '.'.join(map(safe_str, [
            sorted(dot_notation), args, sorted(kwargs.items())]))
        return hashlib.sha1(val).hexdigest()

    @classmethod
    def get_by_schedule_name(cls, schedule_name):
        """Return the entry with the given unique name, or None."""
        return cls.query().filter(cls.schedule_name == schedule_name).scalar()

    @classmethod
    def get_by_schedule_id(cls, schedule_id):
        """Return the entry with the given primary key, or None."""
        return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()

    @property
    def task(self):
        return self.task_dot_notation

    @property
    def schedule(self):
        """Return the schedule object built from the raw definition."""
        from rhodecode.lib.celerylib.utils import raw_2_schedule
        schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
        return schedule

    @property
    def args(self):
        try:
            return list(self.task_args or [])
        except ValueError:
            return list()

    @property
    def kwargs(self):
        try:
            return dict(self.task_kwargs or {})
        except ValueError:
            return dict()

    def _as_raw(self, val):
        """De-coerce a mutable JSON value and dump it back to a JSON string."""
        if hasattr(val, 'de_coerce'):
            val = val.de_coerce()
        if val:
            val = json.dumps(val)

        return val

    @property
    def schedule_definition_raw(self):
        return self._as_raw(self.schedule_definition)

    @property
    def args_raw(self):
        return self._as_raw(self.task_args)

    @property
    def kwargs_raw(self):
        return self._as_raw(self.task_kwargs)

    def __repr__(self):
        return '<DB:ScheduleEntry({}:{})>'.format(
            self.schedule_entry_id, self.schedule_name)
4450 4453
4451 4454
@event.listens_for(ScheduleEntry, 'before_update')
def update_task_uid(mapper, connection, target):
    # keep the unique task_uid in sync with task definition on every update
    target.task_uid = ScheduleEntry.get_uid(target)
4455 4458
4456 4459
@event.listens_for(ScheduleEntry, 'before_insert')
def set_task_uid(mapper, connection, target):
    # compute the unique task_uid before the row is first inserted
    target.task_uid = ScheduleEntry.get_uid(target)
4460 4463
4461 4464
class DbMigrateVersion(Base, BaseModel):
    """Tracks the applied database schema migration version."""
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        base_table_args,
    )

    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)

    @classmethod
    def set_version(cls, version):
        """
        Helper for forcing a different version, usually for debugging purposes via ishell.

        NOTE(review): assumes a version row already exists — `first()`
        returning None would raise AttributeError; confirm acceptable for a
        debug-only helper.
        """
        ver = DbMigrateVersion.query().first()
        ver.version = version
        Session().commit()
4480 4483
4481 4484
class DbSession(Base, BaseModel):
    """Mapped table backing database-stored web sessions."""
    __tablename__ = 'db_session'
    __table_args__ = (
        base_table_args,
    )

    def __repr__(self):
        return '<DB:DbSession({})>'.format(self.id)

    # `namespace` is the primary key; `id` is a plain integer column
    id = Column('id', Integer())
    namespace = Column('namespace', String(255), primary_key=True)
    accessed = Column('accessed', DateTime, nullable=False)
    created = Column('created', DateTime, nullable=False)
    data = Column('data', PickleType, nullable=False)
4496 4499
4497 4500
class BeakerCache(Base, BaseModel):
    """Mapped table backing the database-stored beaker cache."""
    __tablename__ = 'beaker_cache'
    __table_args__ = (
        base_table_args,
    )

    def __repr__(self):
        # BUG FIX: previously reported itself as 'DB:DbSession', a
        # copy-paste from the DbSession model
        return '<DB:BeakerCache({})>'.format(self.id)

    # `namespace` is the primary key; `id` is a plain integer column
    id = Column('id', Integer())
    namespace = Column('namespace', String(255), primary_key=True)
    accessed = Column('accessed', DateTime, nullable=False)
    created = Column('created', DateTime, nullable=False)
    data = Column('data', PickleType, nullable=False)
General Comments 0
You need to be logged in to leave comments. Login now