gists: fixed cache problems and updated ui
marcink
r3869:816873d4 default
@@ -1,1879 +1,1881 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2014-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Base module for all VCS systems
"""
import os
import re
import time
import shutil
import datetime
import fnmatch
import itertools
import logging
import collections
import warnings

from zope.cachedescriptors.property import Lazy as LazyProperty

from pyramid import compat

import rhodecode
from rhodecode.translation import lazy_ugettext
from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
from rhodecode.lib.vcs import connection
from rhodecode.lib.vcs.utils import author_name, author_email
from rhodecode.lib.vcs.conf import settings
from rhodecode.lib.vcs.exceptions import (
    CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
    NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
    NodeDoesNotExistError, NodeNotChangedError, VCSError,
    ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
    RepositoryError)


log = logging.getLogger(__name__)


FILEMODE_DEFAULT = 0o100644
FILEMODE_EXECUTABLE = 0o100755
EMPTY_COMMIT_ID = '0' * 40

Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
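
# Reviewer sketch (illustrative, not part of this module): a ``Reference``
# bundles the ref type, its name, and the commit id it points at.
#
#     ref = Reference('branch', 'default', '0' * 40)
#     ref.type, ref.name, ref.commit_id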


class MergeFailureReason(object):
    """
    Enumeration with all the reasons why the server side merge could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The merge was not successful, there are conflicts.
    MERGE_FAILED = 2

    # The merge succeeded but we could not push it to the target repository.
    PUSH_FAILED = 3

    # The specified target is not a head in the target repository.
    TARGET_IS_NOT_HEAD = 4

    # The source repository contains more branches than the target. Pushing
    # the merge will create additional branches in the target.
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # The target reference has multiple heads. That does not allow to correctly
    # identify the target location. This could only happen for mercurial
    # branches.
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # The target repository is locked
    TARGET_IS_LOCKED = 7

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # An involved commit could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    # The target repo reference is missing.
    MISSING_TARGET_REF = 9

    # The source repo reference is missing.
    MISSING_SOURCE_REF = 10

    # The merge was not successful, there are conflicts related to sub
    # repositories.
    SUBREPO_MERGE_FAILED = 11


class UpdateFailureReason(object):
    """
    Enumeration with all the reasons why the pull request update could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The pull request is up to date.
    NO_CHANGE = 2

    # The pull request has a reference type that is not supported for update.
    WRONG_REF_TYPE = 3

    # Update failed because the target reference is missing.
    MISSING_TARGET_REF = 4

    # Update failed because the source reference is missing.
    MISSING_SOURCE_REF = 5


class MergeResponse(object):

    # uses .format(**metadata) for variables
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            u'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            u'This pull request cannot be merged because of an unhandled exception. '
            u'{exception}'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of merge conflicts.'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            u'This pull request could not be merged because push to '
            u'target:`{target}@{merge_commit}` failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'`{target_ref.name}` is not a head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            u'This pull request cannot be merged because the source contains '
            u'more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            u'This pull request cannot be merged because the target `{target_ref.name}` '
            u'has multiple heads: `{heads}`.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            u'This pull request cannot be merged because the target repository is '
            u'locked by {locked_by}.'),

        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'reference `{target_ref.name}` is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            u'This pull request cannot be merged because the source '
            u'reference `{source_ref.name}` is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of conflicts related '
            u'to sub repositories.'),

        # Deprecations
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            u'This pull request cannot be merged because the target or the '
            u'source reference is missing.'),

    }

    def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
        self.possible = possible
        self.executed = executed
        self.merge_ref = merge_ref
        self.failure_reason = failure_reason
        self.metadata = metadata or {}

    def __repr__(self):
        return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)

    def __eq__(self, other):
        same_instance = isinstance(other, self.__class__)
        return same_instance \
            and self.possible == other.possible \
            and self.executed == other.executed \
            and self.failure_reason == other.failure_reason

    @property
    def label(self):
        label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
                          not k.startswith('_'))
        return label_dict.get(self.failure_reason)

    @property
    def merge_status_message(self):
        """
        Return a human friendly error message for the given merge status code.
        """
        msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
        try:
            return msg.format(**self.metadata)
        except Exception:
            log.exception('Failed to format %s message', self)
            return msg

    def asdict(self):
        data = {}
        for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
                  'merge_status_message']:
            data[k] = getattr(self, k)
        return data
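
    # Reviewer sketch (illustrative, not part of this module): the ``metadata``
    # dict supplies the ``{target}``/``{merge_commit}`` style placeholders used
    # by MERGE_STATUS_MESSAGES via ``msg.format(**metadata)``.
    #
    #     resp = MergeResponse(
    #         False, False, None, MergeFailureReason.PUSH_FAILED,
    #         metadata={'target': 'some-repo', 'merge_commit': 'abc123'})
    #     resp.label                 # 'PUSH_FAILED'
    #     resp.merge_status_message  # formatted, human readable message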


class BaseRepository(object):
    """
    Base Repository for final backends

    .. attribute:: DEFAULT_BRANCH_NAME

        name of default branch (i.e. "trunk" for svn, "master" for git etc.)

    .. attribute:: commit_ids

        list of all available commit ids, in ascending order

    .. attribute:: path

        absolute path to the repository

    .. attribute:: bookmarks

        Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
        there are no bookmarks or the backend implementation does not support
        bookmarks.

    .. attribute:: tags

        Mapping from name to :term:`Commit ID` of the tag.

    """

    DEFAULT_BRANCH_NAME = None
    DEFAULT_CONTACT = u"Unknown"
    DEFAULT_DESCRIPTION = u"unknown"
    EMPTY_COMMIT_ID = '0' * 40

    path = None

    _is_empty = None
    _commit_ids = {}

    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if repository could
        not be found at the given ``repo_path`` or directory at ``repo_path``
        exists and ``create`` is set to True.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, would try to create repository.
        :param src_url=None: if set, should be proper url from which repository
            would be cloned; requires ``create`` parameter to be set to True -
            raises RepositoryError if src_url is set and create evaluates to
            False
        """
        raise NotImplementedError

    def __repr__(self):
        return '<%s at %s>' % (self.__class__.__name__, self.path)

    def __len__(self):
        return self.count()

    def __eq__(self, other):
        same_instance = isinstance(other, self.__class__)
        return same_instance and other.path == self.path

    def __ne__(self, other):
        return not self.__eq__(other)

    def get_create_shadow_cache_pr_path(self, db_repo):
        path = db_repo.cached_diffs_dir
        if not os.path.exists(path):
            os.makedirs(path, 0o755)
        return path

    @classmethod
    def get_default_config(cls, default=None):
        config = Config()
        if default and isinstance(default, list):
            for section, key, val in default:
                config.set(section, key, val)
        return config

    @LazyProperty
    def _remote(self):
        raise NotImplementedError

    def _heads(self, branch=None):
        return []

    @LazyProperty
    def EMPTY_COMMIT(self):
        return EmptyCommit(self.EMPTY_COMMIT_ID)

    @LazyProperty
    def alias(self):
        for k, v in settings.BACKENDS.items():
            if v.split('.')[-1] == str(self.__class__.__name__):
                return k

    @LazyProperty
    def name(self):
        return safe_unicode(os.path.basename(self.path))

    @LazyProperty
    def description(self):
        raise NotImplementedError

    def refs(self):
        """
        returns a `dict` with branches, bookmarks, tags, and closed_branches
        for this repository
        """
        return dict(
            branches=self.branches,
            branches_closed=self.branches_closed,
            tags=self.tags,
            bookmarks=self.bookmarks
        )

    @LazyProperty
    def branches(self):
        """
        A `dict` which maps branch names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def branches_closed(self):
        """
        A `dict` which maps closed branch names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def bookmarks(self):
        """
        A `dict` which maps bookmark names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def tags(self):
        """
        A `dict` which maps tag names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def size(self):
        """
        Returns combined size in bytes for all repository files
        """
        tip = self.get_commit()
        return tip.size

    def size_at_commit(self, commit_id):
        commit = self.get_commit(commit_id)
        return commit.size

    def _check_for_empty(self):
        no_commits = len(self._commit_ids) == 0
        if no_commits:
            # check on remote to be sure
            return self._remote.is_empty()
        else:
            return False

    def is_empty(self):
        if rhodecode.is_test:
            return self._check_for_empty()

        if self._is_empty is None:
            # cache empty for production, but not tests
            self._is_empty = self._check_for_empty()

        return self._is_empty
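
    # Reviewer note (not part of the original file): outside of tests the
    # emptiness check is cached in ``_is_empty``; the flag is refreshed
    # explicitly, e.g. when a new commit id is appended further below.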

    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link.
        """
        raise NotImplementedError

    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend
        """
        raise NotImplementedError

    # ==========================================================================
    # COMMITS
    # ==========================================================================

    @CachedProperty
    def commit_ids(self):
        raise NotImplementedError

    def append_commit_id(self, commit_id):
        if commit_id not in self.commit_ids:
            self._rebuild_cache(self.commit_ids + [commit_id])
-        # clear cache
-        self._invalidate_prop_cache('commit_ids')
+
+        # clear cache
+        self._invalidate_prop_cache('commit_ids')
+        self._is_empty = False

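    # Reviewer note (not part of the committed file): besides invalidating the
    # cached ``commit_ids`` property, ``append_commit_id`` now also resets the
    # cached ``_is_empty`` flag, so a repository that just received its first
    # commit is no longer reported as empty by ``is_empty()``.
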
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, most recent commit is returned.

        :param pre_load: Optional. List of commit attributes to load.

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        raise NotImplementedError

    def __iter__(self):
        for commit_id in self.commit_ids:
            yield self.get_commit(commit_id=commit_id)

    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns iterator of `BaseCommit` objects from start to end
        not inclusive. This should behave just like a list, i.e. end is not
        inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date:
        :param end_date:
        :param branch_name:
        :param show_hidden:
        :param pre_load:
        :param translate_tags:
        """
        raise NotImplementedError

    def __getitem__(self, key):
        """
        Allows index based access to the commit objects of this repository.
        """
        pre_load = ["author", "branch", "date", "message", "parents"]
        if isinstance(key, slice):
            return self._get_range(key, pre_load)
        return self.get_commit(commit_idx=key, pre_load=pre_load)

    def _get_range(self, slice_obj, pre_load):
        for commit_id in self.commit_ids.__getitem__(slice_obj):
            yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
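
    # Reviewer sketch (illustrative, not part of this module): index and slice
    # access both resolve through ``commit_ids``; ``repo`` is an assumed
    # concrete backend instance.
    #
    #     first_commit = repo[0]          # commit_ids are ascending, so 0 is the oldest
    #     some_commits = list(repo[0:3])  # slices yield a generator of commits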

    def count(self):
        return len(self.commit_ids)

    def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        raise NotImplementedError

    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exist
        """
        raise NotImplementedError

    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
            ``self.EMPTY_COMMIT`` - in this case, patch showing all
            the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: Can be set to a path of a file to create a diff of that
            file. If `path1` is also set, this value is only associated to
            `commit2`.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
            changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
            shown. Defaults to ``3``.
        :param path1: Can be set to a path to associate with `commit1`. This
            parameter works only for backends which support diff generation for
            different paths. Other backends will raise a `ValueError` if `path1`
            is set and has a different value than `path`.
        :param file_path: filter this diff by given path pattern
        """
        raise NotImplementedError
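
    # Reviewer sketch (illustrative, not part of this module): a typical call
    # on a concrete backend, diffing a single file between two commits; ``repo``
    # is an assumed backend instance.
    #
    #     commit1 = repo.get_commit(commit_idx=0)
    #     commit2 = repo.get_commit(commit_idx=1)
    #     diff_text = repo.get_diff(
    #         commit1, commit2, path='setup.py', ignore_whitespace=True, context=5)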

    def strip(self, commit_id, branch=None):
        """
        Strip given commit_id from the repository
        """
        raise NotImplementedError

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return a latest common ancestor commit if one exists for this repo
        `commit_id1` vs `commit_id2` from `repo2`.

        :param commit_id1: Commit id from this repository to use as a
            target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        """
        raise NotImplementedError

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Compare this repository's revision `commit_id1` with `commit_id2`.

        Returns a tuple(commits, ancestor) that would be merged from
        `commit_id2`. Doing a normal compare (``merge=False``), ``None``
        will be returned as ancestor.

        :param commit_id1: Commit id from this repository to use as a
            target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        :param merge: If set to ``True`` will do a merge compare which also
            returns the common ancestor.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
              user_name='', user_email='', message='', dry_run=False,
              use_rebase=False, close_branch=False):
        """
        Merge the revisions specified in `source_ref` from `source_repo`
        onto the `target_ref` of this repository.

        `source_ref` and `target_ref` are named tuples with the following
        fields `type`, `name` and `commit_id`.

        Returns a MergeResponse named tuple with the following fields
        'possible', 'executed', 'source_commit', 'target_commit',
        'merge_commit'.

        :param repo_id: `repo_id` target repo id.
        :param workspace_id: `workspace_id` unique identifier.
        :param target_ref: `target_ref` points to the commit on top of which
            the `source_ref` should be merged.
        :param source_repo: The repository that contains the commits to be
            merged.
        :param source_ref: `source_ref` points to the topmost commit from
            the `source_repo` which should be merged.
        :param user_name: Merge commit `user_name`.
        :param user_email: Merge commit `user_email`.
        :param message: Merge commit `message`.
        :param dry_run: If `True` the merge will not take place.
        :param use_rebase: If `True` commits from the source will be rebased
            on top of the target instead of being merged.
        :param close_branch: If `True` branch will be closed before merging it
        """
        if dry_run:
            message = message or settings.MERGE_DRY_RUN_MESSAGE
            user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
            user_name = user_name or settings.MERGE_DRY_RUN_USER
        else:
            if not user_name:
                raise ValueError('user_name cannot be empty')
            if not user_email:
                raise ValueError('user_email cannot be empty')
            if not message:
                raise ValueError('message cannot be empty')

        try:
            return self._merge_repo(
                repo_id, workspace_id, target_ref, source_repo,
                source_ref, message, user_name, user_email, dry_run=dry_run,
                use_rebase=use_rebase, close_branch=close_branch)
        except RepositoryError as exc:
            log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
            return MergeResponse(
                False, False, None, MergeFailureReason.UNKNOWN,
                metadata={'exception': str(exc)})
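
    # Reviewer sketch (illustrative, not part of this module): a dry-run merge
    # call. ``target_repo`` and ``source_repo`` are assumed concrete backend
    # instances; the refs are the ``Reference`` named tuples defined above.
    #
    #     target_ref = Reference('branch', 'default', target_repo.branches['default'])
    #     source_ref = Reference('branch', 'feature', source_repo.branches['feature'])
    #     resp = target_repo.merge(
    #         repo_id=1, workspace_id='pr-1', target_ref=target_ref,
    #         source_repo=source_repo, source_ref=source_ref, dry_run=True)
    #     resp.possible, resp.executed, resp.merge_ref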
629
631
630 def _merge_repo(self, repo_id, workspace_id, target_ref,
632 def _merge_repo(self, repo_id, workspace_id, target_ref,
631 source_repo, source_ref, merge_message,
633 source_repo, source_ref, merge_message,
632 merger_name, merger_email, dry_run=False,
634 merger_name, merger_email, dry_run=False,
633 use_rebase=False, close_branch=False):
635 use_rebase=False, close_branch=False):
634 """Internal implementation of merge."""
636 """Internal implementation of merge."""
635 raise NotImplementedError
637 raise NotImplementedError
636
638
637 def _maybe_prepare_merge_workspace(
639 def _maybe_prepare_merge_workspace(
638 self, repo_id, workspace_id, target_ref, source_ref):
640 self, repo_id, workspace_id, target_ref, source_ref):
639 """
641 """
640 Create the merge workspace.
642 Create the merge workspace.
641
643
642 :param workspace_id: `workspace_id` unique identifier.
644 :param workspace_id: `workspace_id` unique identifier.
643 """
645 """
644 raise NotImplementedError
646 raise NotImplementedError
645
647
646 def _get_legacy_shadow_repository_path(self, workspace_id):
648 def _get_legacy_shadow_repository_path(self, workspace_id):
647 """
649 """
648 Legacy version that was used before. We still need it for
650 Legacy version that was used before. We still need it for
649 backward compat
651 backward compat
650 """
652 """
651 return os.path.join(
653 return os.path.join(
652 os.path.dirname(self.path),
654 os.path.dirname(self.path),
653 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
655 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
654
656
655 def _get_shadow_repository_path(self, repo_id, workspace_id):
657 def _get_shadow_repository_path(self, repo_id, workspace_id):
656 # The name of the shadow repository must start with '.', so it is
658 # The name of the shadow repository must start with '.', so it is
657 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
659 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
658 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
660 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
659 if os.path.exists(legacy_repository_path):
661 if os.path.exists(legacy_repository_path):
660 return legacy_repository_path
662 return legacy_repository_path
661 else:
663 else:
662 return os.path.join(
664 return os.path.join(
663 os.path.dirname(self.path),
665 os.path.dirname(self.path),
664 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
666 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
665
667
666 def cleanup_merge_workspace(self, repo_id, workspace_id):
668 def cleanup_merge_workspace(self, repo_id, workspace_id):
667 """
669 """
668 Remove merge workspace.
670 Remove merge workspace.
669
671
670 This function MUST not fail in case there is no workspace associated to
672 This function MUST not fail in case there is no workspace associated to
671 the given `workspace_id`.
673 the given `workspace_id`.
672
674
673 :param workspace_id: `workspace_id` unique identifier.
675 :param workspace_id: `workspace_id` unique identifier.
674 """
676 """
675 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
677 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
676 shadow_repository_path_del = '{}.{}.delete'.format(
678 shadow_repository_path_del = '{}.{}.delete'.format(
677 shadow_repository_path, time.time())
679 shadow_repository_path, time.time())
678
680
679 # move the shadow repo, so it never conflicts with the one used.
681 # move the shadow repo, so it never conflicts with the one used.
680 # we use this method because shutil.rmtree had some edge case problems
682 # we use this method because shutil.rmtree had some edge case problems
681 # removing symlinked repositories
683 # removing symlinked repositories
682 if not os.path.isdir(shadow_repository_path):
684 if not os.path.isdir(shadow_repository_path):
683 return
685 return
684
686
685 shutil.move(shadow_repository_path, shadow_repository_path_del)
687 shutil.move(shadow_repository_path, shadow_repository_path_del)
686 try:
688 try:
687 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
689 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
688 except Exception:
690 except Exception:
689 log.exception('Failed to gracefully remove shadow repo under %s',
691 log.exception('Failed to gracefully remove shadow repo under %s',
690 shadow_repository_path_del)
692 shadow_repository_path_del)
691 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
693 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
692
694
693 # ========== #
695 # ========== #
694 # COMMIT API #
696 # COMMIT API #
695 # ========== #
697 # ========== #
696
698
697 @LazyProperty
699 @LazyProperty
698 def in_memory_commit(self):
700 def in_memory_commit(self):
699 """
701 """
700 Returns :class:`InMemoryCommit` object for this repository.
702 Returns :class:`InMemoryCommit` object for this repository.
701 """
703 """
702 raise NotImplementedError
704 raise NotImplementedError
703
705
704 # ======================== #
706 # ======================== #
705 # UTILITIES FOR SUBCLASSES #
707 # UTILITIES FOR SUBCLASSES #
706 # ======================== #
708 # ======================== #
707
709
708 def _validate_diff_commits(self, commit1, commit2):
710 def _validate_diff_commits(self, commit1, commit2):
709 """
711 """
710 Validates that the given commits are related to this repository.
712 Validates that the given commits are related to this repository.
711
713
712 Intended as a utility for sub classes to have a consistent validation
714 Intended as a utility for sub classes to have a consistent validation
713 of input parameters in methods like :meth:`get_diff`.
715 of input parameters in methods like :meth:`get_diff`.
714 """
716 """
715 self._validate_commit(commit1)
717 self._validate_commit(commit1)
716 self._validate_commit(commit2)
718 self._validate_commit(commit2)
717 if (isinstance(commit1, EmptyCommit) and
719 if (isinstance(commit1, EmptyCommit) and
718 isinstance(commit2, EmptyCommit)):
720 isinstance(commit2, EmptyCommit)):
719 raise ValueError("Cannot compare two empty commits")
721 raise ValueError("Cannot compare two empty commits")
720
722
721 def _validate_commit(self, commit):
723 def _validate_commit(self, commit):
722 if not isinstance(commit, BaseCommit):
724 if not isinstance(commit, BaseCommit):
723 raise TypeError(
725 raise TypeError(
724 "%s is not of type BaseCommit" % repr(commit))
726 "%s is not of type BaseCommit" % repr(commit))
725 if commit.repository != self and not isinstance(commit, EmptyCommit):
727 if commit.repository != self and not isinstance(commit, EmptyCommit):
726 raise ValueError(
728 raise ValueError(
727 "Commit %s must be a valid commit from this repository %s, "
729 "Commit %s must be a valid commit from this repository %s, "
728 "related to this repository instead %s." %
730 "related to this repository instead %s." %
729 (commit, self, commit.repository))
731 (commit, self, commit.repository))
730
732
731 def _validate_commit_id(self, commit_id):
733 def _validate_commit_id(self, commit_id):
732 if not isinstance(commit_id, compat.string_types):
734 if not isinstance(commit_id, compat.string_types):
733 raise TypeError("commit_id must be a string value")
735 raise TypeError("commit_id must be a string value")
734
736
735 def _validate_commit_idx(self, commit_idx):
737 def _validate_commit_idx(self, commit_idx):
736 if not isinstance(commit_idx, (int, long)):
738 if not isinstance(commit_idx, (int, long)):
737 raise TypeError("commit_idx must be a numeric value")
739 raise TypeError("commit_idx must be a numeric value")
738
740
739 def _validate_branch_name(self, branch_name):
741 def _validate_branch_name(self, branch_name):
740 if branch_name and branch_name not in self.branches_all:
742 if branch_name and branch_name not in self.branches_all:
741 msg = ("Branch %s not found in %s" % (branch_name, self))
743 msg = ("Branch %s not found in %s" % (branch_name, self))
742 raise BranchDoesNotExistError(msg)
744 raise BranchDoesNotExistError(msg)
743
745
744 #
746 #
745 # Supporting deprecated API parts
747 # Supporting deprecated API parts
746 # TODO: johbo: consider to move this into a mixin
748 # TODO: johbo: consider to move this into a mixin
747 #
749 #
748
750
749 @property
751 @property
750 def EMPTY_CHANGESET(self):
752 def EMPTY_CHANGESET(self):
751 warnings.warn(
753 warnings.warn(
752 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
754 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
753 return self.EMPTY_COMMIT_ID
755 return self.EMPTY_COMMIT_ID
754
756
755 @property
757 @property
756 def revisions(self):
758 def revisions(self):
757 warnings.warn("Use commits attribute instead", DeprecationWarning)
759 warnings.warn("Use commits attribute instead", DeprecationWarning)
758 return self.commit_ids
760 return self.commit_ids
759
761
760 @revisions.setter
762 @revisions.setter
761 def revisions(self, value):
763 def revisions(self, value):
762 warnings.warn("Use commits attribute instead", DeprecationWarning)
764 warnings.warn("Use commits attribute instead", DeprecationWarning)
763 self.commit_ids = value
765 self.commit_ids = value
764
766
765 def get_changeset(self, revision=None, pre_load=None):
767 def get_changeset(self, revision=None, pre_load=None):
766 warnings.warn("Use get_commit instead", DeprecationWarning)
768 warnings.warn("Use get_commit instead", DeprecationWarning)
767 commit_id = None
769 commit_id = None
768 commit_idx = None
770 commit_idx = None
769 if isinstance(revision, compat.string_types):
771 if isinstance(revision, compat.string_types):
770 commit_id = revision
772 commit_id = revision
771 else:
773 else:
772 commit_idx = revision
774 commit_idx = revision
773 return self.get_commit(
775 return self.get_commit(
774 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
776 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
775
777
776 def get_changesets(
778 def get_changesets(
777 self, start=None, end=None, start_date=None, end_date=None,
779 self, start=None, end=None, start_date=None, end_date=None,
778 branch_name=None, pre_load=None):
780 branch_name=None, pre_load=None):
779 warnings.warn("Use get_commits instead", DeprecationWarning)
781 warnings.warn("Use get_commits instead", DeprecationWarning)
780 start_id = self._revision_to_commit(start)
782 start_id = self._revision_to_commit(start)
781 end_id = self._revision_to_commit(end)
783 end_id = self._revision_to_commit(end)
782 return self.get_commits(
784 return self.get_commits(
783 start_id=start_id, end_id=end_id, start_date=start_date,
785 start_id=start_id, end_id=end_id, start_date=start_date,
784 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
786 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
785
787
786 def _revision_to_commit(self, revision):
788 def _revision_to_commit(self, revision):
787 """
789 """
788 Translates a revision to a commit_id
790 Translates a revision to a commit_id
789
791
790 Helps to support the old changeset based API which allows to use
792 Helps to support the old changeset based API which allows to use
791 commit ids and commit indices interchangeable.
793 commit ids and commit indices interchangeable.
792 """
794 """
793 if revision is None:
795 if revision is None:
794 return revision
796 return revision
795
797
796 if isinstance(revision, compat.string_types):
798 if isinstance(revision, compat.string_types):
797 commit_id = revision
799 commit_id = revision
798 else:
800 else:
799 commit_id = self.commit_ids[revision]
801 commit_id = self.commit_ids[revision]
800 return commit_id
802 return commit_id
801
803
802 @property
804 @property
803 def in_memory_changeset(self):
805 def in_memory_changeset(self):
804 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
806 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
805 return self.in_memory_commit
807 return self.in_memory_commit
806
808
807 def get_path_permissions(self, username):
809 def get_path_permissions(self, username):
808 """
810 """
809 Returns a path permission checker or None if not supported
811 Returns a path permission checker or None if not supported
810
812
811 :param username: session user name
813 :param username: session user name
812 :return: an instance of BasePathPermissionChecker or None
814 :return: an instance of BasePathPermissionChecker or None
813 """
815 """
814 return None
816 return None
815
817
816 def install_hooks(self, force=False):
818 def install_hooks(self, force=False):
817 return self._remote.install_hooks(force)
819 return self._remote.install_hooks(force)
818
820
819 def get_hooks_info(self):
821 def get_hooks_info(self):
820 return self._remote.get_hooks_info()
822 return self._remote.get_hooks_info()
821
823
822
824
823 class BaseCommit(object):
825 class BaseCommit(object):
824 """
826 """
825 Each backend should implement it's commit representation.
827 Each backend should implement it's commit representation.
826
828
827 **Attributes**
829 **Attributes**
828
830
829 ``repository``
831 ``repository``
830 repository object within which commit exists
832 repository object within which commit exists
831
833
832 ``id``
834 ``id``
833 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
835 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
834 just ``tip``.
836 just ``tip``.
835
837
836 ``raw_id``
838 ``raw_id``
837 raw commit representation (i.e. full 40 length sha for git
839 raw commit representation (i.e. full 40 length sha for git
838 backend)
840 backend)
839
841
840 ``short_id``
842 ``short_id``
841 shortened (if apply) version of ``raw_id``; it would be simple
843 shortened (if apply) version of ``raw_id``; it would be simple
842 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
844 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
843 as ``raw_id`` for subversion
845 as ``raw_id`` for subversion
844
846
845 ``idx``
847 ``idx``
846 commit index
848 commit index
847
849
848 ``files``
850 ``files``
849 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
851 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
850
852
851 ``dirs``
853 ``dirs``
852 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
854 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
853
855
854 ``nodes``
856 ``nodes``
855 combined list of ``Node`` objects
857 combined list of ``Node`` objects
856
858
857 ``author``
859 ``author``
858 author of the commit, as unicode
860 author of the commit, as unicode
859
861
860 ``message``
862 ``message``
861 message of the commit, as unicode
863 message of the commit, as unicode
862
864
863 ``parents``
865 ``parents``
864 list of parent commits
866 list of parent commits
865
867
866 """
868 """
867
869
868 branch = None
870 branch = None
869 """
871 """
870 Depending on the backend this should be set to the branch name of the
872 Depending on the backend this should be set to the branch name of the
871 commit. Backends not supporting branches on commits should leave this
873 commit. Backends not supporting branches on commits should leave this
872 value as ``None``.
874 value as ``None``.
873 """
875 """
874
876
875 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
877 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
876 """
878 """
877 This template is used to generate a default prefix for repository archives
879 This template is used to generate a default prefix for repository archives
878 if no prefix has been specified.
880 if no prefix has been specified.
879 """
881 """
880
882
881 def __str__(self):
883 def __str__(self):
882 return '<%s at %s:%s>' % (
884 return '<%s at %s:%s>' % (
883 self.__class__.__name__, self.idx, self.short_id)
885 self.__class__.__name__, self.idx, self.short_id)
884
886
885 def __repr__(self):
887 def __repr__(self):
886 return self.__str__()
888 return self.__str__()
887
889
888 def __unicode__(self):
890 def __unicode__(self):
889 return u'%s:%s' % (self.idx, self.short_id)
891 return u'%s:%s' % (self.idx, self.short_id)
890
892
891 def __eq__(self, other):
893 def __eq__(self, other):
892 same_instance = isinstance(other, self.__class__)
894 same_instance = isinstance(other, self.__class__)
893 return same_instance and self.raw_id == other.raw_id
895 return same_instance and self.raw_id == other.raw_id
894
896
895 def __json__(self):
897 def __json__(self):
896 parents = []
898 parents = []
897 try:
899 try:
898 for parent in self.parents:
900 for parent in self.parents:
899 parents.append({'raw_id': parent.raw_id})
901 parents.append({'raw_id': parent.raw_id})
900 except NotImplementedError:
902 except NotImplementedError:
901 # empty commit doesn't have parents implemented
903 # empty commit doesn't have parents implemented
902 pass
904 pass
903
905
904 return {
906 return {
905 'short_id': self.short_id,
907 'short_id': self.short_id,
906 'raw_id': self.raw_id,
908 'raw_id': self.raw_id,
907 'revision': self.idx,
909 'revision': self.idx,
908 'message': self.message,
910 'message': self.message,
909 'date': self.date,
911 'date': self.date,
910 'author': self.author,
912 'author': self.author,
911 'parents': parents,
913 'parents': parents,
912 'branch': self.branch
914 'branch': self.branch
913 }
915 }
914
916
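# Illustrative usage sketch (not from the original sources): ``__json__``
# returns a plain dict that serializes cleanly; ``commit`` below stands for
# an assumed concrete backend commit instance.
#
#   data = commit.__json__()
#   parent_ids = [p['raw_id'] for p in data['parents']]
#   assert data['short_id'] == commit.short_id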
915 def __getstate__(self):
917 def __getstate__(self):
916 d = self.__dict__.copy()
918 d = self.__dict__.copy()
917 d.pop('_remote', None)
919 d.pop('_remote', None)
918 d.pop('repository', None)
920 d.pop('repository', None)
919 return d
921 return d
920
922
921 def _get_refs(self):
923 def _get_refs(self):
922 return {
924 return {
923 'branches': [self.branch] if self.branch else [],
925 'branches': [self.branch] if self.branch else [],
924 'bookmarks': getattr(self, 'bookmarks', []),
926 'bookmarks': getattr(self, 'bookmarks', []),
925 'tags': self.tags
927 'tags': self.tags
926 }
928 }
927
929
928 @LazyProperty
930 @LazyProperty
929 def last(self):
931 def last(self):
930 """
932 """
931 ``True`` if this is the last commit in the repository, ``False``
933 ``True`` if this is the last commit in the repository, ``False``
932 otherwise; trying to access this attribute while there are no
934 otherwise; trying to access this attribute while there are no
933 commits would raise `EmptyRepositoryError`
935 commits would raise `EmptyRepositoryError`
934 """
936 """
935 if self.repository is None:
937 if self.repository is None:
936 raise CommitError("Cannot check if it's the most recent commit")
938 raise CommitError("Cannot check if it's the most recent commit")
937 return self.raw_id == self.repository.commit_ids[-1]
939 return self.raw_id == self.repository.commit_ids[-1]
938
940
939 @LazyProperty
941 @LazyProperty
940 def parents(self):
942 def parents(self):
941 """
943 """
942 Returns list of parent commits.
944 Returns list of parent commits.
943 """
945 """
944 raise NotImplementedError
946 raise NotImplementedError
945
947
946 @LazyProperty
948 @LazyProperty
947 def first_parent(self):
949 def first_parent(self):
948 """
950 """
949 Returns list of parent commits.
951 Returns list of parent commits.
950 """
952 """
951 return self.parents[0] if self.parents else EmptyCommit()
953 return self.parents[0] if self.parents else EmptyCommit()
952
954
953 @property
955 @property
954 def merge(self):
956 def merge(self):
955 """
957 """
956 Returns boolean if commit is a merge.
958 Returns boolean if commit is a merge.
957 """
959 """
958 return len(self.parents) > 1
960 return len(self.parents) > 1
959
961
960 @LazyProperty
962 @LazyProperty
961 def children(self):
963 def children(self):
962 """
964 """
963 Returns list of child commits.
965 Returns list of child commits.
964 """
966 """
965 raise NotImplementedError
967 raise NotImplementedError
966
968
967 @LazyProperty
969 @LazyProperty
968 def id(self):
970 def id(self):
969 """
971 """
970 Returns string identifying this commit.
972 Returns string identifying this commit.
971 """
973 """
972 raise NotImplementedError
974 raise NotImplementedError
973
975
974 @LazyProperty
976 @LazyProperty
975 def raw_id(self):
977 def raw_id(self):
976 """
978 """
977 Returns raw string identifying this commit.
979 Returns raw string identifying this commit.
978 """
980 """
979 raise NotImplementedError
981 raise NotImplementedError
980
982
981 @LazyProperty
983 @LazyProperty
982 def short_id(self):
984 def short_id(self):
983 """
985 """
984 Returns shortened version of ``raw_id`` attribute, as string,
986 Returns shortened version of ``raw_id`` attribute, as string,
985 identifying this commit, useful for presentation to users.
987 identifying this commit, useful for presentation to users.
986 """
988 """
987 raise NotImplementedError
989 raise NotImplementedError
988
990
989 @LazyProperty
991 @LazyProperty
990 def idx(self):
992 def idx(self):
991 """
993 """
992 Returns integer identifying this commit.
994 Returns integer identifying this commit.
993 """
995 """
994 raise NotImplementedError
996 raise NotImplementedError
995
997
996 @LazyProperty
998 @LazyProperty
997 def committer(self):
999 def committer(self):
998 """
1000 """
999 Returns committer for this commit
1001 Returns committer for this commit
1000 """
1002 """
1001 raise NotImplementedError
1003 raise NotImplementedError
1002
1004
1003 @LazyProperty
1005 @LazyProperty
1004 def committer_name(self):
1006 def committer_name(self):
1005 """
1007 """
1006 Returns committer name for this commit
1008 Returns committer name for this commit
1007 """
1009 """
1008
1010
1009 return author_name(self.committer)
1011 return author_name(self.committer)
1010
1012
1011 @LazyProperty
1013 @LazyProperty
1012 def committer_email(self):
1014 def committer_email(self):
1013 """
1015 """
1014 Returns committer email address for this commit
1016 Returns committer email address for this commit
1015 """
1017 """
1016
1018
1017 return author_email(self.committer)
1019 return author_email(self.committer)
1018
1020
1019 @LazyProperty
1021 @LazyProperty
1020 def author(self):
1022 def author(self):
1021 """
1023 """
1022 Returns author for this commit
1024 Returns author for this commit
1023 """
1025 """
1024
1026
1025 raise NotImplementedError
1027 raise NotImplementedError
1026
1028
1027 @LazyProperty
1029 @LazyProperty
1028 def author_name(self):
1030 def author_name(self):
1029 """
1031 """
1030 Returns author name for this commit
1032 Returns author name for this commit
1031 """
1033 """
1032
1034
1033 return author_name(self.author)
1035 return author_name(self.author)
1034
1036
1035 @LazyProperty
1037 @LazyProperty
1036 def author_email(self):
1038 def author_email(self):
1037 """
1039 """
1038 Returns author email address for this commit
1040 Returns author email address for this commit
1039 """
1041 """
1040
1042
1041 return author_email(self.author)
1043 return author_email(self.author)
1042
1044
1043 def get_file_mode(self, path):
1045 def get_file_mode(self, path):
1044 """
1046 """
1045 Returns stat mode of the file at `path`.
1047 Returns stat mode of the file at `path`.
1046 """
1048 """
1047 raise NotImplementedError
1049 raise NotImplementedError
1048
1050
1049 def is_link(self, path):
1051 def is_link(self, path):
1050 """
1052 """
1051 Returns ``True`` if given `path` is a symlink
1053 Returns ``True`` if given `path` is a symlink
1052 """
1054 """
1053 raise NotImplementedError
1055 raise NotImplementedError
1054
1056
1055 def get_file_content(self, path):
1057 def get_file_content(self, path):
1056 """
1058 """
1057 Returns content of the file at the given `path`.
1059 Returns content of the file at the given `path`.
1058 """
1060 """
1059 raise NotImplementedError
1061 raise NotImplementedError
1060
1062
1061 def get_file_size(self, path):
1063 def get_file_size(self, path):
1062 """
1064 """
1063 Returns size of the file at the given `path`.
1065 Returns size of the file at the given `path`.
1064 """
1066 """
1065 raise NotImplementedError
1067 raise NotImplementedError
1066
1068
1067 def get_path_commit(self, path, pre_load=None):
1069 def get_path_commit(self, path, pre_load=None):
1068 """
1070 """
1069 Returns last commit of the file at the given `path`.
1071 Returns last commit of the file at the given `path`.
1070
1072
1071 :param pre_load: Optional. List of commit attributes to load.
1073 :param pre_load: Optional. List of commit attributes to load.
1072 """
1074 """
1073 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1075 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1074 if not commits:
1076 if not commits:
1075 raise RepositoryError(
1077 raise RepositoryError(
1076 'Failed to fetch history for path {}. '
1078 'Failed to fetch history for path {}. '
1077 'Please check if such path exists in your repository'.format(
1079 'Please check if such path exists in your repository'.format(
1078 path))
1080 path))
1079 return commits[0]
1081 return commits[0]
1080
1082
1081 def get_path_history(self, path, limit=None, pre_load=None):
1083 def get_path_history(self, path, limit=None, pre_load=None):
1082 """
1084 """
1083 Returns history of file as reversed list of :class:`BaseCommit`
1085 Returns history of file as reversed list of :class:`BaseCommit`
1084 objects for which file at given `path` has been modified.
1086 objects for which file at given `path` has been modified.
1085
1087
1086 :param limit: Optional. Allows limiting the size of the returned
1088 :param limit: Optional. Allows limiting the size of the returned
1087 history. This is intended as a hint to the underlying backend, so
1089 history. This is intended as a hint to the underlying backend, so
1088 that it can apply optimizations depending on the limit.
1090 that it can apply optimizations depending on the limit.
1089 :param pre_load: Optional. List of commit attributes to load.
1091 :param pre_load: Optional. List of commit attributes to load.
1090 """
1092 """
1091 raise NotImplementedError
1093 raise NotImplementedError
1092
1094
1093 def get_file_annotate(self, path, pre_load=None):
1095 def get_file_annotate(self, path, pre_load=None):
1094 """
1096 """
1095 Returns a generator of four element tuples with
1097 Returns a generator of four element tuples with
1096 lineno, sha, commit lazy loader and line
1098 lineno, sha, commit lazy loader and line
1097
1099
1098 :param pre_load: Optional. List of commit attributes to load.
1100 :param pre_load: Optional. List of commit attributes to load.
1099 """
1101 """
1100 raise NotImplementedError
1102 raise NotImplementedError
1101
1103
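# Illustrative usage sketch for ``get_file_annotate`` (``commit`` and the
# file path are assumed examples, not part of the original sources):
#
#   for lineno, sha, commit_loader, line in commit.get_file_annotate('setup.py'):
#       blame_commit = commit_loader()  # lazily resolves the full commit
#       print(lineno, sha, line)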
1102 def get_nodes(self, path):
1104 def get_nodes(self, path):
1103 """
1105 """
1104 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1106 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1105 state of commit at the given ``path``.
1107 state of commit at the given ``path``.
1106
1108
1107 :raises ``CommitError``: if node at the given ``path`` is not
1109 :raises ``CommitError``: if node at the given ``path`` is not
1108 an instance of ``DirNode``
1110 an instance of ``DirNode``
1109 """
1111 """
1110 raise NotImplementedError
1112 raise NotImplementedError
1111
1113
1112 def get_node(self, path):
1114 def get_node(self, path):
1113 """
1115 """
1114 Returns ``Node`` object from the given ``path``.
1116 Returns ``Node`` object from the given ``path``.
1115
1117
1116 :raises ``NodeDoesNotExistError``: if there is no node at the given
1118 :raises ``NodeDoesNotExistError``: if there is no node at the given
1117 ``path``
1119 ``path``
1118 """
1120 """
1119 raise NotImplementedError
1121 raise NotImplementedError
1120
1122
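# Illustrative usage sketch for ``get_node``/``get_nodes`` (``commit`` and the
# paths are assumed examples):
#
#   readme = commit.get_node('README.rst')    # FileNode, or NodeDoesNotExistError
#   docs_children = commit.get_nodes('docs')  # DirNode/FileNode objects under docs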
1121 def get_largefile_node(self, path):
1123 def get_largefile_node(self, path):
1122 """
1124 """
1123 Returns the path to the largefile from Mercurial/Git-lfs storage,
1125 Returns the path to the largefile from Mercurial/Git-lfs storage,
1124 or None if it's not a largefile node.
1126 or None if it's not a largefile node.
1125 """
1127 """
1126 return None
1128 return None
1127
1129
1128 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1130 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1129 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1131 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1130 """
1132 """
1131 Creates an archive containing the contents of the repository.
1133 Creates an archive containing the contents of the repository.
1132
1134
1133 :param archive_dest_path: path of the archive file to be created.
1135 :param archive_dest_path: path of the archive file to be created.
1134 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1136 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1135 :param prefix: name of root directory in archive.
1137 :param prefix: name of root directory in archive.
1136 Default is repository name and commit's short_id joined with dash:
1138 Default is repository name and commit's short_id joined with dash:
1137 ``"{repo_name}-{short_id}"``.
1139 ``"{repo_name}-{short_id}"``.
1138 :param write_metadata: write a metadata file into archive.
1140 :param write_metadata: write a metadata file into archive.
1139 :param mtime: custom modification time for archive creation, defaults
1141 :param mtime: custom modification time for archive creation, defaults
1140 to the commit's date if not given.
1142 to the commit's date if not given.
1141 :param archive_at_path: pack files at this path (default '/')
1143 :param archive_at_path: pack files at this path (default '/')
1142
1144
1143 :raise VCSError: If prefix has a problem.
1145 :raise VCSError: If prefix has a problem.
1144 """
1146 """
1145 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1147 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1146 if kind not in allowed_kinds:
1148 if kind not in allowed_kinds:
1147 raise ImproperArchiveTypeError(
1149 raise ImproperArchiveTypeError(
1148 'Archive kind (%s) not supported, use one of %s' %
1150 'Archive kind (%s) not supported, use one of %s' %
1149 (kind, allowed_kinds))
1151 (kind, allowed_kinds))
1150
1152
1151 prefix = self._validate_archive_prefix(prefix)
1153 prefix = self._validate_archive_prefix(prefix)
1152
1154
1153 mtime = mtime if mtime is not None else time.mktime(self.date.timetuple())
1155 mtime = mtime if mtime is not None else time.mktime(self.date.timetuple())
1154
1156
1155 file_info = []
1157 file_info = []
1156 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1158 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1157 for _r, _d, files in cur_rev.walk(archive_at_path):
1159 for _r, _d, files in cur_rev.walk(archive_at_path):
1158 for f in files:
1160 for f in files:
1159 f_path = os.path.join(prefix, f.path)
1161 f_path = os.path.join(prefix, f.path)
1160 file_info.append(
1162 file_info.append(
1161 (f_path, f.mode, f.is_link(), f.raw_bytes))
1163 (f_path, f.mode, f.is_link(), f.raw_bytes))
1162
1164
1163 if write_metadata:
1165 if write_metadata:
1164 metadata = [
1166 metadata = [
1165 ('repo_name', self.repository.name),
1167 ('repo_name', self.repository.name),
1166 ('commit_id', self.raw_id),
1168 ('commit_id', self.raw_id),
1167 ('mtime', mtime),
1169 ('mtime', mtime),
1168 ('branch', self.branch),
1170 ('branch', self.branch),
1169 ('tags', ','.join(self.tags)),
1171 ('tags', ','.join(self.tags)),
1170 ]
1172 ]
1171 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1173 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1172 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1174 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1173
1175
1174 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1176 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1175
1177
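# Illustrative usage sketch for ``archive_repo`` (the destination path and
# ``commit`` are assumed examples):
#
#   commit.archive_repo('/tmp/example.tar.gz', kind='tgz', write_metadata=True)
#   # creates /tmp/example.tar.gz with a '{repo_name}-{short_id}' root prefix
#   # and an extra .archival.txt metadata file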
1176 def _validate_archive_prefix(self, prefix):
1178 def _validate_archive_prefix(self, prefix):
1177 if prefix is None:
1179 if prefix is None:
1178 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1180 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1179 repo_name=safe_str(self.repository.name),
1181 repo_name=safe_str(self.repository.name),
1180 short_id=self.short_id)
1182 short_id=self.short_id)
1181 elif not isinstance(prefix, str):
1183 elif not isinstance(prefix, str):
1182 raise ValueError("prefix is not a str (bytes) object: %s" % repr(prefix))
1184 raise ValueError("prefix is not a str (bytes) object: %s" % repr(prefix))
1183 elif prefix.startswith('/'):
1185 elif prefix.startswith('/'):
1184 raise VCSError("Prefix cannot start with leading slash")
1186 raise VCSError("Prefix cannot start with leading slash")
1185 elif prefix.strip() == '':
1187 elif prefix.strip() == '':
1186 raise VCSError("Prefix cannot be empty")
1188 raise VCSError("Prefix cannot be empty")
1187 return prefix
1189 return prefix
1188
1190
1189 @LazyProperty
1191 @LazyProperty
1190 def root(self):
1192 def root(self):
1191 """
1193 """
1192 Returns ``RootNode`` object for this commit.
1194 Returns ``RootNode`` object for this commit.
1193 """
1195 """
1194 return self.get_node('')
1196 return self.get_node('')
1195
1197
1196 def next(self, branch=None):
1198 def next(self, branch=None):
1197 """
1199 """
1198 Returns the next commit from the current one; if branch is given it will
1200 Returns the next commit from the current one; if branch is given it will
1199 return the next commit belonging to that branch
1201 return the next commit belonging to that branch
1200
1202
1201 :param branch: show commits within the given named branch
1203 :param branch: show commits within the given named branch
1202 """
1204 """
1203 indexes = xrange(self.idx + 1, self.repository.count())
1205 indexes = xrange(self.idx + 1, self.repository.count())
1204 return self._find_next(indexes, branch)
1206 return self._find_next(indexes, branch)
1205
1207
1206 def prev(self, branch=None):
1208 def prev(self, branch=None):
1207 """
1209 """
1208 Returns the previous commit from the current one; if branch is given it
1210 Returns the previous commit from the current one; if branch is given it
1209 will return the previous commit belonging to that branch
1211 will return the previous commit belonging to that branch
1210
1212
1211 :param branch: show commit within the given named branch
1213 :param branch: show commit within the given named branch
1212 """
1214 """
1213 indexes = xrange(self.idx - 1, -1, -1)
1215 indexes = xrange(self.idx - 1, -1, -1)
1214 return self._find_next(indexes, branch)
1216 return self._find_next(indexes, branch)
1215
1217
1216 def _find_next(self, indexes, branch=None):
1218 def _find_next(self, indexes, branch=None):
1217 if branch and self.branch != branch:
1219 if branch and self.branch != branch:
1218 raise VCSError('Branch option used on commit not belonging '
1220 raise VCSError('Branch option used on commit not belonging '
1219 'to that branch')
1221 'to that branch')
1220
1222
1221 for next_idx in indexes:
1223 for next_idx in indexes:
1222 commit = self.repository.get_commit(commit_idx=next_idx)
1224 commit = self.repository.get_commit(commit_idx=next_idx)
1223 if branch and branch != commit.branch:
1225 if branch and branch != commit.branch:
1224 continue
1226 continue
1225 return commit
1227 return commit
1226 raise CommitDoesNotExistError
1228 raise CommitDoesNotExistError
1227
1229
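# Illustrative usage sketch for ``next``/``prev`` (``commit`` is an assumed
# concrete backend commit; the branch name is an example):
#
#   newer = commit.next()                  # CommitDoesNotExistError at the tip
#   older = commit.prev(branch='default')  # previous commit on the same branch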
1228 def diff(self, ignore_whitespace=True, context=3):
1230 def diff(self, ignore_whitespace=True, context=3):
1229 """
1231 """
1230 Returns a `Diff` object representing the change made by this commit.
1232 Returns a `Diff` object representing the change made by this commit.
1231 """
1233 """
1232 parent = self.first_parent
1234 parent = self.first_parent
1233 diff = self.repository.get_diff(
1235 diff = self.repository.get_diff(
1234 parent, self,
1236 parent, self,
1235 ignore_whitespace=ignore_whitespace,
1237 ignore_whitespace=ignore_whitespace,
1236 context=context)
1238 context=context)
1237 return diff
1239 return diff
1238
1240
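# Illustrative usage sketch for ``diff`` (``commit`` is an assumed concrete
# backend commit):
#
#   diff = commit.diff(ignore_whitespace=False, context=5)
#   for chunk in diff.chunks():
#       print(chunk.header, len(chunk.diff))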
1239 @LazyProperty
1241 @LazyProperty
1240 def added(self):
1242 def added(self):
1241 """
1243 """
1242 Returns list of added ``FileNode`` objects.
1244 Returns list of added ``FileNode`` objects.
1243 """
1245 """
1244 raise NotImplementedError
1246 raise NotImplementedError
1245
1247
1246 @LazyProperty
1248 @LazyProperty
1247 def changed(self):
1249 def changed(self):
1248 """
1250 """
1249 Returns list of modified ``FileNode`` objects.
1251 Returns list of modified ``FileNode`` objects.
1250 """
1252 """
1251 raise NotImplementedError
1253 raise NotImplementedError
1252
1254
1253 @LazyProperty
1255 @LazyProperty
1254 def removed(self):
1256 def removed(self):
1255 """
1257 """
1256 Returns list of removed ``FileNode`` objects.
1258 Returns list of removed ``FileNode`` objects.
1257 """
1259 """
1258 raise NotImplementedError
1260 raise NotImplementedError
1259
1261
1260 @LazyProperty
1262 @LazyProperty
1261 def size(self):
1263 def size(self):
1262 """
1264 """
1263 Returns total number of bytes from contents of all filenodes.
1265 Returns total number of bytes from contents of all filenodes.
1264 """
1266 """
1265 return sum((node.size for node in self.get_filenodes_generator()))
1267 return sum((node.size for node in self.get_filenodes_generator()))
1266
1268
1267 def walk(self, topurl=''):
1269 def walk(self, topurl=''):
1268 """
1270 """
1269 Similar to the os.walk method. Instead of the filesystem it walks through
1271 Similar to the os.walk method. Instead of the filesystem it walks through
1270 the commit starting at the given ``topurl``. Returns a generator of tuples
1272 the commit starting at the given ``topurl``. Returns a generator of tuples
1271 (topnode, dirnodes, filenodes).
1273 (topnode, dirnodes, filenodes).
1272 """
1274 """
1273 topnode = self.get_node(topurl)
1275 topnode = self.get_node(topurl)
1274 if not topnode.is_dir():
1276 if not topnode.is_dir():
1275 return
1277 return
1276 yield (topnode, topnode.dirs, topnode.files)
1278 yield (topnode, topnode.dirs, topnode.files)
1277 for dirnode in topnode.dirs:
1279 for dirnode in topnode.dirs:
1278 for tup in self.walk(dirnode.path):
1280 for tup in self.walk(dirnode.path):
1279 yield tup
1281 yield tup
1280
1282
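# Illustrative usage sketch for ``walk`` (``commit`` and the start path are
# assumed examples):
#
#   for topnode, dirnodes, filenodes in commit.walk('docs'):
#       for f in filenodes:
#           print(f.path, f.size)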
1281 def get_filenodes_generator(self):
1283 def get_filenodes_generator(self):
1282 """
1284 """
1283 Returns generator that yields *all* file nodes.
1285 Returns generator that yields *all* file nodes.
1284 """
1286 """
1285 for topnode, dirs, files in self.walk():
1287 for topnode, dirs, files in self.walk():
1286 for node in files:
1288 for node in files:
1287 yield node
1289 yield node
1288
1290
1289 #
1291 #
1290 # Utilities for sub classes to support consistent behavior
1292 # Utilities for sub classes to support consistent behavior
1291 #
1293 #
1292
1294
1293 def no_node_at_path(self, path):
1295 def no_node_at_path(self, path):
1294 return NodeDoesNotExistError(
1296 return NodeDoesNotExistError(
1295 u"There is no file nor directory at the given path: "
1297 u"There is no file nor directory at the given path: "
1296 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1298 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1297
1299
1298 def _fix_path(self, path):
1300 def _fix_path(self, path):
1299 """
1301 """
1300 Paths are stored without a trailing slash so we need to get rid of it if
1302 Paths are stored without a trailing slash so we need to get rid of it if
1301 needed.
1303 needed.
1302 """
1304 """
1303 return path.rstrip('/')
1305 return path.rstrip('/')
1304
1306
1305 #
1307 #
1306 # Deprecated API based on changesets
1308 # Deprecated API based on changesets
1307 #
1309 #
1308
1310
1309 @property
1311 @property
1310 def revision(self):
1312 def revision(self):
1311 warnings.warn("Use idx instead", DeprecationWarning)
1313 warnings.warn("Use idx instead", DeprecationWarning)
1312 return self.idx
1314 return self.idx
1313
1315
1314 @revision.setter
1316 @revision.setter
1315 def revision(self, value):
1317 def revision(self, value):
1316 warnings.warn("Use idx instead", DeprecationWarning)
1318 warnings.warn("Use idx instead", DeprecationWarning)
1317 self.idx = value
1319 self.idx = value
1318
1320
1319 def get_file_changeset(self, path):
1321 def get_file_changeset(self, path):
1320 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1322 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1321 return self.get_path_commit(path)
1323 return self.get_path_commit(path)
1322
1324
1323
1325
1324 class BaseChangesetClass(type):
1326 class BaseChangesetClass(type):
1325
1327
1326 def __instancecheck__(self, instance):
1328 def __instancecheck__(self, instance):
1327 return isinstance(instance, BaseCommit)
1329 return isinstance(instance, BaseCommit)
1328
1330
1329
1331
1330 class BaseChangeset(BaseCommit):
1332 class BaseChangeset(BaseCommit):
1331
1333
1332 __metaclass__ = BaseChangesetClass
1334 __metaclass__ = BaseChangesetClass
1333
1335
1334 def __new__(cls, *args, **kwargs):
1336 def __new__(cls, *args, **kwargs):
1335 warnings.warn(
1337 warnings.warn(
1336 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1338 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1337 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1339 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1338
1340
1339
1341
1340 class BaseInMemoryCommit(object):
1342 class BaseInMemoryCommit(object):
1341 """
1343 """
1342 Represents differences between repository's state (most recent head) and
1344 Represents differences between repository's state (most recent head) and
1343 changes made *in place*.
1345 changes made *in place*.
1344
1346
1345 **Attributes**
1347 **Attributes**
1346
1348
1347 ``repository``
1349 ``repository``
1348 repository object for this in-memory-commit
1350 repository object for this in-memory-commit
1349
1351
1350 ``added``
1352 ``added``
1351 list of ``FileNode`` objects marked as *added*
1353 list of ``FileNode`` objects marked as *added*
1352
1354
1353 ``changed``
1355 ``changed``
1354 list of ``FileNode`` objects marked as *changed*
1356 list of ``FileNode`` objects marked as *changed*
1355
1357
1356 ``removed``
1358 ``removed``
1357 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1359 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1358 *removed*
1360 *removed*
1359
1361
1360 ``parents``
1362 ``parents``
1361 list of :class:`BaseCommit` instances representing parents of
1363 list of :class:`BaseCommit` instances representing parents of
1362 in-memory commit. Should always be a 2-element sequence.
1364 in-memory commit. Should always be a 2-element sequence.
1363
1365
1364 """
1366 """
1365
1367
1366 def __init__(self, repository):
1368 def __init__(self, repository):
1367 self.repository = repository
1369 self.repository = repository
1368 self.added = []
1370 self.added = []
1369 self.changed = []
1371 self.changed = []
1370 self.removed = []
1372 self.removed = []
1371 self.parents = []
1373 self.parents = []
1372
1374
1373 def add(self, *filenodes):
1375 def add(self, *filenodes):
1374 """
1376 """
1375 Marks given ``FileNode`` objects as *to be committed*.
1377 Marks given ``FileNode`` objects as *to be committed*.
1376
1378
1377 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1379 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1378 latest commit
1380 latest commit
1379 :raises ``NodeAlreadyAddedError``: if node with same path is already
1381 :raises ``NodeAlreadyAddedError``: if node with same path is already
1380 marked as *added*
1382 marked as *added*
1381 """
1383 """
1382 # Check if not already marked as *added* first
1384 # Check if not already marked as *added* first
1383 for node in filenodes:
1385 for node in filenodes:
1384 if node.path in (n.path for n in self.added):
1386 if node.path in (n.path for n in self.added):
1385 raise NodeAlreadyAddedError(
1387 raise NodeAlreadyAddedError(
1386 "Such FileNode %s is already marked for addition"
1388 "Such FileNode %s is already marked for addition"
1387 % node.path)
1389 % node.path)
1388 for node in filenodes:
1390 for node in filenodes:
1389 self.added.append(node)
1391 self.added.append(node)
1390
1392
1391 def change(self, *filenodes):
1393 def change(self, *filenodes):
1392 """
1394 """
1393 Marks given ``FileNode`` objects to be *changed* in next commit.
1395 Marks given ``FileNode`` objects to be *changed* in next commit.
1394
1396
1395 :raises ``EmptyRepositoryError``: if there are no commits yet
1397 :raises ``EmptyRepositoryError``: if there are no commits yet
1396 :raises ``NodeAlreadyExistsError``: if node with same path is already
1398 :raises ``NodeAlreadyExistsError``: if node with same path is already
1397 marked to be *changed*
1399 marked to be *changed*
1398 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1400 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1399 marked to be *removed*
1401 marked to be *removed*
1400 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1402 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1401 commit
1403 commit
1402 :raises ``NodeNotChangedError``: if node hasn't really been changed
1404 :raises ``NodeNotChangedError``: if node hasn't really been changed
1403 """
1405 """
1404 for node in filenodes:
1406 for node in filenodes:
1405 if node.path in (n.path for n in self.removed):
1407 if node.path in (n.path for n in self.removed):
1406 raise NodeAlreadyRemovedError(
1408 raise NodeAlreadyRemovedError(
1407 "Node at %s is already marked as removed" % node.path)
1409 "Node at %s is already marked as removed" % node.path)
1408 try:
1410 try:
1409 self.repository.get_commit()
1411 self.repository.get_commit()
1410 except EmptyRepositoryError:
1412 except EmptyRepositoryError:
1411 raise EmptyRepositoryError(
1413 raise EmptyRepositoryError(
1412 "Nothing to change - try to *add* new nodes rather than "
1414 "Nothing to change - try to *add* new nodes rather than "
1413 "changing them")
1415 "changing them")
1414 for node in filenodes:
1416 for node in filenodes:
1415 if node.path in (n.path for n in self.changed):
1417 if node.path in (n.path for n in self.changed):
1416 raise NodeAlreadyChangedError(
1418 raise NodeAlreadyChangedError(
1417 "Node at '%s' is already marked as changed" % node.path)
1419 "Node at '%s' is already marked as changed" % node.path)
1418 self.changed.append(node)
1420 self.changed.append(node)
1419
1421
1420 def remove(self, *filenodes):
1422 def remove(self, *filenodes):
1421 """
1423 """
1422 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1424 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1423 *removed* in next commit.
1425 *removed* in next commit.
1424
1426
1425 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1427 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1426 be *removed*
1428 be *removed*
1427 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1429 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1428 be *changed*
1430 be *changed*
1429 """
1431 """
1430 for node in filenodes:
1432 for node in filenodes:
1431 if node.path in (n.path for n in self.removed):
1433 if node.path in (n.path for n in self.removed):
1432 raise NodeAlreadyRemovedError(
1434 raise NodeAlreadyRemovedError(
1433 "Node is already marked for removal at %s" % node.path)
1435 "Node is already marked for removal at %s" % node.path)
1434 if node.path in (n.path for n in self.changed):
1436 if node.path in (n.path for n in self.changed):
1435 raise NodeAlreadyChangedError(
1437 raise NodeAlreadyChangedError(
1436 "Node is already marked to be changed at %s" % node.path)
1438 "Node is already marked to be changed at %s" % node.path)
1437 # We only mark node as *removed* - real removal is done by
1439 # We only mark node as *removed* - real removal is done by
1438 # commit method
1440 # commit method
1439 self.removed.append(node)
1441 self.removed.append(node)
1440
1442
1441 def reset(self):
1443 def reset(self):
1442 """
1444 """
1443 Resets this instance to initial state (cleans ``added``, ``changed``
1445 Resets this instance to initial state (cleans ``added``, ``changed``
1444 and ``removed`` lists).
1446 and ``removed`` lists).
1445 """
1447 """
1446 self.added = []
1448 self.added = []
1447 self.changed = []
1449 self.changed = []
1448 self.removed = []
1450 self.removed = []
1449 self.parents = []
1451 self.parents = []
1450
1452
1451 def get_ipaths(self):
1453 def get_ipaths(self):
1452 """
1454 """
1453 Returns generator of paths from nodes marked as added, changed or
1455 Returns generator of paths from nodes marked as added, changed or
1454 removed.
1456 removed.
1455 """
1457 """
1456 for node in itertools.chain(self.added, self.changed, self.removed):
1458 for node in itertools.chain(self.added, self.changed, self.removed):
1457 yield node.path
1459 yield node.path
1458
1460
1459 def get_paths(self):
1461 def get_paths(self):
1460 """
1462 """
1461 Returns list of paths from nodes marked as added, changed or removed.
1463 Returns list of paths from nodes marked as added, changed or removed.
1462 """
1464 """
1463 return list(self.get_ipaths())
1465 return list(self.get_ipaths())
1464
1466
1465 def check_integrity(self, parents=None):
1467 def check_integrity(self, parents=None):
1466 """
1468 """
1467 Checks in-memory commit's integrity. Also, sets parents if not
1469 Checks in-memory commit's integrity. Also, sets parents if not
1468 already set.
1470 already set.
1469
1471
1470 :raises CommitError: if any error occurs (i.e.
1472 :raises CommitError: if any error occurs (i.e.
1471 ``NodeDoesNotExistError``).
1473 ``NodeDoesNotExistError``).
1472 """
1474 """
1473 if not self.parents:
1475 if not self.parents:
1474 parents = parents or []
1476 parents = parents or []
1475 if len(parents) == 0:
1477 if len(parents) == 0:
1476 try:
1478 try:
1477 parents = [self.repository.get_commit(), None]
1479 parents = [self.repository.get_commit(), None]
1478 except EmptyRepositoryError:
1480 except EmptyRepositoryError:
1479 parents = [None, None]
1481 parents = [None, None]
1480 elif len(parents) == 1:
1482 elif len(parents) == 1:
1481 parents += [None]
1483 parents += [None]
1482 self.parents = parents
1484 self.parents = parents
1483
1485
1484 # Local parents, only if not None
1486 # Local parents, only if not None
1485 parents = [p for p in self.parents if p]
1487 parents = [p for p in self.parents if p]
1486
1488
1487 # Check nodes marked as added
1489 # Check nodes marked as added
1488 for p in parents:
1490 for p in parents:
1489 for node in self.added:
1491 for node in self.added:
1490 try:
1492 try:
1491 p.get_node(node.path)
1493 p.get_node(node.path)
1492 except NodeDoesNotExistError:
1494 except NodeDoesNotExistError:
1493 pass
1495 pass
1494 else:
1496 else:
1495 raise NodeAlreadyExistsError(
1497 raise NodeAlreadyExistsError(
1496 "Node `%s` already exists at %s" % (node.path, p))
1498 "Node `%s` already exists at %s" % (node.path, p))
1497
1499
1498 # Check nodes marked as changed
1500 # Check nodes marked as changed
1499 missing = set(self.changed)
1501 missing = set(self.changed)
1500 not_changed = set(self.changed)
1502 not_changed = set(self.changed)
1501 if self.changed and not parents:
1503 if self.changed and not parents:
1502 raise NodeDoesNotExistError(str(self.changed[0].path))
1504 raise NodeDoesNotExistError(str(self.changed[0].path))
1503 for p in parents:
1505 for p in parents:
1504 for node in self.changed:
1506 for node in self.changed:
1505 try:
1507 try:
1506 old = p.get_node(node.path)
1508 old = p.get_node(node.path)
1507 missing.remove(node)
1509 missing.remove(node)
1508 # if content actually changed, remove node from not_changed
1510 # if content actually changed, remove node from not_changed
1509 if old.content != node.content:
1511 if old.content != node.content:
1510 not_changed.remove(node)
1512 not_changed.remove(node)
1511 except NodeDoesNotExistError:
1513 except NodeDoesNotExistError:
1512 pass
1514 pass
1513 if self.changed and missing:
1515 if self.changed and missing:
1514 raise NodeDoesNotExistError(
1516 raise NodeDoesNotExistError(
1515 "Node `%s` marked as modified but missing in parents: %s"
1517 "Node `%s` marked as modified but missing in parents: %s"
1516 % (missing.pop().path, parents))
1518 % (missing.pop().path, parents))
1517
1519
1518 if self.changed and not_changed:
1520 if self.changed and not_changed:
1519 raise NodeNotChangedError(
1521 raise NodeNotChangedError(
1520 "Node `%s` wasn't actually changed (parents: %s)"
1522 "Node `%s` wasn't actually changed (parents: %s)"
1521 % (not_changed.pop().path, parents))
1523 % (not_changed.pop().path, parents))
1522
1524
1523 # Check nodes marked as removed
1525 # Check nodes marked as removed
1524 if self.removed and not parents:
1526 if self.removed and not parents:
1525 raise NodeDoesNotExistError(
1527 raise NodeDoesNotExistError(
1526 "Cannot remove node at %s as there "
1528 "Cannot remove node at %s as there "
1527 "were no parents specified" % self.removed[0].path)
1529 "were no parents specified" % self.removed[0].path)
1528 really_removed = set()
1530 really_removed = set()
1529 for p in parents:
1531 for p in parents:
1530 for node in self.removed:
1532 for node in self.removed:
1531 try:
1533 try:
1532 p.get_node(node.path)
1534 p.get_node(node.path)
1533 really_removed.add(node)
1535 really_removed.add(node)
1534 except CommitError:
1536 except CommitError:
1535 pass
1537 pass
1536 not_removed = set(self.removed) - really_removed
1538 not_removed = set(self.removed) - really_removed
1537 if not_removed:
1539 if not_removed:
1538 # TODO: johbo: This code branch does not seem to be covered
1540 # TODO: johbo: This code branch does not seem to be covered
1539 raise NodeDoesNotExistError(
1541 raise NodeDoesNotExistError(
1540 "Cannot remove node at %s from "
1542 "Cannot remove node at %s from "
1541 "following parents: %s" % (not_removed, parents))
1543 "following parents: %s" % (not_removed, parents))
1542
1544
1543 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1545 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1544 """
1546 """
1545 Performs in-memory commit (doesn't check workdir in any way) and
1547 Performs in-memory commit (doesn't check workdir in any way) and
1546 returns newly created :class:`BaseCommit`. Updates repository's
1548 returns newly created :class:`BaseCommit`. Updates repository's
1547 attribute `commits`.
1549 attribute `commits`.
1548
1550
1549 .. note::
1551 .. note::
1550
1552
1551 While overriding this method each backend should call
1553 While overriding this method each backend should call
1552 ``self.check_integrity(parents)`` in the first place.
1554 ``self.check_integrity(parents)`` in the first place.
1553
1555
1554 :param message: message of the commit
1556 :param message: message of the commit
1555 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1557 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1556 :param parents: single parent or sequence of parents from which commit
1558 :param parents: single parent or sequence of parents from which commit
1557 would be derived
1559 would be derived
1558 :param date: ``datetime.datetime`` instance. Defaults to
1560 :param date: ``datetime.datetime`` instance. Defaults to
1559 ``datetime.datetime.now()``.
1561 ``datetime.datetime.now()``.
1560 :param branch: branch name, as string. If none given, the backend's
1562 :param branch: branch name, as string. If none given, the backend's
1561 default branch would be used.
1563 default branch would be used.
1562
1564
1563 :raises ``CommitError``: if any error occurs while committing
1565 :raises ``CommitError``: if any error occurs while committing
1564 """
1566 """
1565 raise NotImplementedError
1567 raise NotImplementedError
1566
1568
1567
1569
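# Illustrative workflow sketch for the in-memory commit API (``repo`` is an
# assumed concrete backend repository; the FileNode import path and arguments
# are assumptions based on rhodecode.lib.vcs.nodes):
#
#   from rhodecode.lib.vcs.nodes import FileNode
#   imc = repo.in_memory_commit
#   imc.add(FileNode('docs/intro.rst', content='hello'))
#   imc.change(FileNode('setup.py', content='# updated'))
#   new_commit = imc.commit(message=u'Update docs',
#                           author=u'Joe Doe <joe.doe@example.com>')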
1568 class BaseInMemoryChangesetClass(type):
1570 class BaseInMemoryChangesetClass(type):
1569
1571
1570 def __instancecheck__(self, instance):
1572 def __instancecheck__(self, instance):
1571 return isinstance(instance, BaseInMemoryCommit)
1573 return isinstance(instance, BaseInMemoryCommit)
1572
1574
1573
1575
1574 class BaseInMemoryChangeset(BaseInMemoryCommit):
1576 class BaseInMemoryChangeset(BaseInMemoryCommit):
1575
1577
1576 __metaclass__ = BaseInMemoryChangesetClass
1578 __metaclass__ = BaseInMemoryChangesetClass
1577
1579
1578 def __new__(cls, *args, **kwargs):
1580 def __new__(cls, *args, **kwargs):
1579 warnings.warn(
1581 warnings.warn(
1580 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1582 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1581 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1583 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1582
1584
1583
1585
1584 class EmptyCommit(BaseCommit):
1586 class EmptyCommit(BaseCommit):
1585 """
1587 """
1586 A dummy empty commit. It's possible to pass a hash when creating
1588 A dummy empty commit. It's possible to pass a hash when creating
1587 an EmptyCommit
1589 an EmptyCommit
1588 """
1590 """
1589
1591
1590 def __init__(
1592 def __init__(
1591 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1593 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1592 message='', author='', date=None):
1594 message='', author='', date=None):
1593 self._empty_commit_id = commit_id
1595 self._empty_commit_id = commit_id
1594 # TODO: johbo: Solve idx parameter, default value does not make
1596 # TODO: johbo: Solve idx parameter, default value does not make
1595 # too much sense
1597 # too much sense
1596 self.idx = idx
1598 self.idx = idx
1597 self.message = message
1599 self.message = message
1598 self.author = author
1600 self.author = author
1599 self.date = date or datetime.datetime.fromtimestamp(0)
1601 self.date = date or datetime.datetime.fromtimestamp(0)
1600 self.repository = repo
1602 self.repository = repo
1601 self.alias = alias
1603 self.alias = alias
1602
1604
1603 @LazyProperty
1605 @LazyProperty
1604 def raw_id(self):
1606 def raw_id(self):
1605 """
1607 """
1606 Returns raw string identifying this commit, useful for web
1608 Returns raw string identifying this commit, useful for web
1607 representation.
1609 representation.
1608 """
1610 """
1609
1611
1610 return self._empty_commit_id
1612 return self._empty_commit_id
1611
1613
1612 @LazyProperty
1614 @LazyProperty
1613 def branch(self):
1615 def branch(self):
1614 if self.alias:
1616 if self.alias:
1615 from rhodecode.lib.vcs.backends import get_backend
1617 from rhodecode.lib.vcs.backends import get_backend
1616 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1618 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1617
1619
1618 @LazyProperty
1620 @LazyProperty
1619 def short_id(self):
1621 def short_id(self):
1620 return self.raw_id[:12]
1622 return self.raw_id[:12]
1621
1623
1622 @LazyProperty
1624 @LazyProperty
1623 def id(self):
1625 def id(self):
1624 return self.raw_id
1626 return self.raw_id
1625
1627
1626 def get_path_commit(self, path):
1628 def get_path_commit(self, path):
1627 return self
1629 return self
1628
1630
1629 def get_file_content(self, path):
1631 def get_file_content(self, path):
1630 return u''
1632 return u''
1631
1633
1632 def get_file_size(self, path):
1634 def get_file_size(self, path):
1633 return 0
1635 return 0
1634
1636
1635
1637
1636 class EmptyChangesetClass(type):
1638 class EmptyChangesetClass(type):
1637
1639
1638 def __instancecheck__(self, instance):
1640 def __instancecheck__(self, instance):
1639 return isinstance(instance, EmptyCommit)
1641 return isinstance(instance, EmptyCommit)
1640
1642
1641
1643
1642 class EmptyChangeset(EmptyCommit):
1644 class EmptyChangeset(EmptyCommit):
1643
1645
1644 __metaclass__ = EmptyChangesetClass
1646 __metaclass__ = EmptyChangesetClass
1645
1647
1646 def __new__(cls, *args, **kwargs):
1648 def __new__(cls, *args, **kwargs):
1647 warnings.warn(
1649 warnings.warn(
1648 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1650 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1649 return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)
1651 return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)
1650
1652
1651 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1653 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1652 alias=None, revision=-1, message='', author='', date=None):
1654 alias=None, revision=-1, message='', author='', date=None):
1653 if requested_revision is not None:
1655 if requested_revision is not None:
1654 warnings.warn(
1656 warnings.warn(
1655 "Parameter requested_revision not supported anymore",
1657 "Parameter requested_revision not supported anymore",
1656 DeprecationWarning)
1658 DeprecationWarning)
1657 super(EmptyChangeset, self).__init__(
1659 super(EmptyChangeset, self).__init__(
1658 commit_id=cs, repo=repo, alias=alias, idx=revision,
1660 commit_id=cs, repo=repo, alias=alias, idx=revision,
1659 message=message, author=author, date=date)
1661 message=message, author=author, date=date)
1660
1662
1661 @property
1663 @property
1662 def revision(self):
1664 def revision(self):
1663 warnings.warn("Use idx instead", DeprecationWarning)
1665 warnings.warn("Use idx instead", DeprecationWarning)
1664 return self.idx
1666 return self.idx
1665
1667
1666 @revision.setter
1668 @revision.setter
1667 def revision(self, value):
1669 def revision(self, value):
1668 warnings.warn("Use idx instead", DeprecationWarning)
1670 warnings.warn("Use idx instead", DeprecationWarning)
1669 self.idx = value
1671 self.idx = value
1670
1672
1671
1673
1672 class EmptyRepository(BaseRepository):
1674 class EmptyRepository(BaseRepository):
1673 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1675 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1674 pass
1676 pass
1675
1677
1676 def get_diff(self, *args, **kwargs):
1678 def get_diff(self, *args, **kwargs):
1677 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1679 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1678 return GitDiff('')
1680 return GitDiff('')
1679
1681
1680
1682
1681 class CollectionGenerator(object):
1683 class CollectionGenerator(object):
1682
1684
1683 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1685 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1684 self.repo = repo
1686 self.repo = repo
1685 self.commit_ids = commit_ids
1687 self.commit_ids = commit_ids
1686 # TODO: (oliver) this isn't currently hooked up
1688 # TODO: (oliver) this isn't currently hooked up
1687 self.collection_size = None
1689 self.collection_size = None
1688 self.pre_load = pre_load
1690 self.pre_load = pre_load
1689 self.translate_tag = translate_tag
1691 self.translate_tag = translate_tag
1690
1692
1691 def __len__(self):
1693 def __len__(self):
1692 if self.collection_size is not None:
1694 if self.collection_size is not None:
1693 return self.collection_size
1695 return self.collection_size
1694 return self.commit_ids.__len__()
1696 return self.commit_ids.__len__()
1695
1697
1696 def __iter__(self):
1698 def __iter__(self):
1697 for commit_id in self.commit_ids:
1699 for commit_id in self.commit_ids:
1698 # TODO: johbo: Mercurial passes in commit indices or commit ids
1700 # TODO: johbo: Mercurial passes in commit indices or commit ids
1699 yield self._commit_factory(commit_id)
1701 yield self._commit_factory(commit_id)
1700
1702
1701 def _commit_factory(self, commit_id):
1703 def _commit_factory(self, commit_id):
1702 """
1704 """
1703 Allows backends to override the way commits are generated.
1705 Allows backends to override the way commits are generated.
1704 """
1706 """
1705 return self.repo.get_commit(
1707 return self.repo.get_commit(
1706 commit_id=commit_id, pre_load=self.pre_load,
1708 commit_id=commit_id, pre_load=self.pre_load,
1707 translate_tag=self.translate_tag)
1709 translate_tag=self.translate_tag)
1708
1710
1709 def __getslice__(self, i, j):
1711 def __getslice__(self, i, j):
1710 """
1712 """
1711 Returns an iterator of sliced repository
1713 Returns an iterator of sliced repository
1712 """
1714 """
1713 commit_ids = self.commit_ids[i:j]
1715 commit_ids = self.commit_ids[i:j]
1714 return self.__class__(
1716 return self.__class__(
1715 self.repo, commit_ids, pre_load=self.pre_load,
1717 self.repo, commit_ids, pre_load=self.pre_load,
1716 translate_tag=self.translate_tag)
1718 translate_tag=self.translate_tag)
1717
1719
1718 def __repr__(self):
1720 def __repr__(self):
1719 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1721 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1720
1722
1721
1723
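# Illustrative usage sketch for ``CollectionGenerator`` (``repo`` is an assumed
# concrete backend repository whose ``get_commits()`` returns this type):
#
#   commits = repo.get_commits()
#   print(len(commits))        # __len__
#   first_five = commits[:5]   # __getslice__ returns another generator
#   for commit in first_five:  # commits materialize lazily in __iter__
#       print(commit.short_id)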
1722 class Config(object):
1724 class Config(object):
1723 """
1725 """
1724 Represents the configuration for a repository.
1726 Represents the configuration for a repository.
1725
1727
1726 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1728 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1727 standard library. It implements only the needed subset.
1729 standard library. It implements only the needed subset.
1728 """
1730 """
1729
1731
1730 def __init__(self):
1732 def __init__(self):
1731 self._values = {}
1733 self._values = {}
1732
1734
1733 def copy(self):
1735 def copy(self):
1734 clone = Config()
1736 clone = Config()
1735 for section, values in self._values.items():
1737 for section, values in self._values.items():
1736 clone._values[section] = values.copy()
1738 clone._values[section] = values.copy()
1737 return clone
1739 return clone
1738
1740
1739 def __repr__(self):
1741 def __repr__(self):
1740 return '<Config(%s sections) at %s>' % (
1742 return '<Config(%s sections) at %s>' % (
1741 len(self._values), hex(id(self)))
1743 len(self._values), hex(id(self)))
1742
1744
1743 def items(self, section):
1745 def items(self, section):
1744 return self._values.get(section, {}).iteritems()
1746 return self._values.get(section, {}).iteritems()
1745
1747
1746 def get(self, section, option):
1748 def get(self, section, option):
1747 return self._values.get(section, {}).get(option)
1749 return self._values.get(section, {}).get(option)
1748
1750
1749 def set(self, section, option, value):
1751 def set(self, section, option, value):
1750 section_values = self._values.setdefault(section, {})
1752 section_values = self._values.setdefault(section, {})
1751 section_values[option] = value
1753 section_values[option] = value
1752
1754
1753 def clear_section(self, section):
1755 def clear_section(self, section):
1754 self._values[section] = {}
1756 self._values[section] = {}
1755
1757
1756 def serialize(self):
1758 def serialize(self):
1757 """
1759 """
1758 Creates a list of three tuples (section, key, value) representing
1760 Creates a list of three tuples (section, key, value) representing
1759 this config object.
1761 this config object.
1760 """
1762 """
1761 items = []
1763 items = []
1762 for section in self._values:
1764 for section in self._values:
1763 for option, value in self._values[section].items():
1765 for option, value in self._values[section].items():
1764 items.append(
1766 items.append(
1765 (safe_str(section), safe_str(option), safe_str(value)))
1767 (safe_str(section), safe_str(option), safe_str(value)))
1766 return items
1768 return items
1767
1769
1768
1770
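# Illustrative usage sketch for ``Config`` (section/option names are examples):
#
#   config = Config()
#   config.set('ui', 'username', 'Joe Doe <joe.doe@example.com>')
#   assert config.get('ui', 'username')
#   for section, option, value in config.serialize():
#       print(section, option, value)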
1769 class Diff(object):
1771 class Diff(object):
1770 """
1772 """
1771 Represents a diff result from a repository backend.
1773 Represents a diff result from a repository backend.
1772
1774
1773 Subclasses have to provide a backend specific value for
1775 Subclasses have to provide a backend specific value for
1774 :attr:`_header_re` and :attr:`_meta_re`.
1776 :attr:`_header_re` and :attr:`_meta_re`.
1775 """
1777 """
1776 _meta_re = None
1778 _meta_re = None
1777 _header_re = None
1779 _header_re = None
1778
1780
1779 def __init__(self, raw_diff):
1781 def __init__(self, raw_diff):
1780 self.raw = raw_diff
1782 self.raw = raw_diff
1781
1783
1782 def chunks(self):
1784 def chunks(self):
1783 """
1785 """
1784 splits the diff into chunks of separate ``diff --git a/file b/file`` parts;
1786 splits the diff into chunks of separate ``diff --git a/file b/file`` parts;
1785 to make diffs consistent we must prepend the raw diff with \n, and make sure
1787 to make diffs consistent we must prepend the raw diff with \n, and make sure
1786 we can detect the last chunk, as it also has a special rule
1788 we can detect the last chunk, as it also has a special rule
1787 """
1789 """
1788
1790
1789 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1791 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1790 header = diff_parts[0]
1792 header = diff_parts[0]
1791
1793
1792 if self._meta_re:
1794 if self._meta_re:
1793 match = self._meta_re.match(header)
1795 match = self._meta_re.match(header)
1794
1796
1795 chunks = diff_parts[1:]
1797 chunks = diff_parts[1:]
1796 total_chunks = len(chunks)
1798 total_chunks = len(chunks)
1797
1799
1798 return (
1800 return (
1799 DiffChunk(chunk, self, cur_chunk == total_chunks)
1801 DiffChunk(chunk, self, cur_chunk == total_chunks)
1800 for cur_chunk, chunk in enumerate(chunks, start=1))
1802 for cur_chunk, chunk in enumerate(chunks, start=1))
1801
1803
1802
1804
1803 class DiffChunk(object):
1805 class DiffChunk(object):
1804
1806
1805 def __init__(self, chunk, diff, last_chunk):
1807 def __init__(self, chunk, diff, last_chunk):
1806 self._diff = diff
1808 self._diff = diff
1807
1809
1808 # since we split by \ndiff --git that part is lost from original diff
1810 # since we split by \ndiff --git that part is lost from original diff
1809 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1811 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1810 if not last_chunk:
1812 if not last_chunk:
1811 chunk += '\n'
1813 chunk += '\n'
1812
1814
1813 match = self._diff._header_re.match(chunk)
1815 match = self._diff._header_re.match(chunk)
1814 self.header = match.groupdict()
1816 self.header = match.groupdict()
1815 self.diff = chunk[match.end():]
1817 self.diff = chunk[match.end():]
1816 self.raw = chunk
1818 self.raw = chunk
1817
1819
1818
1820
1819 class BasePathPermissionChecker(object):
1821 class BasePathPermissionChecker(object):
1820
1822
1821 @staticmethod
1823 @staticmethod
1822 def create_from_patterns(includes, excludes):
1824 def create_from_patterns(includes, excludes):
1823 if includes and '*' in includes and not excludes:
1825 if includes and '*' in includes and not excludes:
1824 return AllPathPermissionChecker()
1826 return AllPathPermissionChecker()
1825 elif excludes and '*' in excludes:
1827 elif excludes and '*' in excludes:
1826 return NonePathPermissionChecker()
1828 return NonePathPermissionChecker()
1827 else:
1829 else:
1828 return PatternPathPermissionChecker(includes, excludes)
1830 return PatternPathPermissionChecker(includes, excludes)
1829
1831
1830 @property
1832 @property
1831 def has_full_access(self):
1833 def has_full_access(self):
1832 raise NotImplementedError()
1834 raise NotImplementedError()
1833
1835
1834 def has_access(self, path):
1836 def has_access(self, path):
1835 raise NotImplementedError()
1837 raise NotImplementedError()
1836
1838
1837
1839
1838 class AllPathPermissionChecker(BasePathPermissionChecker):
1840 class AllPathPermissionChecker(BasePathPermissionChecker):
1839
1841
1840 @property
1842 @property
1841 def has_full_access(self):
1843 def has_full_access(self):
1842 return True
1844 return True
1843
1845
1844 def has_access(self, path):
1846 def has_access(self, path):
1845 return True
1847 return True
1846
1848
1847
1849
1848 class NonePathPermissionChecker(BasePathPermissionChecker):
1850 class NonePathPermissionChecker(BasePathPermissionChecker):
1849
1851
1850 @property
1852 @property
1851 def has_full_access(self):
1853 def has_full_access(self):
1852 return False
1854 return False
1853
1855
1854 def has_access(self, path):
1856 def has_access(self, path):
1855 return False
1857 return False
1856
1858
1857
1859
1858 class PatternPathPermissionChecker(BasePathPermissionChecker):
1860 class PatternPathPermissionChecker(BasePathPermissionChecker):
1859
1861
1860 def __init__(self, includes, excludes):
1862 def __init__(self, includes, excludes):
1861 self.includes = includes
1863 self.includes = includes
1862 self.excludes = excludes
1864 self.excludes = excludes
1863 self.includes_re = [] if not includes else [
1865 self.includes_re = [] if not includes else [
1864 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1866 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1865 self.excludes_re = [] if not excludes else [
1867 self.excludes_re = [] if not excludes else [
1866 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1868 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1867
1869
1868 @property
1870 @property
1869 def has_full_access(self):
1871 def has_full_access(self):
1870 return '*' in self.includes and not self.excludes
1872 return '*' in self.includes and not self.excludes
1871
1873
1872 def has_access(self, path):
1874 def has_access(self, path):
1873 for regex in self.excludes_re:
1875 for regex in self.excludes_re:
1874 if regex.match(path):
1876 if regex.match(path):
1875 return False
1877 return False
1876 for regex in self.includes_re:
1878 for regex in self.includes_re:
1877 if regex.match(path):
1879 if regex.match(path):
1878 return True
1880 return True
1879 return False
1881 return False
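A short usage sketch of the permission checker hierarchy above, assuming the classes shown here are importable; the include/exclude patterns and paths are invented, and create_from_patterns() simply picks the cheapest checker (all, none, or fnmatch-based) for the given rules:

    # Hypothetical fnmatch-style patterns, for illustration only.
    includes = ['docs/*', 'setup.py']
    excludes = ['docs/secret/*']

    checker = BasePathPermissionChecker.create_from_patterns(includes, excludes)

    assert not checker.has_full_access
    assert checker.has_access('docs/index.rst')
    assert checker.has_access('setup.py')
    assert not checker.has_access('docs/secret/key.txt')      # exclude wins
    assert not checker.has_access('rhodecode/model/repo.py')  # not included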
@@ -1,1073 +1,1074 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import re
22 import re
23 import shutil
23 import shutil
24 import time
24 import time
25 import logging
25 import logging
26 import traceback
26 import traceback
27 import datetime
27 import datetime
28
28
29 from pyramid.threadlocal import get_current_request
29 from pyramid.threadlocal import get_current_request
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from rhodecode import events
32 from rhodecode import events
33 from rhodecode.lib.auth import HasUserGroupPermissionAny
33 from rhodecode.lib.auth import HasUserGroupPermissionAny
34 from rhodecode.lib.caching_query import FromCache
34 from rhodecode.lib.caching_query import FromCache
35 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
35 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
36 from rhodecode.lib.hooks_base import log_delete_repository
36 from rhodecode.lib.hooks_base import log_delete_repository
37 from rhodecode.lib.user_log_filter import user_log_filter
37 from rhodecode.lib.user_log_filter import user_log_filter
38 from rhodecode.lib.utils import make_db_config
38 from rhodecode.lib.utils import make_db_config
39 from rhodecode.lib.utils2 import (
39 from rhodecode.lib.utils2 import (
40 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
40 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
41 get_current_rhodecode_user, safe_int, datetime_to_time,
41 get_current_rhodecode_user, safe_int, datetime_to_time,
42 action_logger_generic)
42 action_logger_generic)
43 from rhodecode.lib.vcs.backends import get_backend
43 from rhodecode.lib.vcs.backends import get_backend
44 from rhodecode.model import BaseModel
44 from rhodecode.model import BaseModel
45 from rhodecode.model.db import (
45 from rhodecode.model.db import (
46 _hash_key, joinedload, or_, Repository, UserRepoToPerm, UserGroupRepoToPerm,
46 _hash_key, joinedload, or_, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49
49
50 from rhodecode.model.settings import VcsSettingsModel
50 from rhodecode.model.settings import VcsSettingsModel
51
51
52
52
53 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
54
54
55
55
56 class RepoModel(BaseModel):
56 class RepoModel(BaseModel):
57
57
58 cls = Repository
58 cls = Repository
59
59
60 def _get_user_group(self, users_group):
60 def _get_user_group(self, users_group):
61 return self._get_instance(UserGroup, users_group,
61 return self._get_instance(UserGroup, users_group,
62 callback=UserGroup.get_by_group_name)
62 callback=UserGroup.get_by_group_name)
63
63
64 def _get_repo_group(self, repo_group):
64 def _get_repo_group(self, repo_group):
65 return self._get_instance(RepoGroup, repo_group,
65 return self._get_instance(RepoGroup, repo_group,
66 callback=RepoGroup.get_by_group_name)
66 callback=RepoGroup.get_by_group_name)
67
67
68 def _create_default_perms(self, repository, private):
68 def _create_default_perms(self, repository, private):
69 # create default permission
69 # create default permission
70 default = 'repository.read'
70 default = 'repository.read'
71 def_user = User.get_default_user()
71 def_user = User.get_default_user()
72 for p in def_user.user_perms:
72 for p in def_user.user_perms:
73 if p.permission.permission_name.startswith('repository.'):
73 if p.permission.permission_name.startswith('repository.'):
74 default = p.permission.permission_name
74 default = p.permission.permission_name
75 break
75 break
76
76
77 default_perm = 'repository.none' if private else default
77 default_perm = 'repository.none' if private else default
78
78
79 repo_to_perm = UserRepoToPerm()
79 repo_to_perm = UserRepoToPerm()
80 repo_to_perm.permission = Permission.get_by_key(default_perm)
80 repo_to_perm.permission = Permission.get_by_key(default_perm)
81
81
82 repo_to_perm.repository = repository
82 repo_to_perm.repository = repository
83 repo_to_perm.user_id = def_user.user_id
83 repo_to_perm.user_id = def_user.user_id
84
84
85 return repo_to_perm
85 return repo_to_perm
86
86
87 @LazyProperty
87 @LazyProperty
88 def repos_path(self):
88 def repos_path(self):
89 """
89 """
90 Gets the repositories root path from database
90 Gets the repositories root path from database
91 """
91 """
92 settings_model = VcsSettingsModel(sa=self.sa)
92 settings_model = VcsSettingsModel(sa=self.sa)
93 return settings_model.get_repos_location()
93 return settings_model.get_repos_location()
94
94
95 def get(self, repo_id):
95 def get(self, repo_id):
96 repo = self.sa.query(Repository) \
96 repo = self.sa.query(Repository) \
97 .filter(Repository.repo_id == repo_id)
97 .filter(Repository.repo_id == repo_id)
98
98
99 return repo.scalar()
99 return repo.scalar()
100
100
101 def get_repo(self, repository):
101 def get_repo(self, repository):
102 return self._get_repo(repository)
102 return self._get_repo(repository)
103
103
104 def get_by_repo_name(self, repo_name, cache=False):
104 def get_by_repo_name(self, repo_name, cache=False):
105 repo = self.sa.query(Repository) \
105 repo = self.sa.query(Repository) \
106 .filter(Repository.repo_name == repo_name)
106 .filter(Repository.repo_name == repo_name)
107
107
108 if cache:
108 if cache:
109 name_key = _hash_key(repo_name)
109 name_key = _hash_key(repo_name)
110 repo = repo.options(
110 repo = repo.options(
111 FromCache("sql_cache_short", "get_repo_%s" % name_key))
111 FromCache("sql_cache_short", "get_repo_%s" % name_key))
112 return repo.scalar()
112 return repo.scalar()
113
113
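A hedged usage sketch of the cached lookup above; the repository name is invented, and a configured database session plus the "sql_cache_short" cache region are assumed:

    # Hypothetical call; cache=True wraps the query in the "sql_cache_short"
    # region keyed by a hash of the repository name.
    repo = RepoModel().get_by_repo_name(u'my-group/my-repo', cache=True)
    if repo is None:
        print('no such repository')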
114 def _extract_id_from_repo_name(self, repo_name):
114 def _extract_id_from_repo_name(self, repo_name):
115 if repo_name.startswith('/'):
115 if repo_name.startswith('/'):
116 repo_name = repo_name.lstrip('/')
116 repo_name = repo_name.lstrip('/')
117 by_id_match = re.match(r'^_(\d{1,})', repo_name)
117 by_id_match = re.match(r'^_(\d{1,})', repo_name)
118 if by_id_match:
118 if by_id_match:
119 return by_id_match.groups()[0]
119 return by_id_match.groups()[0]
120
120
121 def get_repo_by_id(self, repo_name):
121 def get_repo_by_id(self, repo_name):
122 """
122 """
123 Extracts repo_name by id from special urls.
123 Extracts repo_name by id from special urls.
124 Example url is _11/repo_name
124 Example url is _11/repo_name
125
125
126 :param repo_name:
126 :param repo_name:
127 :return: repo object if matched else None
127 :return: repo object if matched else None
128 """
128 """
129
129
130 try:
130 try:
131 _repo_id = self._extract_id_from_repo_name(repo_name)
131 _repo_id = self._extract_id_from_repo_name(repo_name)
132 if _repo_id:
132 if _repo_id:
133 return self.get(_repo_id)
133 return self.get(_repo_id)
134 except Exception:
134 except Exception:
135 log.exception('Failed to extract repo_name from URL')
135 log.exception('Failed to extract repo_name from URL')
136
136
137 return None
137 return None
138
138
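The two methods above recognize the permalink form '_<repo_id>/anything'; a small self-contained sketch of the same regular expression (the example paths are invented):

    import re

    def extract_repo_id(repo_name):
        # same idea as _extract_id_from_repo_name(): a leading "_<digits>"
        repo_name = repo_name.lstrip('/')
        match = re.match(r'^_(\d{1,})', repo_name)
        return match.groups()[0] if match else None

    assert extract_repo_id('_11/repo_name') == '11'
    assert extract_repo_id('/_42') == '42'
    assert extract_repo_id('group/repo') is None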
139 def get_repos_for_root(self, root, traverse=False):
139 def get_repos_for_root(self, root, traverse=False):
140 if traverse:
140 if traverse:
141 like_expression = u'{}%'.format(safe_unicode(root))
141 like_expression = u'{}%'.format(safe_unicode(root))
142 repos = Repository.query().filter(
142 repos = Repository.query().filter(
143 Repository.repo_name.like(like_expression)).all()
143 Repository.repo_name.like(like_expression)).all()
144 else:
144 else:
145 if root and not isinstance(root, RepoGroup):
145 if root and not isinstance(root, RepoGroup):
146 raise ValueError(
146 raise ValueError(
147 'Root must be an instance '
147 'Root must be an instance '
148 'of RepoGroup, got:{} instead'.format(type(root)))
148 'of RepoGroup, got:{} instead'.format(type(root)))
149 repos = Repository.query().filter(Repository.group == root).all()
149 repos = Repository.query().filter(Repository.group == root).all()
150 return repos
150 return repos
151
151
152 def get_url(self, repo, request=None, permalink=False):
152 def get_url(self, repo, request=None, permalink=False):
153 if not request:
153 if not request:
154 request = get_current_request()
154 request = get_current_request()
155
155
156 if not request:
156 if not request:
157 return
157 return
158
158
159 if permalink:
159 if permalink:
160 return request.route_url(
160 return request.route_url(
161 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
161 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
162 else:
162 else:
163 return request.route_url(
163 return request.route_url(
164 'repo_summary', repo_name=safe_str(repo.repo_name))
164 'repo_summary', repo_name=safe_str(repo.repo_name))
165
165
166 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
166 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
167 if not request:
167 if not request:
168 request = get_current_request()
168 request = get_current_request()
169
169
170 if not request:
170 if not request:
171 return
171 return
172
172
173 if permalink:
173 if permalink:
174 return request.route_url(
174 return request.route_url(
175 'repo_commit', repo_name=safe_str(repo.repo_id),
175 'repo_commit', repo_name=safe_str(repo.repo_id),
176 commit_id=commit_id)
176 commit_id=commit_id)
177
177
178 else:
178 else:
179 return request.route_url(
179 return request.route_url(
180 'repo_commit', repo_name=safe_str(repo.repo_name),
180 'repo_commit', repo_name=safe_str(repo.repo_name),
181 commit_id=commit_id)
181 commit_id=commit_id)
182
182
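Both URL helpers above return None silently outside of a request context and switch between a name-based route and an id-based permalink; a hedged usage sketch, where the repository row, request object and commit id are assumed to exist:

    # Hypothetical usage inside a Pyramid request; `repo` is a Repository row.
    model = RepoModel()
    by_name = model.get_url(repo, request=request)
    permalink = model.get_url(repo, request=request, permalink=True)  # '_<repo_id>' form
    commit_url = model.get_commit_url(repo, 'deadbeef' * 5, request=request)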
183 def get_repo_log(self, repo, filter_term):
183 def get_repo_log(self, repo, filter_term):
184 repo_log = UserLog.query()\
184 repo_log = UserLog.query()\
185 .filter(or_(UserLog.repository_id == repo.repo_id,
185 .filter(or_(UserLog.repository_id == repo.repo_id,
186 UserLog.repository_name == repo.repo_name))\
186 UserLog.repository_name == repo.repo_name))\
187 .options(joinedload(UserLog.user))\
187 .options(joinedload(UserLog.user))\
188 .options(joinedload(UserLog.repository))\
188 .options(joinedload(UserLog.repository))\
189 .order_by(UserLog.action_date.desc())
189 .order_by(UserLog.action_date.desc())
190
190
191 repo_log = user_log_filter(repo_log, filter_term)
191 repo_log = user_log_filter(repo_log, filter_term)
192 return repo_log
192 return repo_log
193
193
194 @classmethod
194 @classmethod
195 def update_commit_cache(cls, repositories=None):
195 def update_commit_cache(cls, repositories=None):
196 if not repositories:
196 if not repositories:
197 repositories = Repository.getAll()
197 repositories = Repository.getAll()
198 for repo in repositories:
198 for repo in repositories:
199 repo.update_commit_cache()
199 repo.update_commit_cache()
200
200
201 def get_repos_as_dict(self, repo_list=None, admin=False,
201 def get_repos_as_dict(self, repo_list=None, admin=False,
202 super_user_actions=False, short_name=None):
202 super_user_actions=False, short_name=None):
203 _render = get_current_request().get_partial_renderer(
203 _render = get_current_request().get_partial_renderer(
204 'rhodecode:templates/data_table/_dt_elements.mako')
204 'rhodecode:templates/data_table/_dt_elements.mako')
205 c = _render.get_call_context()
205 c = _render.get_call_context()
206
206
207 def quick_menu(repo_name):
207 def quick_menu(repo_name):
208 return _render('quick_menu', repo_name)
208 return _render('quick_menu', repo_name)
209
209
210 def repo_lnk(name, rtype, rstate, private, archived, fork_of):
210 def repo_lnk(name, rtype, rstate, private, archived, fork_of):
211 if short_name is not None:
211 if short_name is not None:
212 short_name_var = short_name
212 short_name_var = short_name
213 else:
213 else:
214 short_name_var = not admin
214 short_name_var = not admin
215 return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
215 return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
216 short_name=short_name_var, admin=False)
216 short_name=short_name_var, admin=False)
217
217
218 def last_change(last_change):
218 def last_change(last_change):
219 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
219 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
220 last_change = last_change + datetime.timedelta(seconds=
220 last_change = last_change + datetime.timedelta(seconds=
221 (datetime.datetime.now() - datetime.datetime.utcnow()).seconds)
221 (datetime.datetime.now() - datetime.datetime.utcnow()).seconds)
222 return _render("last_change", last_change)
222 return _render("last_change", last_change)
223
223
224 def rss_lnk(repo_name):
224 def rss_lnk(repo_name):
225 return _render("rss", repo_name)
225 return _render("rss", repo_name)
226
226
227 def atom_lnk(repo_name):
227 def atom_lnk(repo_name):
228 return _render("atom", repo_name)
228 return _render("atom", repo_name)
229
229
230 def last_rev(repo_name, cs_cache):
230 def last_rev(repo_name, cs_cache):
231 return _render('revision', repo_name, cs_cache.get('revision'),
231 return _render('revision', repo_name, cs_cache.get('revision'),
232 cs_cache.get('raw_id'), cs_cache.get('author'),
232 cs_cache.get('raw_id'), cs_cache.get('author'),
233 cs_cache.get('message'), cs_cache.get('date'))
233 cs_cache.get('message'), cs_cache.get('date'))
234
234
235 def desc(desc):
235 def desc(desc):
236 return _render('repo_desc', desc, c.visual.stylify_metatags)
236 return _render('repo_desc', desc, c.visual.stylify_metatags)
237
237
238 def state(repo_state):
238 def state(repo_state):
239 return _render("repo_state", repo_state)
239 return _render("repo_state", repo_state)
240
240
241 def repo_actions(repo_name):
241 def repo_actions(repo_name):
242 return _render('repo_actions', repo_name, super_user_actions)
242 return _render('repo_actions', repo_name, super_user_actions)
243
243
244 def user_profile(username):
244 def user_profile(username):
245 return _render('user_profile', username)
245 return _render('user_profile', username)
246
246
247 repos_data = []
247 repos_data = []
248 for repo in repo_list:
248 for repo in repo_list:
249 cs_cache = repo.changeset_cache
249 cs_cache = repo.changeset_cache
250 row = {
250 row = {
251 "menu": quick_menu(repo.repo_name),
251 "menu": quick_menu(repo.repo_name),
252
252
253 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
253 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
254 repo.private, repo.archived, repo.fork),
254 repo.private, repo.archived, repo.fork),
255 "name_raw": repo.repo_name.lower(),
255 "name_raw": repo.repo_name.lower(),
256
256
257 "last_change": last_change(repo.last_commit_change),
257 "last_change": last_change(repo.last_commit_change),
258 "last_change_raw": datetime_to_time(repo.last_commit_change),
258 "last_change_raw": datetime_to_time(repo.last_commit_change),
259
259
260 "last_changeset": last_rev(repo.repo_name, cs_cache),
260 "last_changeset": last_rev(repo.repo_name, cs_cache),
261 "last_changeset_raw": cs_cache.get('revision'),
261 "last_changeset_raw": cs_cache.get('revision'),
262
262
263 "desc": desc(repo.description_safe),
263 "desc": desc(repo.description_safe),
264 "owner": user_profile(repo.user.username),
264 "owner": user_profile(repo.user.username),
265
265
266 "state": state(repo.repo_state),
266 "state": state(repo.repo_state),
267 "rss": rss_lnk(repo.repo_name),
267 "rss": rss_lnk(repo.repo_name),
268
268
269 "atom": atom_lnk(repo.repo_name),
269 "atom": atom_lnk(repo.repo_name),
270 }
270 }
271 if admin:
271 if admin:
272 row.update({
272 row.update({
273 "action": repo_actions(repo.repo_name),
273 "action": repo_actions(repo.repo_name),
274 })
274 })
275 repos_data.append(row)
275 repos_data.append(row)
276
276
277 return repos_data
277 return repos_data
278
278
279 def _get_defaults(self, repo_name):
279 def _get_defaults(self, repo_name):
280 """
280 """
281 Gets information about repository, and returns a dict for
281 Gets information about repository, and returns a dict for
282 usage in forms
282 usage in forms
283
283
284 :param repo_name:
284 :param repo_name:
285 """
285 """
286
286
287 repo_info = Repository.get_by_repo_name(repo_name)
287 repo_info = Repository.get_by_repo_name(repo_name)
288
288
289 if repo_info is None:
289 if repo_info is None:
290 return None
290 return None
291
291
292 defaults = repo_info.get_dict()
292 defaults = repo_info.get_dict()
293 defaults['repo_name'] = repo_info.just_name
293 defaults['repo_name'] = repo_info.just_name
294
294
295 groups = repo_info.groups_with_parents
295 groups = repo_info.groups_with_parents
296 parent_group = groups[-1] if groups else None
296 parent_group = groups[-1] if groups else None
297
297
298 # we use -1 because this is how we mark an empty group in HTML
298 # we use -1 because this is how we mark an empty group in HTML
299 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
299 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
300
300
301 keys_to_process = (
301 keys_to_process = (
302 {'k': 'repo_type', 'strip': False},
302 {'k': 'repo_type', 'strip': False},
303 {'k': 'repo_enable_downloads', 'strip': True},
303 {'k': 'repo_enable_downloads', 'strip': True},
304 {'k': 'repo_description', 'strip': True},
304 {'k': 'repo_description', 'strip': True},
305 {'k': 'repo_enable_locking', 'strip': True},
305 {'k': 'repo_enable_locking', 'strip': True},
306 {'k': 'repo_landing_rev', 'strip': True},
306 {'k': 'repo_landing_rev', 'strip': True},
307 {'k': 'clone_uri', 'strip': False},
307 {'k': 'clone_uri', 'strip': False},
308 {'k': 'push_uri', 'strip': False},
308 {'k': 'push_uri', 'strip': False},
309 {'k': 'repo_private', 'strip': True},
309 {'k': 'repo_private', 'strip': True},
310 {'k': 'repo_enable_statistics', 'strip': True}
310 {'k': 'repo_enable_statistics', 'strip': True}
311 )
311 )
312
312
313 for item in keys_to_process:
313 for item in keys_to_process:
314 attr = item['k']
314 attr = item['k']
315 if item['strip']:
315 if item['strip']:
316 attr = remove_prefix(item['k'], 'repo_')
316 attr = remove_prefix(item['k'], 'repo_')
317
317
318 val = defaults[attr]
318 val = defaults[attr]
319 if item['k'] == 'repo_landing_rev':
319 if item['k'] == 'repo_landing_rev':
320 val = ':'.join(defaults[attr])
320 val = ':'.join(defaults[attr])
321 defaults[item['k']] = val
321 defaults[item['k']] = val
322 if item['k'] == 'clone_uri':
322 if item['k'] == 'clone_uri':
323 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
323 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
324 if item['k'] == 'push_uri':
324 if item['k'] == 'push_uri':
325 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
325 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
326
326
327 # fill owner
327 # fill owner
328 if repo_info.user:
328 if repo_info.user:
329 defaults.update({'user': repo_info.user.username})
329 defaults.update({'user': repo_info.user.username})
330 else:
330 else:
331 replacement_user = User.get_first_super_admin().username
331 replacement_user = User.get_first_super_admin().username
332 defaults.update({'user': replacement_user})
332 defaults.update({'user': replacement_user})
333
333
334 return defaults
334 return defaults
335
335
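In the keys_to_process loop above, entries marked 'strip': True are read from the repository dict under their un-prefixed attribute name and written back under the form key; a tiny sketch of that mapping, with remove_prefix approximated locally since its import is project specific and the sample values invented:

    def remove_prefix_local(key, prefix):
        # local stand-in for rhodecode.lib.utils2.remove_prefix
        return key[len(prefix):] if key.startswith(prefix) else key

    defaults = {'description': 'demo repo', 'private': True}   # invented values
    for form_key in ('repo_description', 'repo_private'):
        attr = remove_prefix_local(form_key, 'repo_')
        defaults[form_key] = defaults[attr]

    assert defaults['repo_description'] == 'demo repo'
    assert defaults['repo_private'] is True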
336 def update(self, repo, **kwargs):
336 def update(self, repo, **kwargs):
337 try:
337 try:
338 cur_repo = self._get_repo(repo)
338 cur_repo = self._get_repo(repo)
339 source_repo_name = cur_repo.repo_name
339 source_repo_name = cur_repo.repo_name
340 if 'user' in kwargs:
340 if 'user' in kwargs:
341 cur_repo.user = User.get_by_username(kwargs['user'])
341 cur_repo.user = User.get_by_username(kwargs['user'])
342
342
343 if 'repo_group' in kwargs:
343 if 'repo_group' in kwargs:
344 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
344 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
345 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
345 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
346
346
347 update_keys = [
347 update_keys = [
348 (1, 'repo_description'),
348 (1, 'repo_description'),
349 (1, 'repo_landing_rev'),
349 (1, 'repo_landing_rev'),
350 (1, 'repo_private'),
350 (1, 'repo_private'),
351 (1, 'repo_enable_downloads'),
351 (1, 'repo_enable_downloads'),
352 (1, 'repo_enable_locking'),
352 (1, 'repo_enable_locking'),
353 (1, 'repo_enable_statistics'),
353 (1, 'repo_enable_statistics'),
354 (0, 'clone_uri'),
354 (0, 'clone_uri'),
355 (0, 'push_uri'),
355 (0, 'push_uri'),
356 (0, 'fork_id')
356 (0, 'fork_id')
357 ]
357 ]
358 for strip, k in update_keys:
358 for strip, k in update_keys:
359 if k in kwargs:
359 if k in kwargs:
360 val = kwargs[k]
360 val = kwargs[k]
361 if strip:
361 if strip:
362 k = remove_prefix(k, 'repo_')
362 k = remove_prefix(k, 'repo_')
363
363
364 setattr(cur_repo, k, val)
364 setattr(cur_repo, k, val)
365
365
366 new_name = cur_repo.get_new_name(kwargs['repo_name'])
366 new_name = cur_repo.get_new_name(kwargs['repo_name'])
367 cur_repo.repo_name = new_name
367 cur_repo.repo_name = new_name
368
368
369 # if private flag is set, reset default permission to NONE
369 # if private flag is set, reset default permission to NONE
370 if kwargs.get('repo_private'):
370 if kwargs.get('repo_private'):
371 EMPTY_PERM = 'repository.none'
371 EMPTY_PERM = 'repository.none'
372 RepoModel().grant_user_permission(
372 RepoModel().grant_user_permission(
373 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
373 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
374 )
374 )
375
375
376 # handle extra fields
376 # handle extra fields
377 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
377 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
378 k = RepositoryField.un_prefix_key(field)
378 k = RepositoryField.un_prefix_key(field)
379 ex_field = RepositoryField.get_by_key_name(
379 ex_field = RepositoryField.get_by_key_name(
380 key=k, repo=cur_repo)
380 key=k, repo=cur_repo)
381 if ex_field:
381 if ex_field:
382 ex_field.field_value = kwargs[field]
382 ex_field.field_value = kwargs[field]
383 self.sa.add(ex_field)
383 self.sa.add(ex_field)
384 cur_repo.updated_on = datetime.datetime.now()
384 cur_repo.updated_on = datetime.datetime.now()
385 self.sa.add(cur_repo)
385 self.sa.add(cur_repo)
386
386
387 if source_repo_name != new_name:
387 if source_repo_name != new_name:
388 # rename repository
388 # rename repository
389 self._rename_filesystem_repo(
389 self._rename_filesystem_repo(
390 old=source_repo_name, new=new_name)
390 old=source_repo_name, new=new_name)
391
391
392 return cur_repo
392 return cur_repo
393 except Exception:
393 except Exception:
394 log.error(traceback.format_exc())
394 log.error(traceback.format_exc())
395 raise
395 raise
396
396
397 def _create_repo(self, repo_name, repo_type, description, owner,
397 def _create_repo(self, repo_name, repo_type, description, owner,
398 private=False, clone_uri=None, repo_group=None,
398 private=False, clone_uri=None, repo_group=None,
399 landing_rev='rev:tip', fork_of=None,
399 landing_rev='rev:tip', fork_of=None,
400 copy_fork_permissions=False, enable_statistics=False,
400 copy_fork_permissions=False, enable_statistics=False,
401 enable_locking=False, enable_downloads=False,
401 enable_locking=False, enable_downloads=False,
402 copy_group_permissions=False,
402 copy_group_permissions=False,
403 state=Repository.STATE_PENDING):
403 state=Repository.STATE_PENDING):
404 """
404 """
405 Create a repository inside the database with PENDING state; this should
405 Create a repository inside the database with PENDING state; this should
406 only be executed by create(), with the exception of importing existing
406 only be executed by create(), with the exception of importing existing
407 repos
407 repos
408 """
408 """
409 from rhodecode.model.scm import ScmModel
409 from rhodecode.model.scm import ScmModel
410
410
411 owner = self._get_user(owner)
411 owner = self._get_user(owner)
412 fork_of = self._get_repo(fork_of)
412 fork_of = self._get_repo(fork_of)
413 repo_group = self._get_repo_group(safe_int(repo_group))
413 repo_group = self._get_repo_group(safe_int(repo_group))
414
414
415 try:
415 try:
416 repo_name = safe_unicode(repo_name)
416 repo_name = safe_unicode(repo_name)
417 description = safe_unicode(description)
417 description = safe_unicode(description)
418 # repo_name is just the name of the repository,
418 # repo_name is just the name of the repository,
419 # while repo_name_full is a fully qualified name combined
419 # while repo_name_full is a fully qualified name combined
420 # from the name and the path of the group
420 # from the name and the path of the group
421 repo_name_full = repo_name
421 repo_name_full = repo_name
422 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
422 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
423
423
424 new_repo = Repository()
424 new_repo = Repository()
425 new_repo.repo_state = state
425 new_repo.repo_state = state
426 new_repo.enable_statistics = False
426 new_repo.enable_statistics = False
427 new_repo.repo_name = repo_name_full
427 new_repo.repo_name = repo_name_full
428 new_repo.repo_type = repo_type
428 new_repo.repo_type = repo_type
429 new_repo.user = owner
429 new_repo.user = owner
430 new_repo.group = repo_group
430 new_repo.group = repo_group
431 new_repo.description = description or repo_name
431 new_repo.description = description or repo_name
432 new_repo.private = private
432 new_repo.private = private
433 new_repo.archived = False
433 new_repo.archived = False
434 new_repo.clone_uri = clone_uri
434 new_repo.clone_uri = clone_uri
435 new_repo.landing_rev = landing_rev
435 new_repo.landing_rev = landing_rev
436
436
437 new_repo.enable_statistics = enable_statistics
437 new_repo.enable_statistics = enable_statistics
438 new_repo.enable_locking = enable_locking
438 new_repo.enable_locking = enable_locking
439 new_repo.enable_downloads = enable_downloads
439 new_repo.enable_downloads = enable_downloads
440
440
441 if repo_group:
441 if repo_group:
442 new_repo.enable_locking = repo_group.enable_locking
442 new_repo.enable_locking = repo_group.enable_locking
443
443
444 if fork_of:
444 if fork_of:
445 parent_repo = fork_of
445 parent_repo = fork_of
446 new_repo.fork = parent_repo
446 new_repo.fork = parent_repo
447
447
448 events.trigger(events.RepoPreCreateEvent(new_repo))
448 events.trigger(events.RepoPreCreateEvent(new_repo))
449
449
450 self.sa.add(new_repo)
450 self.sa.add(new_repo)
451
451
452 EMPTY_PERM = 'repository.none'
452 EMPTY_PERM = 'repository.none'
453 if fork_of and copy_fork_permissions:
453 if fork_of and copy_fork_permissions:
454 repo = fork_of
454 repo = fork_of
455 user_perms = UserRepoToPerm.query() \
455 user_perms = UserRepoToPerm.query() \
456 .filter(UserRepoToPerm.repository == repo).all()
456 .filter(UserRepoToPerm.repository == repo).all()
457 group_perms = UserGroupRepoToPerm.query() \
457 group_perms = UserGroupRepoToPerm.query() \
458 .filter(UserGroupRepoToPerm.repository == repo).all()
458 .filter(UserGroupRepoToPerm.repository == repo).all()
459
459
460 for perm in user_perms:
460 for perm in user_perms:
461 UserRepoToPerm.create(
461 UserRepoToPerm.create(
462 perm.user, new_repo, perm.permission)
462 perm.user, new_repo, perm.permission)
463
463
464 for perm in group_perms:
464 for perm in group_perms:
465 UserGroupRepoToPerm.create(
465 UserGroupRepoToPerm.create(
466 perm.users_group, new_repo, perm.permission)
466 perm.users_group, new_repo, perm.permission)
467 # in case we copy permissions and also set this repo to private
467 # in case we copy permissions and also set this repo to private
468 # override the default user permission to make it a private repo
468 # override the default user permission to make it a private repo
469 if private:
469 if private:
470 RepoModel(self.sa).grant_user_permission(
470 RepoModel(self.sa).grant_user_permission(
471 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
471 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
472
472
473 elif repo_group and copy_group_permissions:
473 elif repo_group and copy_group_permissions:
474 user_perms = UserRepoGroupToPerm.query() \
474 user_perms = UserRepoGroupToPerm.query() \
475 .filter(UserRepoGroupToPerm.group == repo_group).all()
475 .filter(UserRepoGroupToPerm.group == repo_group).all()
476
476
477 group_perms = UserGroupRepoGroupToPerm.query() \
477 group_perms = UserGroupRepoGroupToPerm.query() \
478 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
478 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
479
479
480 for perm in user_perms:
480 for perm in user_perms:
481 perm_name = perm.permission.permission_name.replace(
481 perm_name = perm.permission.permission_name.replace(
482 'group.', 'repository.')
482 'group.', 'repository.')
483 perm_obj = Permission.get_by_key(perm_name)
483 perm_obj = Permission.get_by_key(perm_name)
484 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
484 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
485
485
486 for perm in group_perms:
486 for perm in group_perms:
487 perm_name = perm.permission.permission_name.replace(
487 perm_name = perm.permission.permission_name.replace(
488 'group.', 'repository.')
488 'group.', 'repository.')
489 perm_obj = Permission.get_by_key(perm_name)
489 perm_obj = Permission.get_by_key(perm_name)
490 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
490 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
491
491
492 if private:
492 if private:
493 RepoModel(self.sa).grant_user_permission(
493 RepoModel(self.sa).grant_user_permission(
494 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
494 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
495
495
496 else:
496 else:
497 perm_obj = self._create_default_perms(new_repo, private)
497 perm_obj = self._create_default_perms(new_repo, private)
498 self.sa.add(perm_obj)
498 self.sa.add(perm_obj)
499
499
500 # now automatically start following this repository as owner
500 # now automatically start following this repository as owner
501 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
501 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
502
502
503 # we need to flush here in order to check that the database won't
503 # we need to flush here in order to check that the database won't
504 # throw any exceptions; filesystem dirs are created at the very end
504 # throw any exceptions; filesystem dirs are created at the very end
505 self.sa.flush()
505 self.sa.flush()
506 events.trigger(events.RepoCreateEvent(new_repo))
506 events.trigger(events.RepoCreateEvent(new_repo))
507 return new_repo
507 return new_repo
508
508
509 except Exception:
509 except Exception:
510 log.error(traceback.format_exc())
510 log.error(traceback.format_exc())
511 raise
511 raise
512
512
513 def create(self, form_data, cur_user):
513 def create(self, form_data, cur_user):
514 """
514 """
515 Create repository using celery tasks
515 Create repository using celery tasks
516
516
517 :param form_data:
517 :param form_data:
518 :param cur_user:
518 :param cur_user:
519 """
519 """
520 from rhodecode.lib.celerylib import tasks, run_task
520 from rhodecode.lib.celerylib import tasks, run_task
521 return run_task(tasks.create_repo, form_data, cur_user)
521 return run_task(tasks.create_repo, form_data, cur_user)
522
522
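create() only hands the validated form data to the create_repo celery task; a hedged sketch of a call, where the form_data keys shown are illustrative and the authoritative set comes from the repo-creation form schema:

    # Hypothetical invocation; key names are illustrative only.
    form_data = {
        'repo_name': 'my-group/my-repo',
        'repo_type': 'git',
        'repo_description': 'created via RepoModel.create()',
        'repo_private': False,
    }
    task = RepoModel().create(form_data, cur_user='admin')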
523 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
523 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
524 perm_deletions=None, check_perms=True,
524 perm_deletions=None, check_perms=True,
525 cur_user=None):
525 cur_user=None):
526 if not perm_additions:
526 if not perm_additions:
527 perm_additions = []
527 perm_additions = []
528 if not perm_updates:
528 if not perm_updates:
529 perm_updates = []
529 perm_updates = []
530 if not perm_deletions:
530 if not perm_deletions:
531 perm_deletions = []
531 perm_deletions = []
532
532
533 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
533 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
534
534
535 changes = {
535 changes = {
536 'added': [],
536 'added': [],
537 'updated': [],
537 'updated': [],
538 'deleted': []
538 'deleted': []
539 }
539 }
540 # update permissions
540 # update permissions
541 for member_id, perm, member_type in perm_updates:
541 for member_id, perm, member_type in perm_updates:
542 member_id = int(member_id)
542 member_id = int(member_id)
543 if member_type == 'user':
543 if member_type == 'user':
544 member_name = User.get(member_id).username
544 member_name = User.get(member_id).username
545 # this also updates the current one if found
545 # this also updates the current one if found
546 self.grant_user_permission(
546 self.grant_user_permission(
547 repo=repo, user=member_id, perm=perm)
547 repo=repo, user=member_id, perm=perm)
548 elif member_type == 'user_group':
548 elif member_type == 'user_group':
549 # check if we have permissions to alter this usergroup
549 # check if we have permissions to alter this usergroup
550 member_name = UserGroup.get(member_id).users_group_name
550 member_name = UserGroup.get(member_id).users_group_name
551 if not check_perms or HasUserGroupPermissionAny(
551 if not check_perms or HasUserGroupPermissionAny(
552 *req_perms)(member_name, user=cur_user):
552 *req_perms)(member_name, user=cur_user):
553 self.grant_user_group_permission(
553 self.grant_user_group_permission(
554 repo=repo, group_name=member_id, perm=perm)
554 repo=repo, group_name=member_id, perm=perm)
555 else:
555 else:
556 raise ValueError("member_type must be 'user' or 'user_group' "
556 raise ValueError("member_type must be 'user' or 'user_group' "
557 "got {} instead".format(member_type))
557 "got {} instead".format(member_type))
558 changes['updated'].append({'type': member_type, 'id': member_id,
558 changes['updated'].append({'type': member_type, 'id': member_id,
559 'name': member_name, 'new_perm': perm})
559 'name': member_name, 'new_perm': perm})
560
560
561 # set new permissions
561 # set new permissions
562 for member_id, perm, member_type in perm_additions:
562 for member_id, perm, member_type in perm_additions:
563 member_id = int(member_id)
563 member_id = int(member_id)
564 if member_type == 'user':
564 if member_type == 'user':
565 member_name = User.get(member_id).username
565 member_name = User.get(member_id).username
566 self.grant_user_permission(
566 self.grant_user_permission(
567 repo=repo, user=member_id, perm=perm)
567 repo=repo, user=member_id, perm=perm)
568 elif member_type == 'user_group':
568 elif member_type == 'user_group':
569 # check if we have permissions to alter this usergroup
569 # check if we have permissions to alter this usergroup
570 member_name = UserGroup.get(member_id).users_group_name
570 member_name = UserGroup.get(member_id).users_group_name
571 if not check_perms or HasUserGroupPermissionAny(
571 if not check_perms or HasUserGroupPermissionAny(
572 *req_perms)(member_name, user=cur_user):
572 *req_perms)(member_name, user=cur_user):
573 self.grant_user_group_permission(
573 self.grant_user_group_permission(
574 repo=repo, group_name=member_id, perm=perm)
574 repo=repo, group_name=member_id, perm=perm)
575 else:
575 else:
576 raise ValueError("member_type must be 'user' or 'user_group' "
576 raise ValueError("member_type must be 'user' or 'user_group' "
577 "got {} instead".format(member_type))
577 "got {} instead".format(member_type))
578
578
579 changes['added'].append({'type': member_type, 'id': member_id,
579 changes['added'].append({'type': member_type, 'id': member_id,
580 'name': member_name, 'new_perm': perm})
580 'name': member_name, 'new_perm': perm})
581 # delete permissions
581 # delete permissions
582 for member_id, perm, member_type in perm_deletions:
582 for member_id, perm, member_type in perm_deletions:
583 member_id = int(member_id)
583 member_id = int(member_id)
584 if member_type == 'user':
584 if member_type == 'user':
585 member_name = User.get(member_id).username
585 member_name = User.get(member_id).username
586 self.revoke_user_permission(repo=repo, user=member_id)
586 self.revoke_user_permission(repo=repo, user=member_id)
587 elif member_type == 'user_group':
587 elif member_type == 'user_group':
588 # check if we have permissions to alter this usergroup
588 # check if we have permissions to alter this usergroup
589 member_name = UserGroup.get(member_id).users_group_name
589 member_name = UserGroup.get(member_id).users_group_name
590 if not check_perms or HasUserGroupPermissionAny(
590 if not check_perms or HasUserGroupPermissionAny(
591 *req_perms)(member_name, user=cur_user):
591 *req_perms)(member_name, user=cur_user):
592 self.revoke_user_group_permission(
592 self.revoke_user_group_permission(
593 repo=repo, group_name=member_id)
593 repo=repo, group_name=member_id)
594 else:
594 else:
595 raise ValueError("member_type must be 'user' or 'user_group' "
595 raise ValueError("member_type must be 'user' or 'user_group' "
596 "got {} instead".format(member_type))
596 "got {} instead".format(member_type))
597
597
598 changes['deleted'].append({'type': member_type, 'id': member_id,
598 changes['deleted'].append({'type': member_type, 'id': member_id,
599 'name': member_name, 'new_perm': perm})
599 'name': member_name, 'new_perm': perm})
600 return changes
600 return changes
601
601
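update_permissions() expects perm_additions, perm_updates and perm_deletions as iterables of (member_id, permission_name, member_type) tuples, with member_type being 'user' or 'user_group'; a hedged sketch of a call, where the ids, names and permissions are invented:

    # Hypothetical ids and permission names, for illustration only.
    changes = RepoModel().update_permissions(
        repo='my-group/my-repo',
        perm_additions=[(2, 'repository.write', 'user')],
        perm_updates=[(5, 'repository.read', 'user_group')],
        perm_deletions=[(7, 'repository.read', 'user')],
        cur_user='admin',
    )
    # changes == {'added': [...], 'updated': [...], 'deleted': [...]}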
602 def create_fork(self, form_data, cur_user):
602 def create_fork(self, form_data, cur_user):
603 """
603 """
604 Simple wrapper for executing the celery task for fork creation
604 Simple wrapper for executing the celery task for fork creation
605
605
606 :param form_data:
606 :param form_data:
607 :param cur_user:
607 :param cur_user:
608 """
608 """
609 from rhodecode.lib.celerylib import tasks, run_task
609 from rhodecode.lib.celerylib import tasks, run_task
610 return run_task(tasks.create_repo_fork, form_data, cur_user)
610 return run_task(tasks.create_repo_fork, form_data, cur_user)
611
611
612 def archive(self, repo):
612 def archive(self, repo):
613 """
613 """
614 Archive given repository. Set archive flag.
614 Archive given repository. Set archive flag.
615
615
616 :param repo:
616 :param repo:
617 """
617 """
618 repo = self._get_repo(repo)
618 repo = self._get_repo(repo)
619 if repo:
619 if repo:
620
620
621 try:
621 try:
622 repo.archived = True
622 repo.archived = True
623 self.sa.add(repo)
623 self.sa.add(repo)
624 self.sa.commit()
624 self.sa.commit()
625 except Exception:
625 except Exception:
626 log.error(traceback.format_exc())
626 log.error(traceback.format_exc())
627 raise
627 raise
628
628
629 def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
629 def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
630 """
630 """
631 Delete given repository; the forks parameter defines what to do with
631 Delete given repository; the forks parameter defines what to do with
632 attached forks. Throws AttachedForksError if the deleted repo has attached
632 attached forks. Throws AttachedForksError if the deleted repo has attached
633 forks
633 forks
634
634
635 :param repo:
635 :param repo:
636 :param forks: str 'delete' or 'detach'
636 :param forks: str 'delete' or 'detach'
637 :param pull_requests: str 'delete' or None
637 :param pull_requests: str 'delete' or None
638 :param fs_remove: remove(archive) repo from filesystem
638 :param fs_remove: remove(archive) repo from filesystem
639 """
639 """
640 if not cur_user:
640 if not cur_user:
641 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
641 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
642 repo = self._get_repo(repo)
642 repo = self._get_repo(repo)
643 if repo:
643 if repo:
644 if forks == 'detach':
644 if forks == 'detach':
645 for r in repo.forks:
645 for r in repo.forks:
646 r.fork = None
646 r.fork = None
647 self.sa.add(r)
647 self.sa.add(r)
648 elif forks == 'delete':
648 elif forks == 'delete':
649 for r in repo.forks:
649 for r in repo.forks:
650 self.delete(r, forks='delete')
650 self.delete(r, forks='delete')
651 elif [f for f in repo.forks]:
651 elif [f for f in repo.forks]:
652 raise AttachedForksError()
652 raise AttachedForksError()
653
653
654 # check for pull requests
654 # check for pull requests
655 pr_sources = repo.pull_requests_source
655 pr_sources = repo.pull_requests_source
656 pr_targets = repo.pull_requests_target
656 pr_targets = repo.pull_requests_target
657 if pull_requests != 'delete' and (pr_sources or pr_targets):
657 if pull_requests != 'delete' and (pr_sources or pr_targets):
658 raise AttachedPullRequestsError()
658 raise AttachedPullRequestsError()
659
659
660 old_repo_dict = repo.get_dict()
660 old_repo_dict = repo.get_dict()
661 events.trigger(events.RepoPreDeleteEvent(repo))
661 events.trigger(events.RepoPreDeleteEvent(repo))
662 try:
662 try:
663 self.sa.delete(repo)
663 self.sa.delete(repo)
664 if fs_remove:
664 if fs_remove:
665 self._delete_filesystem_repo(repo)
665 self._delete_filesystem_repo(repo)
666 else:
666 else:
667 log.debug('skipping removal from filesystem')
667 log.debug('skipping removal from filesystem')
668 old_repo_dict.update({
668 old_repo_dict.update({
669 'deleted_by': cur_user,
669 'deleted_by': cur_user,
670 'deleted_on': time.time(),
670 'deleted_on': time.time(),
671 })
671 })
672 log_delete_repository(**old_repo_dict)
672 log_delete_repository(**old_repo_dict)
673 events.trigger(events.RepoDeleteEvent(repo))
673 events.trigger(events.RepoDeleteEvent(repo))
674 except Exception:
674 except Exception:
675 log.error(traceback.format_exc())
675 log.error(traceback.format_exc())
676 raise
676 raise
677
677
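delete() refuses to remove a repository that still has attached forks or open pull requests unless told how to handle them; a hedged usage sketch, with an invented repository name and session commit left to the caller as elsewhere in this model:

    # Hypothetical usage; assumes the model and exceptions above are imported.
    try:
        RepoModel().delete('my-group/old-repo', fs_remove=True)
    except AttachedForksError:
        # forks exist; retry with an explicit policy
        RepoModel().delete('my-group/old-repo', forks='detach', fs_remove=True)
    except AttachedPullRequestsError:
        # open pull requests block deletion unless pull_requests='delete'
        pass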
678 def grant_user_permission(self, repo, user, perm):
678 def grant_user_permission(self, repo, user, perm):
679 """
679 """
680 Grant permission for user on given repository, or update existing one
680 Grant permission for user on given repository, or update existing one
681 if found
681 if found
682
682
683 :param repo: Instance of Repository, repository_id, or repository name
683 :param repo: Instance of Repository, repository_id, or repository name
684 :param user: Instance of User, user_id or username
684 :param user: Instance of User, user_id or username
685 :param perm: Instance of Permission, or permission_name
685 :param perm: Instance of Permission, or permission_name
686 """
686 """
687 user = self._get_user(user)
687 user = self._get_user(user)
688 repo = self._get_repo(repo)
688 repo = self._get_repo(repo)
689 permission = self._get_perm(perm)
689 permission = self._get_perm(perm)
690
690
691 # check if we have that permission already
691 # check if we have that permission already
692 obj = self.sa.query(UserRepoToPerm) \
692 obj = self.sa.query(UserRepoToPerm) \
693 .filter(UserRepoToPerm.user == user) \
693 .filter(UserRepoToPerm.user == user) \
694 .filter(UserRepoToPerm.repository == repo) \
694 .filter(UserRepoToPerm.repository == repo) \
695 .scalar()
695 .scalar()
696 if obj is None:
696 if obj is None:
697 # create new !
697 # create new !
698 obj = UserRepoToPerm()
698 obj = UserRepoToPerm()
699 obj.repository = repo
699 obj.repository = repo
700 obj.user = user
700 obj.user = user
701 obj.permission = permission
701 obj.permission = permission
702 self.sa.add(obj)
702 self.sa.add(obj)
703 log.debug('Granted perm %s to %s on %s', perm, user, repo)
703 log.debug('Granted perm %s to %s on %s', perm, user, repo)
704 action_logger_generic(
704 action_logger_generic(
705 'granted permission: {} to user: {} on repo: {}'.format(
705 'granted permission: {} to user: {} on repo: {}'.format(
706 perm, user, repo), namespace='security.repo')
706 perm, user, repo), namespace='security.repo')
707 return obj
707 return obj
708
708
709 def revoke_user_permission(self, repo, user):
709 def revoke_user_permission(self, repo, user):
710 """
710 """
711 Revoke permission for user on given repository
711 Revoke permission for user on given repository
712
712
713 :param repo: Instance of Repository, repository_id, or repository name
713 :param repo: Instance of Repository, repository_id, or repository name
714 :param user: Instance of User, user_id or username
714 :param user: Instance of User, user_id or username
715 """
715 """
716
716
717 user = self._get_user(user)
717 user = self._get_user(user)
718 repo = self._get_repo(repo)
718 repo = self._get_repo(repo)
719
719
720 obj = self.sa.query(UserRepoToPerm) \
720 obj = self.sa.query(UserRepoToPerm) \
721 .filter(UserRepoToPerm.repository == repo) \
721 .filter(UserRepoToPerm.repository == repo) \
722 .filter(UserRepoToPerm.user == user) \
722 .filter(UserRepoToPerm.user == user) \
723 .scalar()
723 .scalar()
724 if obj:
724 if obj:
725 self.sa.delete(obj)
725 self.sa.delete(obj)
726 log.debug('Revoked perm on %s on %s', repo, user)
726 log.debug('Revoked perm on %s on %s', repo, user)
727 action_logger_generic(
727 action_logger_generic(
728 'revoked permission from user: {} on repo: {}'.format(
728 'revoked permission from user: {} on repo: {}'.format(
729 user, repo), namespace='security.repo')
729 user, repo), namespace='security.repo')
730
730
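A hedged example of granting and revoking a repository permission with the two methods above; the user and repository names are invented, and flushing/committing the session is handled by the caller:

    # Hypothetical names, for illustration only.
    model = RepoModel()
    model.grant_user_permission(
        repo='my-group/my-repo', user='jane', perm='repository.write')
    model.revoke_user_permission(repo='my-group/my-repo', user='jane')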
731 def grant_user_group_permission(self, repo, group_name, perm):
731 def grant_user_group_permission(self, repo, group_name, perm):
732 """
732 """
733 Grant permission for user group on given repository, or update
733 Grant permission for user group on given repository, or update
734 existing one if found
734 existing one if found
735
735
736 :param repo: Instance of Repository, repository_id, or repository name
736 :param repo: Instance of Repository, repository_id, or repository name
737 :param group_name: Instance of UserGroup, users_group_id,
737 :param group_name: Instance of UserGroup, users_group_id,
738 or user group name
738 or user group name
739 :param perm: Instance of Permission, or permission_name
739 :param perm: Instance of Permission, or permission_name
740 """
740 """
741 repo = self._get_repo(repo)
741 repo = self._get_repo(repo)
742 group_name = self._get_user_group(group_name)
742 group_name = self._get_user_group(group_name)
743 permission = self._get_perm(perm)
743 permission = self._get_perm(perm)
744
744
745 # check if we have that permission already
745 # check if we have that permission already
746 obj = self.sa.query(UserGroupRepoToPerm) \
746 obj = self.sa.query(UserGroupRepoToPerm) \
747 .filter(UserGroupRepoToPerm.users_group == group_name) \
747 .filter(UserGroupRepoToPerm.users_group == group_name) \
748 .filter(UserGroupRepoToPerm.repository == repo) \
748 .filter(UserGroupRepoToPerm.repository == repo) \
749 .scalar()
749 .scalar()
750
750
751 if obj is None:
751 if obj is None:
752 # create new
752 # create new
753 obj = UserGroupRepoToPerm()
753 obj = UserGroupRepoToPerm()
754
754
755 obj.repository = repo
755 obj.repository = repo
756 obj.users_group = group_name
756 obj.users_group = group_name
757 obj.permission = permission
757 obj.permission = permission
758 self.sa.add(obj)
758 self.sa.add(obj)
759 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
759 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
760 action_logger_generic(
760 action_logger_generic(
761 'granted permission: {} to usergroup: {} on repo: {}'.format(
761 'granted permission: {} to usergroup: {} on repo: {}'.format(
762 perm, group_name, repo), namespace='security.repo')
762 perm, group_name, repo), namespace='security.repo')
763
763
764 return obj
764 return obj
765
765
766 def revoke_user_group_permission(self, repo, group_name):
766 def revoke_user_group_permission(self, repo, group_name):
767 """
767 """
768 Revoke permission for user group on given repository
768 Revoke permission for user group on given repository
769
769
770 :param repo: Instance of Repository, repository_id, or repository name
770 :param repo: Instance of Repository, repository_id, or repository name
771 :param group_name: Instance of UserGroup, users_group_id,
771 :param group_name: Instance of UserGroup, users_group_id,
772 or user group name
772 or user group name
773 """
773 """
774 repo = self._get_repo(repo)
774 repo = self._get_repo(repo)
775 group_name = self._get_user_group(group_name)
775 group_name = self._get_user_group(group_name)
776
776
777 obj = self.sa.query(UserGroupRepoToPerm) \
777 obj = self.sa.query(UserGroupRepoToPerm) \
778 .filter(UserGroupRepoToPerm.repository == repo) \
778 .filter(UserGroupRepoToPerm.repository == repo) \
779 .filter(UserGroupRepoToPerm.users_group == group_name) \
779 .filter(UserGroupRepoToPerm.users_group == group_name) \
780 .scalar()
780 .scalar()
781 if obj:
781 if obj:
782 self.sa.delete(obj)
782 self.sa.delete(obj)
783 log.debug('Revoked perm to %s on %s', repo, group_name)
783 log.debug('Revoked perm to %s on %s', repo, group_name)
784 action_logger_generic(
784 action_logger_generic(
785 'revoked permission from usergroup: {} on repo: {}'.format(
785 'revoked permission from usergroup: {} on repo: {}'.format(
786 group_name, repo), namespace='security.repo')
786 group_name, repo), namespace='security.repo')
787
787
788 def delete_stats(self, repo_name):
788 def delete_stats(self, repo_name):
789 """
789 """
790 removes stats for given repo
790 removes stats for given repo
791
791
792 :param repo_name:
792 :param repo_name:
793 """
793 """
794 repo = self._get_repo(repo_name)
794 repo = self._get_repo(repo_name)
795 try:
795 try:
796 obj = self.sa.query(Statistics) \
796 obj = self.sa.query(Statistics) \
797 .filter(Statistics.repository == repo).scalar()
797 .filter(Statistics.repository == repo).scalar()
798 if obj:
798 if obj:
799 self.sa.delete(obj)
799 self.sa.delete(obj)
800 except Exception:
800 except Exception:
801 log.error(traceback.format_exc())
801 log.error(traceback.format_exc())
802 raise
802 raise
803
803
804 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
804 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
805 field_type='str', field_desc=''):
805 field_type='str', field_desc=''):
806
806
807 repo = self._get_repo(repo_name)
807 repo = self._get_repo(repo_name)
808
808
809 new_field = RepositoryField()
809 new_field = RepositoryField()
810 new_field.repository = repo
810 new_field.repository = repo
811 new_field.field_key = field_key
811 new_field.field_key = field_key
812 new_field.field_type = field_type # python type
812 new_field.field_type = field_type # python type
813 new_field.field_value = field_value
813 new_field.field_value = field_value
814 new_field.field_desc = field_desc
814 new_field.field_desc = field_desc
815 new_field.field_label = field_label
815 new_field.field_label = field_label
816 self.sa.add(new_field)
816 self.sa.add(new_field)
817 return new_field
817 return new_field
818
818
819 def delete_repo_field(self, repo_name, field_key):
819 def delete_repo_field(self, repo_name, field_key):
820 repo = self._get_repo(repo_name)
820 repo = self._get_repo(repo_name)
821 field = RepositoryField.get_by_key_name(field_key, repo)
821 field = RepositoryField.get_by_key_name(field_key, repo)
822 if field:
822 if field:
823 self.sa.delete(field)
823 self.sa.delete(field)
824
824
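A hedged example of attaching and removing a custom repository field with add_repo_field() and delete_repo_field() above; the field key, label and value are invented, and the session commit is assumed to happen elsewhere:

    # Hypothetical custom field, for illustration only.
    model = RepoModel()
    model.add_repo_field(
        'my-group/my-repo', field_key='ticket_system',
        field_label='Ticket system URL',
        field_value='https://issues.example.com',
        field_type='str', field_desc='Where issues for this repo live')
    model.delete_repo_field('my-group/my-repo', field_key='ticket_system')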
825 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
825 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
826 clone_uri=None, repo_store_location=None,
826 clone_uri=None, repo_store_location=None,
827 use_global_config=False):
827 use_global_config=False, install_hooks=True):
828 """
828 """
829 makes a repository on the filesystem. It is group aware, meaning it will
829 makes a repository on the filesystem. It is group aware, meaning it will
830 create a repository within a group, and alter the paths according to
830 create a repository within a group, and alter the paths according to
831 the group location
831 the group location
832
832
833 :param repo_name:
833 :param repo_name:
834 :param alias:
834 :param alias:
835 :param parent:
835 :param parent:
836 :param clone_uri:
836 :param clone_uri:
837 :param repo_store_location:
837 :param repo_store_location:
838 """
838 """
839 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
839 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
840 from rhodecode.model.scm import ScmModel
840 from rhodecode.model.scm import ScmModel
841
841
842 if Repository.NAME_SEP in repo_name:
842 if Repository.NAME_SEP in repo_name:
843 raise ValueError(
843 raise ValueError(
844 'repo_name must not contain groups, got `%s`' % repo_name)
844 'repo_name must not contain groups, got `%s`' % repo_name)
845
845
846 if isinstance(repo_group, RepoGroup):
846 if isinstance(repo_group, RepoGroup):
847 new_parent_path = os.sep.join(repo_group.full_path_splitted)
847 new_parent_path = os.sep.join(repo_group.full_path_splitted)
848 else:
848 else:
849 new_parent_path = repo_group or ''
849 new_parent_path = repo_group or ''
850
850
851 if repo_store_location:
851 if repo_store_location:
852 _paths = [repo_store_location]
852 _paths = [repo_store_location]
853 else:
853 else:
854 _paths = [self.repos_path, new_parent_path, repo_name]
854 _paths = [self.repos_path, new_parent_path, repo_name]
855 # we need to make it str for mercurial
855 # we need to make it str for mercurial
856 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
856 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
857
857
858 # check if this path is not a repository
858 # check if this path is not a repository
859 if is_valid_repo(repo_path, self.repos_path):
859 if is_valid_repo(repo_path, self.repos_path):
860 raise Exception('This path %s is a valid repository' % repo_path)
860 raise Exception('This path %s is a valid repository' % repo_path)
861
861
862 # check if this path is a group
862 # check if this path is a group
863 if is_valid_repo_group(repo_path, self.repos_path):
863 if is_valid_repo_group(repo_path, self.repos_path):
864 raise Exception('This path %s is a valid group' % repo_path)
864 raise Exception('This path %s is a valid group' % repo_path)
865
865
866 log.info('creating repo %s in %s from url: `%s`',
866 log.info('creating repo %s in %s from url: `%s`',
867 repo_name, safe_unicode(repo_path),
867 repo_name, safe_unicode(repo_path),
868 obfuscate_url_pw(clone_uri))
868 obfuscate_url_pw(clone_uri))
869
869
870 backend = get_backend(repo_type)
870 backend = get_backend(repo_type)
871
871
872 config_repo = None if use_global_config else repo_name
872 config_repo = None if use_global_config else repo_name
873 if config_repo and new_parent_path:
873 if config_repo and new_parent_path:
874 config_repo = Repository.NAME_SEP.join(
874 config_repo = Repository.NAME_SEP.join(
875 (new_parent_path, config_repo))
875 (new_parent_path, config_repo))
876 config = make_db_config(clear_session=False, repo=config_repo)
876 config = make_db_config(clear_session=False, repo=config_repo)
877 config.set('extensions', 'largefiles', '')
877 config.set('extensions', 'largefiles', '')
878
878
879 # patch and reset hooks section of UI config to not run any
879 # patch and reset hooks section of UI config to not run any
880 # hooks on creating remote repo
880 # hooks on creating remote repo
881 config.clear_section('hooks')
881 config.clear_section('hooks')
882
882
883 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
883 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
884 if repo_type == 'git':
884 if repo_type == 'git':
885 repo = backend(
885 repo = backend(
886 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
886 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
887 with_wire={"cache": False})
887 with_wire={"cache": False})
888 else:
888 else:
889 repo = backend(
889 repo = backend(
890 repo_path, config=config, create=True, src_url=clone_uri,
890 repo_path, config=config, create=True, src_url=clone_uri,
891 with_wire={"cache": False})
891 with_wire={"cache": False})
892
892
893 repo.install_hooks()
893 if install_hooks:
894 repo.install_hooks()
894
895
895 log.debug('Created repo %s with %s backend',
896 log.debug('Created repo %s with %s backend',
896 safe_unicode(repo_name), safe_unicode(repo_type))
897 safe_unicode(repo_name), safe_unicode(repo_type))
897 return repo
898 return repo
898
899
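    # To make the group-aware path composition above concrete, a tiny
    # standalone sketch (not part of this module); the store path, group
    # parts and repository name are made up:
    #
    #     import os
    #
    #     repos_path = '/srv/rhodecode/repos'
    #     group_parts = ['web', 'backend']   # repo_group.full_path_splitted
    #     repo_name = 'api-server'
    #
    #     new_parent_path = os.sep.join(group_parts)
    #     repo_path = os.path.join(repos_path, new_parent_path, repo_name)
    #     print(repo_path)   # /srv/rhodecode/repos/web/backend/api-server on POSIX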
    def _rename_filesystem_repo(self, old, new):
        """
        renames repository on filesystem

        :param old: old name
        :param new: new name
        """
        log.info('renaming repo from %s to %s', old, new)

        old_path = os.path.join(self.repos_path, old)
        new_path = os.path.join(self.repos_path, new)
        if os.path.isdir(new_path):
            raise Exception(
                'Was trying to rename to already existing dir %s' % new_path
            )
        shutil.move(old_path, new_path)

    def _delete_filesystem_repo(self, repo):
        """
        Removes the repository from the filesystem. The removal is actually
        done by adding an rm__ prefix to the directory and renaming the
        internal .hg/.git dirs, so the repository is no longer valid for
        RhodeCode. It can be undeleted later on by reverting those renames.

        :param repo: repo object
        """
        rm_path = os.path.join(self.repos_path, repo.repo_name)
        repo_group = repo.group
        log.info("Removing repository %s", rm_path)
        # disable the hg/git internals so the directory doesn't get detected as a repo
        alias = repo.repo_type

        config = make_db_config(clear_session=False)
        config.set('extensions', 'largefiles', '')
        bare = getattr(repo.scm_instance(config=config), 'bare', False)

        # skip this for bare git repos
        if not bare:
            # disable VCS repo
            vcs_path = os.path.join(rm_path, '.%s' % alias)
            if os.path.exists(vcs_path):
                shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))

        _now = datetime.datetime.now()
        _ms = str(_now.microsecond).rjust(6, '0')
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
                             repo.just_name)
        if repo_group:
            # if repository is in a group, prefix the removal path with the group
            args = repo_group.full_path_splitted + [_d]
            _d = os.path.join(*args)

        if os.path.isdir(rm_path):
            shutil.move(rm_path, os.path.join(self.repos_path, _d))

        # finally cleanup diff-cache if it exists
        cached_diffs_dir = repo.cached_diffs_dir
        if os.path.isdir(cached_diffs_dir):
            shutil.rmtree(cached_diffs_dir)


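# A small standalone sketch (not part of this module) of the removal-directory
# naming scheme used in _delete_filesystem_repo above; the repository name is
# made up:
#
#     import datetime
#
#     def removal_dir_name(repo_just_name):
#         now = datetime.datetime.now()
#         ms = str(now.microsecond).rjust(6, '0')
#         return 'rm__%s__%s' % (
#             now.strftime('%Y%m%d_%H%M%S_' + ms), repo_just_name)
#
#     print(removal_dir_name('my-repo'))
#     # e.g. rm__20190612_153045_000123__my-repo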
class ReadmeFinder:
    """
    Utility which knows how to find a readme for a specific commit.

    The main idea is that this is a configurable algorithm. When creating an
    instance you can define parameters, currently only the `default_renderer`.
    Based on this configuration the method :meth:`search` behaves slightly
    differently.
    """

    readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
    path_re = re.compile(r'^docs?', re.IGNORECASE)

    default_priorities = {
        None: 0,
        '.text': 2,
        '.txt': 3,
        '.rst': 1,
        '.rest': 2,
        '.md': 1,
        '.mkdn': 2,
        '.mdown': 3,
        '.markdown': 4,
    }

    path_priority = {
        'doc': 0,
        'docs': 1,
    }

    FALLBACK_PRIORITY = 99

    RENDERER_TO_EXTENSION = {
        'rst': ['.rst', '.rest'],
        'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
    }

    def __init__(self, default_renderer=None):
        self._default_renderer = default_renderer
        self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
            default_renderer, [])

    def search(self, commit, path='/'):
        """
        Find a readme in the given `commit`.
        """
        nodes = commit.get_nodes(path)
        matches = self._match_readmes(nodes)
        matches = self._sort_according_to_priority(matches)
        if matches:
            return matches[0].node

        paths = self._match_paths(nodes)
        paths = self._sort_paths_according_to_priority(paths)
        for path in paths:
            match = self.search(commit, path=path)
            if match:
                return match

        return None

    def _match_readmes(self, nodes):
        for node in nodes:
            if not node.is_file():
                continue
            path = node.path.rsplit('/', 1)[-1]
            match = self.readme_re.match(path)
            if match:
                extension = match.group(1)
                yield ReadmeMatch(node, match, self._priority(extension))

    def _match_paths(self, nodes):
        for node in nodes:
            if not node.is_dir():
                continue
            match = self.path_re.match(node.path)
            if match:
                yield node.path

    def _priority(self, extension):
        renderer_priority = (
            0 if extension in self._renderer_extensions else 1)
        extension_priority = self.default_priorities.get(
            extension, self.FALLBACK_PRIORITY)
        return (renderer_priority, extension_priority)

    def _sort_according_to_priority(self, matches):

        def priority_and_path(match):
            return (match.priority, match.path)

        return sorted(matches, key=priority_and_path)

    def _sort_paths_according_to_priority(self, paths):

        def priority_and_path(path):
            return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)

        return sorted(paths, key=priority_and_path)


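# A short usage sketch for ReadmeFinder (not part of this module). It assumes
# `repo` is a RhodeCode Repository instance and that scm_instance()/get_commit()
# follow the usual backend API; the renderer choice is only an example:
#
#     commit = repo.scm_instance().get_commit()
#     finder = ReadmeFinder(default_renderer='markdown')
#     readme = finder.search(commit)
#     if readme is not None:
#         print(readme.path)   # e.g. README.md, or docs/readme.rst as a fallback
#
#     # the priority tuples decide ordering; lower sorts first:
#     #   finder._priority('.md')  -> (0, 1)  extension matches the markdown renderer
#     #   finder._priority('.rst') -> (1, 1)  known extension, different renderer
#     #   finder._priority(None)   -> (1, 0)  a bare `README` with no extension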
class ReadmeMatch:

    def __init__(self, node, match, priority):
        self.node = node
        self._match = match
        self.priority = priority

    @property
    def path(self):
        return self.node.path

    def __repr__(self):
        return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
@@ -1,86 +1,85 @@
## -*- coding: utf-8 -*-
<%inherit file="/base/base.mako"/>

<%def name="title()">
    ${_('New Gist')}
    %if c.rhodecode_name:
        &middot; ${h.branding(c.rhodecode_name)}
    %endif
</%def>

-<%def name="breadcrumbs_links()">
-    ${_('New Gist')}
-</%def>
+<%def name="breadcrumbs_links()"></%def>

<%def name="menu_bar_nav()">
    ${self.menu_items(active='gists')}
</%def>

<%def name="main()">
<div class="box">
    <!-- box / title -->
    <div class="title">
-        ${self.breadcrumbs()}
+
    </div>

    <div class="table">
        <div id="files_data">
            ${h.secure_form(h.route_path('gists_create'), id='eform', request=request)}
            <div>
-                <textarea id="description" name="description" placeholder="${_('Gist description ...')}"></textarea>
-
                <span class="gist-gravatar">
                    ${self.gravatar(c.rhodecode_user.email, 30)}
                </span>
                <label for='gistid'>${_('Gist id')}</label>
                ${h.text('gistid', placeholder=_('Auto generated'))}

                <label for='lifetime'>${_('Gist lifetime')}</label>
                ${h.dropdownmenu('lifetime', '', c.lifetime_options)}

                <label for='acl_level'>${_('Gist access level')}</label>
                ${h.dropdownmenu('gist_acl_level', '', c.acl_options)}

+                <textarea style="margin-top: 5px" id="description" name="description" placeholder="${_('Gist description ...')}"></textarea>
            </div>
+
            <div id="codeblock" class="codeblock">
                <div class="code-header">
                    <div class="form">
                        <div class="fields">
-                            ${h.text('filename', size=30, placeholder=_('name this file...'))}
+                            ${h.text('filename', size=30, placeholder=_('name gist file...'))}
                            ${h.dropdownmenu('mimetype','plain',[('plain',_('plain'))],enable_filter=True)}
                        </div>
                    </div>
                </div>
+
                <div id="editor_container">
                    <div id="editor_pre"></div>
                    <textarea id="editor" name="content" ></textarea>
                </div>
            </div>
+
            <div class="pull-right">
                ${h.submit('private',_('Create Private Gist'),class_="btn")}
                ${h.submit('public',_('Create Public Gist'),class_="btn")}
-                ${h.reset('reset',_('Reset'),class_="btn")}
            </div>
            ${h.end_form()}
        </div>
    </div>

</div>

<script type="text/javascript">
    var myCodeMirror = initCodeMirror('editor', '');

    var modes_select = $('#mimetype');
    fillCodeMirrorOptions(modes_select);

    var filename_selector = '#filename';
    // on change of select field set mode
    setCodeMirrorModeFromSelect(
        modes_select, filename_selector, myCodeMirror, null);

    // on entering the new filename set mode, from given extension
    setCodeMirrorModeFromInput(
        modes_select, filename_selector, myCodeMirror, null);

</script>
</%def>