commits: updated logic of in-memory-commits, fixed tests and re-architected a bit how commit_ids are calculated and updated....
marcink
r3743:b018c011 new-ui
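
The commit turns the repository's commit_ids list into a cached property keyed on a `_commit_ids_ver` token, so that `append_commit_id` can invalidate the cache when an in-memory commit adds a new id. A minimal, runnable sketch of that invalidation pattern follows (a hand-rolled cache stands in for zope's CachedProperty and the backend `_rebuild_cache` helper, whose internals are not part of this hunk):

import time


class CommitIdCache(object):
    """Illustrative stand-in for a repository with a version-keyed cache."""

    def __init__(self, initial_ids):
        self._stored_ids = list(initial_ids)
        self._commit_ids_ver = 0     # bumping this token invalidates the cache
        self._cache = {}             # maps version token -> computed list

    @property
    def commit_ids(self):
        # Recompute only when the version token changed; this mirrors the
        # @CachedProperty('_commit_ids_ver') usage introduced in the diff.
        ver = self._commit_ids_ver
        if ver not in self._cache:
            self._cache = {ver: list(self._stored_ids)}
        return self._cache[ver]

    def append_commit_id(self, commit_id):
        if commit_id not in self.commit_ids:
            self._stored_ids.append(commit_id)
            self._commit_ids_ver = time.time()   # next access recomputes


repo = CommitIdCache(['a1b2', 'c3d4'])
assert repo.commit_ids == ['a1b2', 'c3d4']
repo.append_commit_id('e5f6')
assert repo.commit_ids == ['a1b2', 'c3d4', 'e5f6']

Bumping the token to time.time() guarantees a new cache key, so the next commit_ids access recomputes the list instead of returning the stale one.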
@@ -1,1850 +1,1860 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2014-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24 import os
24 import os
25 import re
25 import re
26 import time
26 import time
27 import shutil
27 import shutil
28 import datetime
28 import datetime
29 import fnmatch
29 import fnmatch
30 import itertools
30 import itertools
31 import logging
31 import logging
32 import collections
32 import collections
33 import warnings
33 import warnings
34
34
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 from zope.cachedescriptors.property import CachedProperty
37
36 from pyramid import compat
38 from pyramid import compat
37
39
38 from rhodecode.translation import lazy_ugettext
40 from rhodecode.translation import lazy_ugettext
39 from rhodecode.lib.utils2 import safe_str, safe_unicode
41 from rhodecode.lib.utils2 import safe_str, safe_unicode
40 from rhodecode.lib.vcs import connection
42 from rhodecode.lib.vcs import connection
41 from rhodecode.lib.vcs.utils import author_name, author_email
43 from rhodecode.lib.vcs.utils import author_name, author_email
42 from rhodecode.lib.vcs.conf import settings
44 from rhodecode.lib.vcs.conf import settings
43 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
44 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
45 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
46 NodeDoesNotExistError, NodeNotChangedError, VCSError,
48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
47 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
48 RepositoryError)
50 RepositoryError)
49
51
50
52
log = logging.getLogger(__name__)


FILEMODE_DEFAULT = 0o100644
FILEMODE_EXECUTABLE = 0o100755

Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))


class MergeFailureReason(object):
    """
    Enumeration with all the reasons why the server side merge could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The merge was not successful, there are conflicts.
    MERGE_FAILED = 2

    # The merge succeeded but we could not push it to the target repository.
    PUSH_FAILED = 3

    # The specified target is not a head in the target repository.
    TARGET_IS_NOT_HEAD = 4

    # The source repository contains more branches than the target. Pushing
    # the merge will create additional branches in the target.
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # The target reference has multiple heads. That makes it impossible to
    # correctly identify the target location. This can only happen for
    # Mercurial branches.
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # The target repository is locked
    TARGET_IS_LOCKED = 7

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # An involved commit could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    # The target repo reference is missing.
    MISSING_TARGET_REF = 9

    # The source repo reference is missing.
    MISSING_SOURCE_REF = 10

    # The merge was not successful, there are conflicts related to sub
    # repositories.
    SUBREPO_MERGE_FAILED = 11


class UpdateFailureReason(object):
    """
    Enumeration with all the reasons why the pull request update could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The pull request is up to date.
    NO_CHANGE = 2

    # The pull request has a reference type that is not supported for update.
    WRONG_REF_TYPE = 3

    # Update failed because the target reference is missing.
    MISSING_TARGET_REF = 4

    # Update failed because the source reference is missing.
    MISSING_SOURCE_REF = 5


class MergeResponse(object):

    # uses .format(**metadata) for variables
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            u'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            u'This pull request cannot be merged because of an unhandled exception. '
            u'{exception}'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of merge conflicts.'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            u'This pull request could not be merged because push to '
            u'target:`{target}@{merge_commit}` failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'`{target_ref.name}` is not a head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            u'This pull request cannot be merged because the source contains '
            u'more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            u'This pull request cannot be merged because the target `{target_ref.name}` '
            u'has multiple heads: `{heads}`.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            u'This pull request cannot be merged because the target repository is '
            u'locked by {locked_by}.'),

        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'reference `{target_ref.name}` is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            u'This pull request cannot be merged because the source '
            u'reference `{source_ref.name}` is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of conflicts related '
            u'to sub repositories.'),

        # Deprecations
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            u'This pull request cannot be merged because the target or the '
            u'source reference is missing.'),

    }

    def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
        self.possible = possible
        self.executed = executed
        self.merge_ref = merge_ref
        self.failure_reason = failure_reason
        self.metadata = metadata or {}

    def __repr__(self):
        return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)

    def __eq__(self, other):
        same_instance = isinstance(other, self.__class__)
        return same_instance \
            and self.possible == other.possible \
            and self.executed == other.executed \
            and self.failure_reason == other.failure_reason

    @property
    def label(self):
        label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
                          not k.startswith('_'))
        return label_dict.get(self.failure_reason)

    @property
    def merge_status_message(self):
        """
        Return a human friendly error message for the given merge status code.
        """
        msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
        try:
            return msg.format(**self.metadata)
        except Exception:
            log.exception('Failed to format %s message', self)
            return msg

    def asdict(self):
        data = {}
        for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
                  'merge_status_message']:
            data[k] = getattr(self, k)
        return data


class BaseRepository(object):
    """
    Base Repository for final backends

    .. attribute:: DEFAULT_BRANCH_NAME

        name of default branch (i.e. "trunk" for svn, "master" for git etc.)

    .. attribute:: commit_ids

        list of all available commit ids, in ascending order

    .. attribute:: path

        absolute path to the repository

    .. attribute:: bookmarks

        Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
        there are no bookmarks or the backend implementation does not support
        bookmarks.

    .. attribute:: tags

        Mapping from name to :term:`Commit ID` of the tag.

    """

    DEFAULT_BRANCH_NAME = None
    DEFAULT_CONTACT = u"Unknown"
    DEFAULT_DESCRIPTION = u"unknown"
    EMPTY_COMMIT_ID = '0' * 40

    path = None
    _commit_ids_ver = 0

    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if the repository could
        not be found at the given ``repo_path``, or if a directory at
        ``repo_path`` already exists and ``create`` is set to True.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, would try to create the repository.
        :param src_url=None: if set, should be a proper url from which the
            repository would be cloned; requires ``create`` parameter to be set
            to True - raises RepositoryError if src_url is set and create
            evaluates to False
        """
        raise NotImplementedError

    def __repr__(self):
        return '<%s at %s>' % (self.__class__.__name__, self.path)

    def __len__(self):
        return self.count()

    def __eq__(self, other):
        same_instance = isinstance(other, self.__class__)
        return same_instance and other.path == self.path

    def __ne__(self, other):
        return not self.__eq__(other)

    def get_create_shadow_cache_pr_path(self, db_repo):
        path = db_repo.cached_diffs_dir
        if not os.path.exists(path):
            os.makedirs(path, 0o755)
        return path

    @classmethod
    def get_default_config(cls, default=None):
        config = Config()
        if default and isinstance(default, list):
            for section, key, val in default:
                config.set(section, key, val)
        return config

    @LazyProperty
    def _remote(self):
        raise NotImplementedError

    def _heads(self, branch=None):
        return []

    @LazyProperty
    def EMPTY_COMMIT(self):
        return EmptyCommit(self.EMPTY_COMMIT_ID)

    @LazyProperty
    def alias(self):
        for k, v in settings.BACKENDS.items():
            if v.split('.')[-1] == str(self.__class__.__name__):
                return k

    @LazyProperty
    def name(self):
        return safe_unicode(os.path.basename(self.path))

    @LazyProperty
    def description(self):
        raise NotImplementedError

    def refs(self):
        """
        returns a `dict` with branches, bookmarks, tags, and closed_branches
        for this repository
        """
        return dict(
            branches=self.branches,
            branches_closed=self.branches_closed,
            tags=self.tags,
            bookmarks=self.bookmarks
        )

    @LazyProperty
    def branches(self):
        """
        A `dict` which maps branch names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def branches_closed(self):
        """
        A `dict` which maps closed branch names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def bookmarks(self):
        """
        A `dict` which maps bookmark names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def tags(self):
        """
        A `dict` which maps tag names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def size(self):
        """
        Returns combined size in bytes for all repository files
        """
        tip = self.get_commit()
        return tip.size

    def size_at_commit(self, commit_id):
        commit = self.get_commit(commit_id)
        return commit.size

    def is_empty(self):
        return self._remote.is_empty()

    @staticmethod
    def check_url(url, config):
        """
        Function will check the given url and try to verify if it's a valid
        link.
        """
        raise NotImplementedError

    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend
        """
        raise NotImplementedError

    # ==========================================================================
    # COMMITS
    # ==========================================================================

    @CachedProperty('_commit_ids_ver')
    def commit_ids(self):
        raise NotImplementedError

    def append_commit_id(self, commit_id):
        # If the id is new, rebuild the cached list and bump the version token
        # so the ``commit_ids`` cached property is recomputed on next access.
        if commit_id not in self.commit_ids:
            self._rebuild_cache(self.commit_ids + [commit_id])
            self._commit_ids_ver = time.time()

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, the most recent commit is returned.

        :param pre_load: Optional. List of commit attributes to load.

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        raise NotImplementedError

    def __iter__(self):
        for commit_id in self.commit_ids:
            yield self.get_commit(commit_id=commit_id)

    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns an iterator of `BaseCommit` objects from start to end,
        not inclusive. This should behave just like a list, i.e. `end` is not
        inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date:
        :param end_date:
        :param branch_name:
        :param show_hidden:
        :param pre_load:
        :param translate_tags:
        """
        raise NotImplementedError

    def __getitem__(self, key):
        """
        Allows index based access to the commit objects of this repository.
        """
        pre_load = ["author", "branch", "date", "message", "parents"]
        if isinstance(key, slice):
            return self._get_range(key, pre_load)
        return self.get_commit(commit_idx=key, pre_load=pre_load)

    def _get_range(self, slice_obj, pre_load):
        for commit_id in self.commit_ids.__getitem__(slice_obj):
            yield self.get_commit(commit_id=commit_id, pre_load=pre_load)

    def count(self):
        return len(self.commit_ids)

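    # Illustrative usage of the commit accessors above; ``repo`` stands for a
    # concrete backend instance and is hypothetical here:
    #
    #   len(repo)            # number of commits, via count()
    #   repo[0]              # first commit, via get_commit(commit_idx=0)
    #   list(repo[0:3])      # slice access goes through _get_range()
    #   for commit in repo:  # __iter__ yields BaseCommit objects
    #       print(commit.raw_id)
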
    def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        raise NotImplementedError

    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exist
        """
        raise NotImplementedError

    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
            ``self.EMPTY_COMMIT`` - in this case, patch showing all
            the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: Can be set to a path of a file to create a diff of that
            file. If `path1` is also set, this value is only associated to
            `commit2`.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
            changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
            shown. Defaults to ``3``.
        :param path1: Can be set to a path to associate with `commit1`. This
            parameter works only for backends which support diff generation for
            different paths. Other backends will raise a `ValueError` if `path1`
            is set and has a different value than `path`.
        :param file_path: filter this diff by given path pattern
        """
        raise NotImplementedError

    def strip(self, commit_id, branch=None):
        """
        Strip given commit_id from the repository
        """
        raise NotImplementedError

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Returns the latest common ancestor commit, if one exists, between this
        repo's `commit_id1` and `commit_id2` from `repo2`.

        :param commit_id1: Commit id from this repository to use as a
            target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        """
        raise NotImplementedError

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Compare this repository's revision `commit_id1` with `commit_id2`.

        Returns a tuple(commits, ancestor) that would be merged from
        `commit_id2`. Doing a normal compare (``merge=False``), ``None``
        will be returned as ancestor.

        :param commit_id1: Commit id from this repository to use as a
            target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        :param merge: If set to ``True`` will do a merge compare which also
            returns the common ancestor.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
              user_name='', user_email='', message='', dry_run=False,
              use_rebase=False, close_branch=False):
        """
        Merge the revisions specified in `source_ref` from `source_repo`
        onto the `target_ref` of this repository.

        `source_ref` and `target_ref` are named tuples with the following
        fields `type`, `name` and `commit_id`.

        Returns a MergeResponse named tuple with the following fields
        'possible', 'executed', 'source_commit', 'target_commit',
        'merge_commit'.

        :param repo_id: `repo_id` target repo id.
        :param workspace_id: `workspace_id` unique identifier.
        :param target_ref: `target_ref` points to the commit on top of which
            the `source_ref` should be merged.
        :param source_repo: The repository that contains the commits to be
            merged.
        :param source_ref: `source_ref` points to the topmost commit from
            the `source_repo` which should be merged.
        :param user_name: Merge commit `user_name`.
        :param user_email: Merge commit `user_email`.
        :param message: Merge commit `message`.
        :param dry_run: If `True` the merge will not take place.
        :param use_rebase: If `True` commits from the source will be rebased
            on top of the target instead of being merged.
        :param close_branch: If `True` the branch will be closed before merging it
        """
        if dry_run:
            message = message or settings.MERGE_DRY_RUN_MESSAGE
            user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
            user_name = user_name or settings.MERGE_DRY_RUN_USER
        else:
            if not user_name:
                raise ValueError('user_name cannot be empty')
            if not user_email:
                raise ValueError('user_email cannot be empty')
            if not message:
                raise ValueError('message cannot be empty')

        try:
            return self._merge_repo(
                repo_id, workspace_id, target_ref, source_repo,
                source_ref, message, user_name, user_email, dry_run=dry_run,
                use_rebase=use_rebase, close_branch=close_branch)
        except RepositoryError as exc:
            log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
            return MergeResponse(
                False, False, None, MergeFailureReason.UNKNOWN,
                metadata={'exception': str(exc)})

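    # Illustrative call of merge() using the Reference namedtuple defined at
    # module level; the ids, workspace and repo objects are hypothetical:
    #
    #   target = Reference('branch', 'master', '<target-commit-id>')
    #   source = Reference('branch', 'feature', '<source-commit-id>')
    #   response = target_repo.merge(
    #       repo_id, workspace_id, target, source_repo, source,
    #       user_name='Joe Doe', user_email='joe.doe@example.com',
    #       message='Merge feature', dry_run=True)
    #   response.possible  # True if the merge could be performed
    #   response.merge_status_message  # human readable status
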
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """Internal implementation of merge."""
        raise NotImplementedError

    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        """
        Create the merge workspace.

        :param workspace_id: `workspace_id` unique identifier.
        """
        raise NotImplementedError

    def _get_legacy_shadow_repository_path(self, workspace_id):
        """
        Legacy version that was used before. We still need it for
        backward compat.
        """
        return os.path.join(
            os.path.dirname(self.path),
            '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))

    def _get_shadow_repository_path(self, repo_id, workspace_id):
        # The name of the shadow repository must start with '.', so it is
        # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
        legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
        if os.path.exists(legacy_repository_path):
            return legacy_repository_path
        else:
            return os.path.join(
                os.path.dirname(self.path),
                '.__shadow_repo_%s_%s' % (repo_id, workspace_id))

    def cleanup_merge_workspace(self, repo_id, workspace_id):
        """
        Remove merge workspace.

        This function MUST not fail in case there is no workspace associated
        with the given `workspace_id`.

        :param workspace_id: `workspace_id` unique identifier.
        """
        shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
        shadow_repository_path_del = '{}.{}.delete'.format(
            shadow_repository_path, time.time())

        # move the shadow repo, so it never conflicts with the one used.
        # we use this method because shutil.rmtree had some edge case problems
        # removing symlinked repositories
        if not os.path.isdir(shadow_repository_path):
            return

        shutil.move(shadow_repository_path, shadow_repository_path_del)
        try:
            shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
        except Exception:
            log.exception('Failed to gracefully remove shadow repo under %s',
                          shadow_repository_path_del)
            shutil.rmtree(shadow_repository_path_del, ignore_errors=True)

    # ========== #
    # COMMIT API #
    # ========== #

    @LazyProperty
    def in_memory_commit(self):
        """
        Returns :class:`InMemoryCommit` object for this repository.
        """
        raise NotImplementedError

    # ======================== #
    # UTILITIES FOR SUBCLASSES #
    # ======================== #

    def _validate_diff_commits(self, commit1, commit2):
        """
        Validates that the given commits are related to this repository.

        Intended as a utility for sub classes to have a consistent validation
        of input parameters in methods like :meth:`get_diff`.
        """
        self._validate_commit(commit1)
        self._validate_commit(commit2)
        if (isinstance(commit1, EmptyCommit) and
                isinstance(commit2, EmptyCommit)):
            raise ValueError("Cannot compare two empty commits")

    def _validate_commit(self, commit):
        if not isinstance(commit, BaseCommit):
            raise TypeError(
                "%s is not of type BaseCommit" % repr(commit))
        if commit.repository != self and not isinstance(commit, EmptyCommit):
            raise ValueError(
                "Commit %s must be a valid commit from this repository %s, "
                "related to this repository instead %s." %
                (commit, self, commit.repository))

    def _validate_commit_id(self, commit_id):
        if not isinstance(commit_id, compat.string_types):
            raise TypeError("commit_id must be a string value")

    def _validate_commit_idx(self, commit_idx):
        if not isinstance(commit_idx, (int, long)):
            raise TypeError("commit_idx must be a numeric value")

    def _validate_branch_name(self, branch_name):
        if branch_name and branch_name not in self.branches_all:
            msg = ("Branch %s not found in %s" % (branch_name, self))
            raise BranchDoesNotExistError(msg)

    #
    # Supporting deprecated API parts
    # TODO: johbo: consider to move this into a mixin
    #

    @property
    def EMPTY_CHANGESET(self):
        warnings.warn(
            "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
        return self.EMPTY_COMMIT_ID

    @property
    def revisions(self):
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        return self.commit_ids

    @revisions.setter
    def revisions(self, value):
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        self.commit_ids = value

    def get_changeset(self, revision=None, pre_load=None):
        warnings.warn("Use get_commit instead", DeprecationWarning)
        commit_id = None
        commit_idx = None
        if isinstance(revision, compat.string_types):
            commit_id = revision
        else:
            commit_idx = revision
        return self.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)

    def get_changesets(
            self, start=None, end=None, start_date=None, end_date=None,
            branch_name=None, pre_load=None):
        warnings.warn("Use get_commits instead", DeprecationWarning)
        start_id = self._revision_to_commit(start)
        end_id = self._revision_to_commit(end)
        return self.get_commits(
            start_id=start_id, end_id=end_id, start_date=start_date,
            end_date=end_date, branch_name=branch_name, pre_load=pre_load)

    def _revision_to_commit(self, revision):
        """
        Translates a revision to a commit_id

        Helps to support the old changeset based API which allows to use
        commit ids and commit indices interchangeably.
        """
        if revision is None:
            return revision

        if isinstance(revision, compat.string_types):
            commit_id = revision
        else:
            commit_id = self.commit_ids[revision]
        return commit_id

    @property
    def in_memory_changeset(self):
        warnings.warn("Use in_memory_commit instead", DeprecationWarning)
        return self.in_memory_commit

    def get_path_permissions(self, username):
        """
        Returns a path permission checker or None if not supported

        :param username: session user name
        :return: an instance of BasePathPermissionChecker or None
        """
        return None

    def install_hooks(self, force=False):
        return self._remote.install_hooks(force)

    def get_hooks_info(self):
        return self._remote.get_hooks_info()


class BaseCommit(object):
    """
    Each backend should implement its own commit representation.

    **Attributes**

    ``repository``
        repository object within which commit exists

    ``id``
        The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
        just ``tip``.

    ``raw_id``
        raw commit representation (i.e. full 40 length sha for git
        backend)

    ``short_id``
        shortened (if applicable) version of ``raw_id``; a simple
        shortcut for ``raw_id[:12]`` for git/mercurial backends or the same
        as ``raw_id`` for subversion

    ``idx``
        commit index

    ``files``
        list of ``FileNode`` (``Node`` with NodeKind.FILE) objects

    ``dirs``
        list of ``DirNode`` (``Node`` with NodeKind.DIR) objects

    ``nodes``
        combined list of ``Node`` objects

    ``author``
        author of the commit, as unicode

    ``message``
        message of the commit, as unicode

    ``parents``
        list of parent commits

    """

    branch = None
    """
    Depending on the backend this should be set to the branch name of the
    commit. Backends not supporting branches on commits should leave this
    value as ``None``.
    """

    _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
    """
    This template is used to generate a default prefix for repository archives
    if no prefix has been specified.
    """

850 def __str__(self):
862 def __str__(self):
851 return '<%s at %s:%s>' % (
863 return '<%s at %s:%s>' % (
852 self.__class__.__name__, self.idx, self.short_id)
864 self.__class__.__name__, self.idx, self.short_id)
853
865
854 def __repr__(self):
866 def __repr__(self):
855 return self.__str__()
867 return self.__str__()
856
868
857 def __unicode__(self):
869 def __unicode__(self):
858 return u'%s:%s' % (self.idx, self.short_id)
870 return u'%s:%s' % (self.idx, self.short_id)
859
871
860 def __eq__(self, other):
872 def __eq__(self, other):
861 same_instance = isinstance(other, self.__class__)
873 same_instance = isinstance(other, self.__class__)
862 return same_instance and self.raw_id == other.raw_id
874 return same_instance and self.raw_id == other.raw_id
863
875
864 def __json__(self):
876 def __json__(self):
865 parents = []
877 parents = []
866 try:
878 try:
867 for parent in self.parents:
879 for parent in self.parents:
868 parents.append({'raw_id': parent.raw_id})
880 parents.append({'raw_id': parent.raw_id})
869 except NotImplementedError:
881 except NotImplementedError:
870 # empty commit doesn't have parents implemented
882 # empty commit doesn't have parents implemented
871 pass
883 pass
872
884
873 return {
885 return {
874 'short_id': self.short_id,
886 'short_id': self.short_id,
875 'raw_id': self.raw_id,
887 'raw_id': self.raw_id,
876 'revision': self.idx,
888 'revision': self.idx,
877 'message': self.message,
889 'message': self.message,
878 'date': self.date,
890 'date': self.date,
879 'author': self.author,
891 'author': self.author,
880 'parents': parents,
892 'parents': parents,
881 'branch': self.branch
893 'branch': self.branch
882 }
894 }
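
A minimal sketch of how the ``__json__`` payload looks in practice. Obtaining the repository through ``get_backend`` and the repository path are assumptions for illustration; the key set mirrors the dictionary built above.

    from rhodecode.lib.vcs.backends import get_backend

    repo = get_backend('git')(repo_path='/tmp/example-repo')  # illustrative path
    tip = repo.get_commit()  # most recent commit
    data = tip.__json__()
    # data contains: short_id, raw_id, revision (idx), message, date,
    # author, parents (a list of {'raw_id': ...} dicts) and branch
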
883
895
884 def __getstate__(self):
896 def __getstate__(self):
885 d = self.__dict__.copy()
897 d = self.__dict__.copy()
886 d.pop('_remote', None)
898 d.pop('_remote', None)
887 d.pop('repository', None)
899 d.pop('repository', None)
888 return d
900 return d
889
901
890 def _get_refs(self):
902 def _get_refs(self):
891 return {
903 return {
892 'branches': [self.branch] if self.branch else [],
904 'branches': [self.branch] if self.branch else [],
893 'bookmarks': getattr(self, 'bookmarks', []),
905 'bookmarks': getattr(self, 'bookmarks', []),
894 'tags': self.tags
906 'tags': self.tags
895 }
907 }
896
908
897 @LazyProperty
909 @LazyProperty
898 def last(self):
910 def last(self):
899 """
911 """
900 ``True`` if this is the last commit in the repository, ``False``
912 ``True`` if this is the last commit in the repository, ``False``
901 otherwise; trying to access this attribute while there are no
913 otherwise; trying to access this attribute while there are no
902 commits raises an `EmptyRepositoryError`
914 commits raises an `EmptyRepositoryError`
903 """
915 """
904 if self.repository is None:
916 if self.repository is None:
905 raise CommitError("Cannot check if it's most recent commit")
917 raise CommitError("Cannot check if it's most recent commit")
906 return self.raw_id == self.repository.commit_ids[-1]
918 return self.raw_id == self.repository.commit_ids[-1]
907
919
908 @LazyProperty
920 @LazyProperty
909 def parents(self):
921 def parents(self):
910 """
922 """
911 Returns list of parent commits.
923 Returns list of parent commits.
912 """
924 """
913 raise NotImplementedError
925 raise NotImplementedError
914
926
915 @LazyProperty
927 @LazyProperty
916 def first_parent(self):
928 def first_parent(self):
917 """
929 """
918 Returns the first parent commit, or an ``EmptyCommit`` if this commit has no parents.
930 Returns the first parent commit, or an ``EmptyCommit`` if this commit has no parents.
919 """
931 """
920 return self.parents[0] if self.parents else EmptyCommit()
932 return self.parents[0] if self.parents else EmptyCommit()
921
933
922 @property
934 @property
923 def merge(self):
935 def merge(self):
924 """
936 """
925 Returns boolean if commit is a merge.
937 Returns boolean if commit is a merge.
926 """
938 """
927 return len(self.parents) > 1
939 return len(self.parents) > 1
928
940
929 @LazyProperty
941 @LazyProperty
930 def children(self):
942 def children(self):
931 """
943 """
932 Returns list of child commits.
944 Returns list of child commits.
933 """
945 """
934 raise NotImplementedError
946 raise NotImplementedError
935
947
936 @LazyProperty
948 @LazyProperty
937 def id(self):
949 def id(self):
938 """
950 """
939 Returns string identifying this commit.
951 Returns string identifying this commit.
940 """
952 """
941 raise NotImplementedError
953 raise NotImplementedError
942
954
943 @LazyProperty
955 @LazyProperty
944 def raw_id(self):
956 def raw_id(self):
945 """
957 """
946 Returns raw string identifying this commit.
958 Returns raw string identifying this commit.
947 """
959 """
948 raise NotImplementedError
960 raise NotImplementedError
949
961
950 @LazyProperty
962 @LazyProperty
951 def short_id(self):
963 def short_id(self):
952 """
964 """
953 Returns shortened version of ``raw_id`` attribute, as string,
965 Returns shortened version of ``raw_id`` attribute, as string,
954 identifying this commit, useful for presentation to users.
966 identifying this commit, useful for presentation to users.
955 """
967 """
956 raise NotImplementedError
968 raise NotImplementedError
957
969
958 @LazyProperty
970 @LazyProperty
959 def idx(self):
971 def idx(self):
960 """
972 """
961 Returns integer identifying this commit.
973 Returns integer identifying this commit.
962 """
974 """
963 raise NotImplementedError
975 raise NotImplementedError
964
976
965 @LazyProperty
977 @LazyProperty
966 def committer(self):
978 def committer(self):
967 """
979 """
968 Returns committer for this commit
980 Returns committer for this commit
969 """
981 """
970 raise NotImplementedError
982 raise NotImplementedError
971
983
972 @LazyProperty
984 @LazyProperty
973 def committer_name(self):
985 def committer_name(self):
974 """
986 """
975 Returns committer name for this commit
987 Returns committer name for this commit
976 """
988 """
977
989
978 return author_name(self.committer)
990 return author_name(self.committer)
979
991
980 @LazyProperty
992 @LazyProperty
981 def committer_email(self):
993 def committer_email(self):
982 """
994 """
983 Returns committer email address for this commit
995 Returns committer email address for this commit
984 """
996 """
985
997
986 return author_email(self.committer)
998 return author_email(self.committer)
987
999
988 @LazyProperty
1000 @LazyProperty
989 def author(self):
1001 def author(self):
990 """
1002 """
991 Returns author for this commit
1003 Returns author for this commit
992 """
1004 """
993
1005
994 raise NotImplementedError
1006 raise NotImplementedError
995
1007
996 @LazyProperty
1008 @LazyProperty
997 def author_name(self):
1009 def author_name(self):
998 """
1010 """
999 Returns author name for this commit
1011 Returns author name for this commit
1000 """
1012 """
1001
1013
1002 return author_name(self.author)
1014 return author_name(self.author)
1003
1015
1004 @LazyProperty
1016 @LazyProperty
1005 def author_email(self):
1017 def author_email(self):
1006 """
1018 """
1007 Returns author email address for this commit
1019 Returns author email address for this commit
1008 """
1020 """
1009
1021
1010 return author_email(self.author)
1022 return author_email(self.author)
1011
1023
1012 def get_file_mode(self, path):
1024 def get_file_mode(self, path):
1013 """
1025 """
1014 Returns stat mode of the file at `path`.
1026 Returns stat mode of the file at `path`.
1015 """
1027 """
1016 raise NotImplementedError
1028 raise NotImplementedError
1017
1029
1018 def is_link(self, path):
1030 def is_link(self, path):
1019 """
1031 """
1020 Returns ``True`` if given `path` is a symlink
1032 Returns ``True`` if given `path` is a symlink
1021 """
1033 """
1022 raise NotImplementedError
1034 raise NotImplementedError
1023
1035
1024 def get_file_content(self, path):
1036 def get_file_content(self, path):
1025 """
1037 """
1026 Returns content of the file at the given `path`.
1038 Returns content of the file at the given `path`.
1027 """
1039 """
1028 raise NotImplementedError
1040 raise NotImplementedError
1029
1041
1030 def get_file_size(self, path):
1042 def get_file_size(self, path):
1031 """
1043 """
1032 Returns size of the file at the given `path`.
1044 Returns size of the file at the given `path`.
1033 """
1045 """
1034 raise NotImplementedError
1046 raise NotImplementedError
1035
1047
1036 def get_path_commit(self, path, pre_load=None):
1048 def get_path_commit(self, path, pre_load=None):
1037 """
1049 """
1038 Returns last commit of the file at the given `path`.
1050 Returns last commit of the file at the given `path`.
1039
1051
1040 :param pre_load: Optional. List of commit attributes to load.
1052 :param pre_load: Optional. List of commit attributes to load.
1041 """
1053 """
1042 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1054 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1043 if not commits:
1055 if not commits:
1044 raise RepositoryError(
1056 raise RepositoryError(
1045 'Failed to fetch history for path {}. '
1057 'Failed to fetch history for path {}. '
1046 'Please check if such path exists in your repository'.format(
1058 'Please check if such path exists in your repository'.format(
1047 path))
1059 path))
1048 return commits[0]
1060 return commits[0]
1049
1061
1050 def get_path_history(self, path, limit=None, pre_load=None):
1062 def get_path_history(self, path, limit=None, pre_load=None):
1051 """
1063 """
1052 Returns history of file as reversed list of :class:`BaseCommit`
1064 Returns history of file as reversed list of :class:`BaseCommit`
1053 objects in which the file at the given `path` was modified.
1065 objects in which the file at the given `path` was modified.
1054
1066
1055 :param limit: Optional. Allows limiting the size of the returned
1067 :param limit: Optional. Allows limiting the size of the returned
1056 history. This is intended as a hint to the underlying backend, so
1068 history. This is intended as a hint to the underlying backend, so
1057 that it can apply optimizations depending on the limit.
1069 that it can apply optimizations depending on the limit.
1058 :param pre_load: Optional. List of commit attributes to load.
1070 :param pre_load: Optional. List of commit attributes to load.
1059 """
1071 """
1060 raise NotImplementedError
1072 raise NotImplementedError
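
A hedged usage sketch for the two path-history helpers; ``commit`` is assumed to be a concrete backend commit, and the file path and pre_load attribute names are illustrative only.

    history = commit.get_path_history('docs/index.rst', limit=3,
                                      pre_load=['author', 'message'])
    for c in history:
        print('%s %s' % (c.short_id, c.author_name))

    # get_path_commit() is simply the first (most recent) entry of that history
    last_touch = commit.get_path_commit('docs/index.rst')
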
1061
1073
1062 def get_file_annotate(self, path, pre_load=None):
1074 def get_file_annotate(self, path, pre_load=None):
1063 """
1075 """
1064 Returns a generator of four-element tuples:
1076 Returns a generator of four-element tuples:
1065 (lineno, sha, commit lazy loader, line)
1077 (lineno, sha, commit lazy loader, line)
1066
1078
1067 :param pre_load: Optional. List of commit attributes to load.
1079 :param pre_load: Optional. List of commit attributes to load.
1068 """
1080 """
1069 raise NotImplementedError
1081 raise NotImplementedError
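
A small sketch of consuming the annotation generator; ``commit`` and the file path are assumptions, and the commit lazy loader is only resolved when the full commit object is actually needed.

    for lineno, sha, commit_loader, line in commit.get_file_annotate('setup.py'):
        if 'import' in line:
            blame = commit_loader()  # resolves the lazy loader to a BaseCommit
            print('%4d %s %s' % (lineno, sha[:12], blame.author_name))
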
1070
1082
1071 def get_nodes(self, path):
1083 def get_nodes(self, path):
1072 """
1084 """
1073 Returns a combined list of ``DirNode`` and ``FileNode`` objects representing
1085 Returns a combined list of ``DirNode`` and ``FileNode`` objects representing
1074 the state of the commit at the given ``path``.
1086 the state of the commit at the given ``path``.
1075
1087
1076 :raises ``CommitError``: if node at the given ``path`` is not
1088 :raises ``CommitError``: if node at the given ``path`` is not
1077 instance of ``DirNode``
1089 instance of ``DirNode``
1078 """
1090 """
1079 raise NotImplementedError
1091 raise NotImplementedError
1080
1092
1081 def get_node(self, path):
1093 def get_node(self, path):
1082 """
1094 """
1083 Returns ``Node`` object from the given ``path``.
1095 Returns ``Node`` object from the given ``path``.
1084
1096
1085 :raises ``NodeDoesNotExistError``: if there is no node at the given
1097 :raises ``NodeDoesNotExistError``: if there is no node at the given
1086 ``path``
1098 ``path``
1087 """
1099 """
1088 raise NotImplementedError
1100 raise NotImplementedError
1089
1101
1090 def get_largefile_node(self, path):
1102 def get_largefile_node(self, path):
1091 """
1103 """
1092 Returns the path to the largefile from Mercurial/Git-lfs storage,
1104 Returns the path to the largefile from Mercurial/Git-lfs storage,
1093 or None if it's not a largefile node
1105 or None if it's not a largefile node
1094 """
1106 """
1095 return None
1107 return None
1096
1108
1097 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1109 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1098 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1110 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1099 """
1111 """
1100 Creates an archive containing the contents of the repository.
1112 Creates an archive containing the contents of the repository.
1101
1113
1102 :param archive_dest_path: path of the file in which to create the archive.
1114 :param archive_dest_path: path of the file in which to create the archive.
1103 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1115 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1104 :param prefix: name of root directory in archive.
1116 :param prefix: name of root directory in archive.
1105 Default is repository name and commit's short_id joined with dash:
1117 Default is repository name and commit's short_id joined with dash:
1106 ``"{repo_name}-{short_id}"``.
1118 ``"{repo_name}-{short_id}"``.
1107 :param write_metadata: write a metadata file into archive.
1119 :param write_metadata: write a metadata file into archive.
1108 :param mtime: custom modification time for archive creation, defaults
1120 :param mtime: custom modification time for archive creation, defaults
1109 to the commit's date if not given.
1121 to the commit's date if not given.
1110 :param archive_at_path: pack files at this path (default '/')
1122 :param archive_at_path: pack files at this path (default '/')
1111
1123
1112 :raise VCSError: If prefix has a problem.
1124 :raise VCSError: If prefix has a problem.
1113 """
1125 """
1114 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1126 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1115 if kind not in allowed_kinds:
1127 if kind not in allowed_kinds:
1116 raise ImproperArchiveTypeError(
1128 raise ImproperArchiveTypeError(
1117 'Archive kind (%s) not supported use one of %s' %
1129 'Archive kind (%s) not supported use one of %s' %
1118 (kind, allowed_kinds))
1130 (kind, allowed_kinds))
1119
1131
1120 prefix = self._validate_archive_prefix(prefix)
1132 prefix = self._validate_archive_prefix(prefix)
1121
1133
1122 mtime = mtime if mtime is not None else time.mktime(self.date.timetuple())
1134 mtime = mtime if mtime is not None else time.mktime(self.date.timetuple())
1123
1135
1124 file_info = []
1136 file_info = []
1125 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1137 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1126 for _r, _d, files in cur_rev.walk(archive_at_path):
1138 for _r, _d, files in cur_rev.walk(archive_at_path):
1127 for f in files:
1139 for f in files:
1128 f_path = os.path.join(prefix, f.path)
1140 f_path = os.path.join(prefix, f.path)
1129 file_info.append(
1141 file_info.append(
1130 (f_path, f.mode, f.is_link(), f.raw_bytes))
1142 (f_path, f.mode, f.is_link(), f.raw_bytes))
1131
1143
1132 if write_metadata:
1144 if write_metadata:
1133 metadata = [
1145 metadata = [
1134 ('repo_name', self.repository.name),
1146 ('repo_name', self.repository.name),
1135 ('commit_id', self.raw_id),
1147 ('commit_id', self.raw_id),
1136 ('mtime', mtime),
1148 ('mtime', mtime),
1137 ('branch', self.branch),
1149 ('branch', self.branch),
1138 ('tags', ','.join(self.tags)),
1150 ('tags', ','.join(self.tags)),
1139 ]
1151 ]
1140 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1152 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1141 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1153 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1142
1154
1143 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1155 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1144
1156
1145 def _validate_archive_prefix(self, prefix):
1157 def _validate_archive_prefix(self, prefix):
1146 if prefix is None:
1158 if prefix is None:
1147 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1159 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1148 repo_name=safe_str(self.repository.name),
1160 repo_name=safe_str(self.repository.name),
1149 short_id=self.short_id)
1161 short_id=self.short_id)
1150 elif not isinstance(prefix, str):
1162 elif not isinstance(prefix, str):
1151 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1163 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1152 elif prefix.startswith('/'):
1164 elif prefix.startswith('/'):
1153 raise VCSError("Prefix cannot start with leading slash")
1165 raise VCSError("Prefix cannot start with leading slash")
1154 elif prefix.strip() == '':
1166 elif prefix.strip() == '':
1155 raise VCSError("Prefix cannot be empty")
1167 raise VCSError("Prefix cannot be empty")
1156 return prefix
1168 return prefix
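
A usage sketch for ``archive_repo``, assuming ``commit`` is a concrete backend commit; the destination path and prefix are illustrative, and the prefix must satisfy the validation above (non-empty, no leading slash).

    commit.archive_repo(
        '/tmp/myrepo-snapshot.tgz',               # archive_dest_path
        kind='tgz',                               # must be listed in settings.ARCHIVE_SPECS
        prefix='myrepo-%s' % commit.short_id,     # root directory inside the archive
        write_metadata=True)                      # adds a .archival.txt entry
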
1157
1169
1158 @LazyProperty
1170 @LazyProperty
1159 def root(self):
1171 def root(self):
1160 """
1172 """
1161 Returns ``RootNode`` object for this commit.
1173 Returns ``RootNode`` object for this commit.
1162 """
1174 """
1163 return self.get_node('')
1175 return self.get_node('')
1164
1176
1165 def next(self, branch=None):
1177 def next(self, branch=None):
1166 """
1178 """
1167 Returns the next commit from the current one; if branch is given, it
1179 Returns the next commit from the current one; if branch is given, it
1168 returns the next commit belonging to that branch
1180 returns the next commit belonging to that branch
1169
1181
1170 :param branch: show commits within the given named branch
1182 :param branch: show commits within the given named branch
1171 """
1183 """
1172 indexes = xrange(self.idx + 1, self.repository.count())
1184 indexes = xrange(self.idx + 1, self.repository.count())
1173 return self._find_next(indexes, branch)
1185 return self._find_next(indexes, branch)
1174
1186
1175 def prev(self, branch=None):
1187 def prev(self, branch=None):
1176 """
1188 """
1177 Returns the previous commit from the current one; if branch is given,
1189 Returns the previous commit from the current one; if branch is given,
1178 it returns the previous commit belonging to that branch
1190 it returns the previous commit belonging to that branch
1179
1191
1180 :param branch: show commit within the given named branch
1192 :param branch: show commit within the given named branch
1181 """
1193 """
1182 indexes = xrange(self.idx - 1, -1, -1)
1194 indexes = xrange(self.idx - 1, -1, -1)
1183 return self._find_next(indexes, branch)
1195 return self._find_next(indexes, branch)
1184
1196
1185 def _find_next(self, indexes, branch=None):
1197 def _find_next(self, indexes, branch=None):
1186 if branch and self.branch != branch:
1198 if branch and self.branch != branch:
1187 raise VCSError('Branch option used on commit not belonging '
1199 raise VCSError('Branch option used on commit not belonging '
1188 'to that branch')
1200 'to that branch')
1189
1201
1190 for next_idx in indexes:
1202 for next_idx in indexes:
1191 commit = self.repository.get_commit(commit_idx=next_idx)
1203 commit = self.repository.get_commit(commit_idx=next_idx)
1192 if branch and branch != commit.branch:
1204 if branch and branch != commit.branch:
1193 continue
1205 continue
1194 return commit
1206 return commit
1195 raise CommitDoesNotExistError
1207 raise CommitDoesNotExistError
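
A sketch of stepping through history with ``next``/``prev``; ``commit`` is assumed to be a concrete backend commit, and ``CommitDoesNotExistError`` marks the edge of the (optionally branch-filtered) history.

    from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError

    try:
        newer = commit.next(branch=commit.branch)  # next commit on the same branch
        older = commit.prev()                      # previous commit, any branch
    except CommitDoesNotExistError:
        pass  # already at the newest/oldest commit
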
1196
1208
1197 def diff(self, ignore_whitespace=True, context=3):
1209 def diff(self, ignore_whitespace=True, context=3):
1198 """
1210 """
1199 Returns a `Diff` object representing the change made by this commit.
1211 Returns a `Diff` object representing the change made by this commit.
1200 """
1212 """
1201 parent = self.first_parent
1213 parent = self.first_parent
1202 diff = self.repository.get_diff(
1214 diff = self.repository.get_diff(
1203 parent, self,
1215 parent, self,
1204 ignore_whitespace=ignore_whitespace,
1216 ignore_whitespace=ignore_whitespace,
1205 context=context)
1217 context=context)
1206 return diff
1218 return diff
1207
1219
1208 @LazyProperty
1220 @LazyProperty
1209 def added(self):
1221 def added(self):
1210 """
1222 """
1211 Returns list of added ``FileNode`` objects.
1223 Returns list of added ``FileNode`` objects.
1212 """
1224 """
1213 raise NotImplementedError
1225 raise NotImplementedError
1214
1226
1215 @LazyProperty
1227 @LazyProperty
1216 def changed(self):
1228 def changed(self):
1217 """
1229 """
1218 Returns list of modified ``FileNode`` objects.
1230 Returns list of modified ``FileNode`` objects.
1219 """
1231 """
1220 raise NotImplementedError
1232 raise NotImplementedError
1221
1233
1222 @LazyProperty
1234 @LazyProperty
1223 def removed(self):
1235 def removed(self):
1224 """
1236 """
1225 Returns list of removed ``FileNode`` objects.
1237 Returns list of removed ``FileNode`` objects.
1226 """
1238 """
1227 raise NotImplementedError
1239 raise NotImplementedError
1228
1240
1229 @LazyProperty
1241 @LazyProperty
1230 def size(self):
1242 def size(self):
1231 """
1243 """
1232 Returns total number of bytes from contents of all filenodes.
1244 Returns total number of bytes from contents of all filenodes.
1233 """
1245 """
1234 return sum((node.size for node in self.get_filenodes_generator()))
1246 return sum((node.size for node in self.get_filenodes_generator()))
1235
1247
1236 def walk(self, topurl=''):
1248 def walk(self, topurl=''):
1237 """
1249 """
1238 Similar to the os.walk method. Instead of the filesystem, it walks
1250 Similar to the os.walk method. Instead of the filesystem, it walks
1239 through the commit starting at the given ``topurl``. Returns a generator of tuples
1251 through the commit starting at the given ``topurl``. Returns a generator of tuples
1240 (topnode, dirnodes, filenodes).
1252 (topnode, dirnodes, filenodes).
1241 """
1253 """
1242 topnode = self.get_node(topurl)
1254 topnode = self.get_node(topurl)
1243 if not topnode.is_dir():
1255 if not topnode.is_dir():
1244 return
1256 return
1245 yield (topnode, topnode.dirs, topnode.files)
1257 yield (topnode, topnode.dirs, topnode.files)
1246 for dirnode in topnode.dirs:
1258 for dirnode in topnode.dirs:
1247 for tup in self.walk(dirnode.path):
1259 for tup in self.walk(dirnode.path):
1248 yield tup
1260 yield tup
1249
1261
1250 def get_filenodes_generator(self):
1262 def get_filenodes_generator(self):
1251 """
1263 """
1252 Returns generator that yields *all* file nodes.
1264 Returns generator that yields *all* file nodes.
1253 """
1265 """
1254 for topnode, dirs, files in self.walk():
1266 for topnode, dirs, files in self.walk():
1255 for node in files:
1267 for node in files:
1256 yield node
1268 yield node
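
A short sketch of traversing a commit tree with ``walk``, mirroring how ``size`` and ``get_filenodes_generator`` use it; ``commit`` and the sub-path are assumptions for illustration.

    total = 0
    for topnode, dirs, files in commit.walk('docs'):
        for filenode in files:
            total += filenode.size
    print('%d bytes under docs/' % total)
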
1257
1269
1258 #
1270 #
1259 # Utilities for sub classes to support consistent behavior
1271 # Utilities for sub classes to support consistent behavior
1260 #
1272 #
1261
1273
1262 def no_node_at_path(self, path):
1274 def no_node_at_path(self, path):
1263 return NodeDoesNotExistError(
1275 return NodeDoesNotExistError(
1264 u"There is no file nor directory at the given path: "
1276 u"There is no file nor directory at the given path: "
1265 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1277 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1266
1278
1267 def _fix_path(self, path):
1279 def _fix_path(self, path):
1268 """
1280 """
1269 Paths are stored without a trailing slash, so we need to get rid of it
1281 Paths are stored without a trailing slash, so we need to get rid of it
1270 if needed.
1282 if needed.
1271 """
1283 """
1272 return path.rstrip('/')
1284 return path.rstrip('/')
1273
1285
1274 #
1286 #
1275 # Deprecated API based on changesets
1287 # Deprecated API based on changesets
1276 #
1288 #
1277
1289
1278 @property
1290 @property
1279 def revision(self):
1291 def revision(self):
1280 warnings.warn("Use idx instead", DeprecationWarning)
1292 warnings.warn("Use idx instead", DeprecationWarning)
1281 return self.idx
1293 return self.idx
1282
1294
1283 @revision.setter
1295 @revision.setter
1284 def revision(self, value):
1296 def revision(self, value):
1285 warnings.warn("Use idx instead", DeprecationWarning)
1297 warnings.warn("Use idx instead", DeprecationWarning)
1286 self.idx = value
1298 self.idx = value
1287
1299
1288 def get_file_changeset(self, path):
1300 def get_file_changeset(self, path):
1289 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1301 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1290 return self.get_path_commit(path)
1302 return self.get_path_commit(path)
1291
1303
1292
1304
1293 class BaseChangesetClass(type):
1305 class BaseChangesetClass(type):
1294
1306
1295 def __instancecheck__(self, instance):
1307 def __instancecheck__(self, instance):
1296 return isinstance(instance, BaseCommit)
1308 return isinstance(instance, BaseCommit)
1297
1309
1298
1310
1299 class BaseChangeset(BaseCommit):
1311 class BaseChangeset(BaseCommit):
1300
1312
1301 __metaclass__ = BaseChangesetClass
1313 __metaclass__ = BaseChangesetClass
1302
1314
1303 def __new__(cls, *args, **kwargs):
1315 def __new__(cls, *args, **kwargs):
1304 warnings.warn(
1316 warnings.warn(
1305 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1317 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1306 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1318 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1307
1319
1308
1320
1309 class BaseInMemoryCommit(object):
1321 class BaseInMemoryCommit(object):
1310 """
1322 """
1311 Represents differences between repository's state (most recent head) and
1323 Represents differences between repository's state (most recent head) and
1312 changes made *in place*.
1324 changes made *in place*.
1313
1325
1314 **Attributes**
1326 **Attributes**
1315
1327
1316 ``repository``
1328 ``repository``
1317 repository object for this in-memory-commit
1329 repository object for this in-memory-commit
1318
1330
1319 ``added``
1331 ``added``
1320 list of ``FileNode`` objects marked as *added*
1332 list of ``FileNode`` objects marked as *added*
1321
1333
1322 ``changed``
1334 ``changed``
1323 list of ``FileNode`` objects marked as *changed*
1335 list of ``FileNode`` objects marked as *changed*
1324
1336
1325 ``removed``
1337 ``removed``
1326 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1338 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1327 *removed*
1339 *removed*
1328
1340
1329 ``parents``
1341 ``parents``
1330 list of :class:`BaseCommit` instances representing parents of
1342 list of :class:`BaseCommit` instances representing parents of
1331 in-memory commit. Should always be 2-element sequence.
1343 in-memory commit. Should always be 2-element sequence.
1332
1344
1333 """
1345 """
1334
1346
1335 def __init__(self, repository):
1347 def __init__(self, repository):
1336 self.repository = repository
1348 self.repository = repository
1337 self.added = []
1349 self.added = []
1338 self.changed = []
1350 self.changed = []
1339 self.removed = []
1351 self.removed = []
1340 self.parents = []
1352 self.parents = []
1341
1353
1342 def add(self, *filenodes):
1354 def add(self, *filenodes):
1343 """
1355 """
1344 Marks given ``FileNode`` objects as *to be committed*.
1356 Marks given ``FileNode`` objects as *to be committed*.
1345
1357
1346 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1358 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1347 latest commit
1359 latest commit
1348 :raises ``NodeAlreadyAddedError``: if node with same path is already
1360 :raises ``NodeAlreadyAddedError``: if node with same path is already
1349 marked as *added*
1361 marked as *added*
1350 """
1362 """
1351 # Check if not already marked as *added* first
1363 # Check if not already marked as *added* first
1352 for node in filenodes:
1364 for node in filenodes:
1353 if node.path in (n.path for n in self.added):
1365 if node.path in (n.path for n in self.added):
1354 raise NodeAlreadyAddedError(
1366 raise NodeAlreadyAddedError(
1355 "Such FileNode %s is already marked for addition"
1367 "Such FileNode %s is already marked for addition"
1356 % node.path)
1368 % node.path)
1357 for node in filenodes:
1369 for node in filenodes:
1358 self.added.append(node)
1370 self.added.append(node)
1359
1371
1360 def change(self, *filenodes):
1372 def change(self, *filenodes):
1361 """
1373 """
1362 Marks given ``FileNode`` objects to be *changed* in next commit.
1374 Marks given ``FileNode`` objects to be *changed* in next commit.
1363
1375
1364 :raises ``EmptyRepositoryError``: if there are no commits yet
1376 :raises ``EmptyRepositoryError``: if there are no commits yet
1365 :raises ``NodeAlreadyChangedError``: if node with same path is already
1377 :raises ``NodeAlreadyChangedError``: if node with same path is already
1366 marked to be *changed*
1378 marked to be *changed*
1367 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1379 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1368 marked to be *removed*
1380 marked to be *removed*
1369 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1381 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1370 commit
1382 commit
1371 :raises ``NodeNotChangedError``: if node hasn't really been changed
1383 :raises ``NodeNotChangedError``: if node hasn't really been changed
1372 """
1384 """
1373 for node in filenodes:
1385 for node in filenodes:
1374 if node.path in (n.path for n in self.removed):
1386 if node.path in (n.path for n in self.removed):
1375 raise NodeAlreadyRemovedError(
1387 raise NodeAlreadyRemovedError(
1376 "Node at %s is already marked as removed" % node.path)
1388 "Node at %s is already marked as removed" % node.path)
1377 try:
1389 try:
1378 self.repository.get_commit()
1390 self.repository.get_commit()
1379 except EmptyRepositoryError:
1391 except EmptyRepositoryError:
1380 raise EmptyRepositoryError(
1392 raise EmptyRepositoryError(
1381 "Nothing to change - try to *add* new nodes rather than "
1393 "Nothing to change - try to *add* new nodes rather than "
1382 "changing them")
1394 "changing them")
1383 for node in filenodes:
1395 for node in filenodes:
1384 if node.path in (n.path for n in self.changed):
1396 if node.path in (n.path for n in self.changed):
1385 raise NodeAlreadyChangedError(
1397 raise NodeAlreadyChangedError(
1386 "Node at '%s' is already marked as changed" % node.path)
1398 "Node at '%s' is already marked as changed" % node.path)
1387 self.changed.append(node)
1399 self.changed.append(node)
1388
1400
1389 def remove(self, *filenodes):
1401 def remove(self, *filenodes):
1390 """
1402 """
1391 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1403 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1392 *removed* in next commit.
1404 *removed* in next commit.
1393
1405
1394 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1406 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1395 be *removed*
1407 be *removed*
1396 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1408 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1397 be *changed*
1409 be *changed*
1398 """
1410 """
1399 for node in filenodes:
1411 for node in filenodes:
1400 if node.path in (n.path for n in self.removed):
1412 if node.path in (n.path for n in self.removed):
1401 raise NodeAlreadyRemovedError(
1413 raise NodeAlreadyRemovedError(
1402 "Node is already marked for removal at %s" % node.path)
1414 "Node is already marked for removal at %s" % node.path)
1403 if node.path in (n.path for n in self.changed):
1415 if node.path in (n.path for n in self.changed):
1404 raise NodeAlreadyChangedError(
1416 raise NodeAlreadyChangedError(
1405 "Node is already marked to be changed at %s" % node.path)
1417 "Node is already marked to be changed at %s" % node.path)
1406 # We only mark node as *removed* - real removal is done by
1418 # We only mark node as *removed* - real removal is done by
1407 # commit method
1419 # commit method
1408 self.removed.append(node)
1420 self.removed.append(node)
1409
1421
1410 def reset(self):
1422 def reset(self):
1411 """
1423 """
1412 Resets this instance to initial state (cleans ``added``, ``changed``
1424 Resets this instance to initial state (cleans ``added``, ``changed``
1413 and ``removed`` lists).
1425 and ``removed`` lists).
1414 """
1426 """
1415 self.added = []
1427 self.added = []
1416 self.changed = []
1428 self.changed = []
1417 self.removed = []
1429 self.removed = []
1418 self.parents = []
1430 self.parents = []
1419
1431
1420 def get_ipaths(self):
1432 def get_ipaths(self):
1421 """
1433 """
1422 Returns generator of paths from nodes marked as added, changed or
1434 Returns generator of paths from nodes marked as added, changed or
1423 removed.
1435 removed.
1424 """
1436 """
1425 for node in itertools.chain(self.added, self.changed, self.removed):
1437 for node in itertools.chain(self.added, self.changed, self.removed):
1426 yield node.path
1438 yield node.path
1427
1439
1428 def get_paths(self):
1440 def get_paths(self):
1429 """
1441 """
1430 Returns list of paths from nodes marked as added, changed or removed.
1442 Returns list of paths from nodes marked as added, changed or removed.
1431 """
1443 """
1432 return list(self.get_ipaths())
1444 return list(self.get_ipaths())
1433
1445
1434 def check_integrity(self, parents=None):
1446 def check_integrity(self, parents=None):
1435 """
1447 """
1436 Checks in-memory commit's integrity. Also, sets parents if not
1448 Checks in-memory commit's integrity. Also, sets parents if not
1437 already set.
1449 already set.
1438
1450
1439 :raises CommitError: if any error occurs (e.g.
1451 :raises CommitError: if any error occurs (e.g.
1440 ``NodeDoesNotExistError``).
1452 ``NodeDoesNotExistError``).
1441 """
1453 """
1442 if not self.parents:
1454 if not self.parents:
1443 parents = parents or []
1455 parents = parents or []
1444 if len(parents) == 0:
1456 if len(parents) == 0:
1445 try:
1457 try:
1446 parents = [self.repository.get_commit(), None]
1458 parents = [self.repository.get_commit(), None]
1447 except EmptyRepositoryError:
1459 except EmptyRepositoryError:
1448 parents = [None, None]
1460 parents = [None, None]
1449 elif len(parents) == 1:
1461 elif len(parents) == 1:
1450 parents += [None]
1462 parents += [None]
1451 self.parents = parents
1463 self.parents = parents
1452
1464
1453 # Local parents, only if not None
1465 # Local parents, only if not None
1454 parents = [p for p in self.parents if p]
1466 parents = [p for p in self.parents if p]
1455
1467
1456 # Check nodes marked as added
1468 # Check nodes marked as added
1457 for p in parents:
1469 for p in parents:
1458 for node in self.added:
1470 for node in self.added:
1459 try:
1471 try:
1460 p.get_node(node.path)
1472 p.get_node(node.path)
1461 except NodeDoesNotExistError:
1473 except NodeDoesNotExistError:
1462 pass
1474 pass
1463 else:
1475 else:
1464 raise NodeAlreadyExistsError(
1476 raise NodeAlreadyExistsError(
1465 "Node `%s` already exists at %s" % (node.path, p))
1477 "Node `%s` already exists at %s" % (node.path, p))
1466
1478
1467 # Check nodes marked as changed
1479 # Check nodes marked as changed
1468 missing = set(self.changed)
1480 missing = set(self.changed)
1469 not_changed = set(self.changed)
1481 not_changed = set(self.changed)
1470 if self.changed and not parents:
1482 if self.changed and not parents:
1471 raise NodeDoesNotExistError(str(self.changed[0].path))
1483 raise NodeDoesNotExistError(str(self.changed[0].path))
1472 for p in parents:
1484 for p in parents:
1473 for node in self.changed:
1485 for node in self.changed:
1474 try:
1486 try:
1475 old = p.get_node(node.path)
1487 old = p.get_node(node.path)
1476 missing.remove(node)
1488 missing.remove(node)
1477 # if content actually changed, remove node from not_changed
1489 # if content actually changed, remove node from not_changed
1478 if old.content != node.content:
1490 if old.content != node.content:
1479 not_changed.remove(node)
1491 not_changed.remove(node)
1480 except NodeDoesNotExistError:
1492 except NodeDoesNotExistError:
1481 pass
1493 pass
1482 if self.changed and missing:
1494 if self.changed and missing:
1483 raise NodeDoesNotExistError(
1495 raise NodeDoesNotExistError(
1484 "Node `%s` marked as modified but missing in parents: %s"
1496 "Node `%s` marked as modified but missing in parents: %s"
1485 % (node.path, parents))
1497 % (node.path, parents))
1486
1498
1487 if self.changed and not_changed:
1499 if self.changed and not_changed:
1488 raise NodeNotChangedError(
1500 raise NodeNotChangedError(
1489 "Node `%s` wasn't actually changed (parents: %s)"
1501 "Node `%s` wasn't actually changed (parents: %s)"
1490 % (not_changed.pop().path, parents))
1502 % (not_changed.pop().path, parents))
1491
1503
1492 # Check nodes marked as removed
1504 # Check nodes marked as removed
1493 if self.removed and not parents:
1505 if self.removed and not parents:
1494 raise NodeDoesNotExistError(
1506 raise NodeDoesNotExistError(
1495 "Cannot remove node at %s as there "
1507 "Cannot remove node at %s as there "
1496 "were no parents specified" % self.removed[0].path)
1508 "were no parents specified" % self.removed[0].path)
1497 really_removed = set()
1509 really_removed = set()
1498 for p in parents:
1510 for p in parents:
1499 for node in self.removed:
1511 for node in self.removed:
1500 try:
1512 try:
1501 p.get_node(node.path)
1513 p.get_node(node.path)
1502 really_removed.add(node)
1514 really_removed.add(node)
1503 except CommitError:
1515 except CommitError:
1504 pass
1516 pass
1505 not_removed = set(self.removed) - really_removed
1517 not_removed = set(self.removed) - really_removed
1506 if not_removed:
1518 if not_removed:
1507 # TODO: johbo: This code branch does not seem to be covered
1519 # TODO: johbo: This code branch does not seem to be covered
1508 raise NodeDoesNotExistError(
1520 raise NodeDoesNotExistError(
1509 "Cannot remove node at %s from "
1521 "Cannot remove node at %s from "
1510 "following parents: %s" % (not_removed, parents))
1522 "following parents: %s" % (not_removed, parents))
1511
1523
1512 def commit(
1524 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1513 self, message, author, parents=None, branch=None, date=None,
1514 **kwargs):
1515 """
1525 """
1516 Performs in-memory commit (doesn't check workdir in any way) and
1526 Performs in-memory commit (doesn't check workdir in any way) and
1517 returns newly created :class:`BaseCommit`. Updates repository's
1527 returns newly created :class:`BaseCommit`. Updates repository's
1518 attribute `commits`.
1528 attribute `commits`.
1519
1529
1520 .. note::
1530 .. note::
1521
1531
1522 While overriding this method each backend's should call
1532 While overriding this method each backend's should call
1523 ``self.check_integrity(parents)`` in the first place.
1533 ``self.check_integrity(parents)`` in the first place.
1524
1534
1525 :param message: message of the commit
1535 :param message: message of the commit
1526 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1536 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1527 :param parents: single parent or sequence of parents from which commit
1537 :param parents: single parent or sequence of parents from which commit
1528 would be derived
1538 would be derived
1529 :param date: ``datetime.datetime`` instance. Defaults to
1539 :param date: ``datetime.datetime`` instance. Defaults to
1530 ``datetime.datetime.now()``.
1540 ``datetime.datetime.now()``.
1531 :param branch: branch name, as string. If none given, default backend's
1541 :param branch: branch name, as string. If none given, default backend's
1532 branch would be used.
1542 branch would be used.
1533
1543
1534 :raises ``CommitError``: if any error occurs while committing
1544 :raises ``CommitError``: if any error occurs while committing
1535 """
1545 """
1536 raise NotImplementedError
1546 raise NotImplementedError
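
An end-to-end sketch of the in-memory commit workflow described above. The repository construction, the ``in_memory_commit`` accessor and the ``FileNode`` import path are assumptions for illustration; ``add``/``change``/``commit`` follow the contract documented in this class.

    from rhodecode.lib.vcs.backends import get_backend
    from rhodecode.lib.vcs.nodes import FileNode  # assumed FileNode location

    repo = get_backend('git')(repo_path='/tmp/example-repo', create=True)  # illustrative
    imc = repo.in_memory_commit  # assumed accessor for the backend's in-memory commit

    imc.add(FileNode('README.rst', content='Hello'))
    first = imc.commit(message=u'Add README',
                       author=u'Joe Doe <joe.doe@example.com>')

    imc.change(FileNode('README.rst', content='Hello, world'))
    imc.commit(message=u'Update README',
               author=u'Joe Doe <joe.doe@example.com>',
               parents=[first])
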
1537
1547
1538
1548
1539 class BaseInMemoryChangesetClass(type):
1549 class BaseInMemoryChangesetClass(type):
1540
1550
1541 def __instancecheck__(self, instance):
1551 def __instancecheck__(self, instance):
1542 return isinstance(instance, BaseInMemoryCommit)
1552 return isinstance(instance, BaseInMemoryCommit)
1543
1553
1544
1554
1545 class BaseInMemoryChangeset(BaseInMemoryCommit):
1555 class BaseInMemoryChangeset(BaseInMemoryCommit):
1546
1556
1547 __metaclass__ = BaseInMemoryChangesetClass
1557 __metaclass__ = BaseInMemoryChangesetClass
1548
1558
1549 def __new__(cls, *args, **kwargs):
1559 def __new__(cls, *args, **kwargs):
1550 warnings.warn(
1560 warnings.warn(
1551 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1561 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1552 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1562 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1553
1563
1554
1564
1555 class EmptyCommit(BaseCommit):
1565 class EmptyCommit(BaseCommit):
1556 """
1566 """
1557 A dummy empty commit. It's possible to pass a hash when creating
1567 A dummy empty commit. It's possible to pass a hash when creating
1558 an EmptyCommit
1568 an EmptyCommit
1559 """
1569 """
1560
1570
1561 def __init__(
1571 def __init__(
1562 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1572 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1563 message='', author='', date=None):
1573 message='', author='', date=None):
1564 self._empty_commit_id = commit_id
1574 self._empty_commit_id = commit_id
1565 # TODO: johbo: Solve idx parameter, default value does not make
1575 # TODO: johbo: Solve idx parameter, default value does not make
1566 # too much sense
1576 # too much sense
1567 self.idx = idx
1577 self.idx = idx
1568 self.message = message
1578 self.message = message
1569 self.author = author
1579 self.author = author
1570 self.date = date or datetime.datetime.fromtimestamp(0)
1580 self.date = date or datetime.datetime.fromtimestamp(0)
1571 self.repository = repo
1581 self.repository = repo
1572 self.alias = alias
1582 self.alias = alias
1573
1583
1574 @LazyProperty
1584 @LazyProperty
1575 def raw_id(self):
1585 def raw_id(self):
1576 """
1586 """
1577 Returns raw string identifying this commit, useful for web
1587 Returns raw string identifying this commit, useful for web
1578 representation.
1588 representation.
1579 """
1589 """
1580
1590
1581 return self._empty_commit_id
1591 return self._empty_commit_id
1582
1592
1583 @LazyProperty
1593 @LazyProperty
1584 def branch(self):
1594 def branch(self):
1585 if self.alias:
1595 if self.alias:
1586 from rhodecode.lib.vcs.backends import get_backend
1596 from rhodecode.lib.vcs.backends import get_backend
1587 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1597 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1588
1598
1589 @LazyProperty
1599 @LazyProperty
1590 def short_id(self):
1600 def short_id(self):
1591 return self.raw_id[:12]
1601 return self.raw_id[:12]
1592
1602
1593 @LazyProperty
1603 @LazyProperty
1594 def id(self):
1604 def id(self):
1595 return self.raw_id
1605 return self.raw_id
1596
1606
1597 def get_path_commit(self, path):
1607 def get_path_commit(self, path):
1598 return self
1608 return self
1599
1609
1600 def get_file_content(self, path):
1610 def get_file_content(self, path):
1601 return u''
1611 return u''
1602
1612
1603 def get_file_size(self, path):
1613 def get_file_size(self, path):
1604 return 0
1614 return 0
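
``EmptyCommit`` behaves as a null object, which is why ``first_parent`` above falls back to it; a small sketch, assuming the VCS backends are initialized so ``get_backend`` can resolve the alias.

    from rhodecode.lib.vcs.backends.base import EmptyCommit

    empty = EmptyCommit(alias='git')
    empty.raw_id                 # '0' * 40
    empty.short_id               # first 12 characters of raw_id
    empty.branch                 # the git backend's DEFAULT_BRANCH_NAME
    empty.get_file_content('x')  # u''
    empty.get_file_size('x')     # 0
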
1605
1615
1606
1616
1607 class EmptyChangesetClass(type):
1617 class EmptyChangesetClass(type):
1608
1618
1609 def __instancecheck__(self, instance):
1619 def __instancecheck__(self, instance):
1610 return isinstance(instance, EmptyCommit)
1620 return isinstance(instance, EmptyCommit)
1611
1621
1612
1622
1613 class EmptyChangeset(EmptyCommit):
1623 class EmptyChangeset(EmptyCommit):
1614
1624
1615 __metaclass__ = EmptyChangesetClass
1625 __metaclass__ = EmptyChangesetClass
1616
1626
1617 def __new__(cls, *args, **kwargs):
1627 def __new__(cls, *args, **kwargs):
1618 warnings.warn(
1628 warnings.warn(
1619 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1629 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1620 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1630 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1621
1631
1622 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1632 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1623 alias=None, revision=-1, message='', author='', date=None):
1633 alias=None, revision=-1, message='', author='', date=None):
1624 if requested_revision is not None:
1634 if requested_revision is not None:
1625 warnings.warn(
1635 warnings.warn(
1626 "Parameter requested_revision not supported anymore",
1636 "Parameter requested_revision not supported anymore",
1627 DeprecationWarning)
1637 DeprecationWarning)
1628 super(EmptyChangeset, self).__init__(
1638 super(EmptyChangeset, self).__init__(
1629 commit_id=cs, repo=repo, alias=alias, idx=revision,
1639 commit_id=cs, repo=repo, alias=alias, idx=revision,
1630 message=message, author=author, date=date)
1640 message=message, author=author, date=date)
1631
1641
1632 @property
1642 @property
1633 def revision(self):
1643 def revision(self):
1634 warnings.warn("Use idx instead", DeprecationWarning)
1644 warnings.warn("Use idx instead", DeprecationWarning)
1635 return self.idx
1645 return self.idx
1636
1646
1637 @revision.setter
1647 @revision.setter
1638 def revision(self, value):
1648 def revision(self, value):
1639 warnings.warn("Use idx instead", DeprecationWarning)
1649 warnings.warn("Use idx instead", DeprecationWarning)
1640 self.idx = value
1650 self.idx = value
1641
1651
1642
1652
1643 class EmptyRepository(BaseRepository):
1653 class EmptyRepository(BaseRepository):
1644 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1654 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1645 pass
1655 pass
1646
1656
1647 def get_diff(self, *args, **kwargs):
1657 def get_diff(self, *args, **kwargs):
1648 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1658 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1649 return GitDiff('')
1659 return GitDiff('')
1650
1660
1651
1661
1652 class CollectionGenerator(object):
1662 class CollectionGenerator(object):
1653
1663
1654 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1664 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1655 self.repo = repo
1665 self.repo = repo
1656 self.commit_ids = commit_ids
1666 self.commit_ids = commit_ids
1657 # TODO: (oliver) this isn't currently hooked up
1667 # TODO: (oliver) this isn't currently hooked up
1658 self.collection_size = None
1668 self.collection_size = None
1659 self.pre_load = pre_load
1669 self.pre_load = pre_load
1660 self.translate_tag = translate_tag
1670 self.translate_tag = translate_tag
1661
1671
1662 def __len__(self):
1672 def __len__(self):
1663 if self.collection_size is not None:
1673 if self.collection_size is not None:
1664 return self.collection_size
1674 return self.collection_size
1665 return self.commit_ids.__len__()
1675 return self.commit_ids.__len__()
1666
1676
1667 def __iter__(self):
1677 def __iter__(self):
1668 for commit_id in self.commit_ids:
1678 for commit_id in self.commit_ids:
1669 # TODO: johbo: Mercurial passes in commit indices or commit ids
1679 # TODO: johbo: Mercurial passes in commit indices or commit ids
1670 yield self._commit_factory(commit_id)
1680 yield self._commit_factory(commit_id)
1671
1681
1672 def _commit_factory(self, commit_id):
1682 def _commit_factory(self, commit_id):
1673 """
1683 """
1674 Allows backends to override the way commits are generated.
1684 Allows backends to override the way commits are generated.
1675 """
1685 """
1676 return self.repo.get_commit(
1686 return self.repo.get_commit(
1677 commit_id=commit_id, pre_load=self.pre_load,
1687 commit_id=commit_id, pre_load=self.pre_load,
1678 translate_tag=self.translate_tag)
1688 translate_tag=self.translate_tag)
1679
1689
1680 def __getslice__(self, i, j):
1690 def __getslice__(self, i, j):
1681 """
1691 """
1682 Returns an iterator over the sliced repository
1692 Returns an iterator over the sliced repository
1683 """
1693 """
1684 commit_ids = self.commit_ids[i:j]
1694 commit_ids = self.commit_ids[i:j]
1685 return self.__class__(
1695 return self.__class__(
1686 self.repo, commit_ids, pre_load=self.pre_load,
1696 self.repo, commit_ids, pre_load=self.pre_load,
1687 translate_tag=self.translate_tag)
1697 translate_tag=self.translate_tag)
1688
1698
1689 def __repr__(self):
1699 def __repr__(self):
1690 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1700 return '<CollectionGenerator[len:%s]>' % (self.__len__())
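
A sketch of consuming a ``CollectionGenerator`` lazily; obtaining it via ``repo.get_commits()`` is an assumption about the repository API, while ``len()``, iteration and slicing map directly to the methods above.

    commits = repo.get_commits(pre_load=['author', 'message'])  # assumed repo helper
    print(len(commits))            # number of commit ids
    for commit in commits[:5]:     # __getslice__ returns a smaller CollectionGenerator
        print('%s %s' % (commit.short_id, commit.author_name))
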
1691
1701
1692
1702
1693 class Config(object):
1703 class Config(object):
1694 """
1704 """
1695 Represents the configuration for a repository.
1705 Represents the configuration for a repository.
1696
1706
1697 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1707 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1698 standard library. It implements only the needed subset.
1708 standard library. It implements only the needed subset.
1699 """
1709 """
1700
1710
1701 def __init__(self):
1711 def __init__(self):
1702 self._values = {}
1712 self._values = {}
1703
1713
1704 def copy(self):
1714 def copy(self):
1705 clone = Config()
1715 clone = Config()
1706 for section, values in self._values.items():
1716 for section, values in self._values.items():
1707 clone._values[section] = values.copy()
1717 clone._values[section] = values.copy()
1708 return clone
1718 return clone
1709
1719
1710 def __repr__(self):
1720 def __repr__(self):
1711 return '<Config(%s sections) at %s>' % (
1721 return '<Config(%s sections) at %s>' % (
1712 len(self._values), hex(id(self)))
1722 len(self._values), hex(id(self)))
1713
1723
1714 def items(self, section):
1724 def items(self, section):
1715 return self._values.get(section, {}).iteritems()
1725 return self._values.get(section, {}).iteritems()
1716
1726
1717 def get(self, section, option):
1727 def get(self, section, option):
1718 return self._values.get(section, {}).get(option)
1728 return self._values.get(section, {}).get(option)
1719
1729
1720 def set(self, section, option, value):
1730 def set(self, section, option, value):
1721 section_values = self._values.setdefault(section, {})
1731 section_values = self._values.setdefault(section, {})
1722 section_values[option] = value
1732 section_values[option] = value
1723
1733
1724 def clear_section(self, section):
1734 def clear_section(self, section):
1725 self._values[section] = {}
1735 self._values[section] = {}
1726
1736
1727 def serialize(self):
1737 def serialize(self):
1728 """
1738 """
1729 Creates a list of three-element tuples (section, key, value) representing
1739 Creates a list of three-element tuples (section, key, value) representing
1730 this config object.
1740 this config object.
1731 """
1741 """
1732 items = []
1742 items = []
1733 for section in self._values:
1743 for section in self._values:
1734 for option, value in self._values[section].items():
1744 for option, value in self._values[section].items():
1735 items.append(
1745 items.append(
1736 (safe_str(section), safe_str(option), safe_str(value)))
1746 (safe_str(section), safe_str(option), safe_str(value)))
1737 return items
1747 return items
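
Because ``Config`` is entirely self-contained, this sketch runs as-is and exercises the ConfigParser-like subset; the section, option and value names are illustrative.

    from rhodecode.lib.vcs.backends.base import Config

    config = Config()
    config.set('phases', 'publish', 'True')
    config.set('hooks', 'pretxnchangegroup', 'python:myhooks.check')  # illustrative value

    config.get('phases', 'publish')   # 'True'
    dict(config.items('hooks'))       # {'pretxnchangegroup': 'python:myhooks.check'}
    config.serialize()                # [(section, option, value), ...] as safe_str tuples

    clone = config.copy()             # per-section dict copies
    clone.clear_section('phases')
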
1738
1748
1739
1749
1740 class Diff(object):
1750 class Diff(object):
1741 """
1751 """
1742 Represents a diff result from a repository backend.
1752 Represents a diff result from a repository backend.
1743
1753
1744 Subclasses have to provide a backend specific value for
1754 Subclasses have to provide a backend specific value for
1745 :attr:`_header_re` and :attr:`_meta_re`.
1755 :attr:`_header_re` and :attr:`_meta_re`.
1746 """
1756 """
1747 _meta_re = None
1757 _meta_re = None
1748 _header_re = None
1758 _header_re = None
1749
1759
1750 def __init__(self, raw_diff):
1760 def __init__(self, raw_diff):
1751 self.raw = raw_diff
1761 self.raw = raw_diff
1752
1762
1753 def chunks(self):
1763 def chunks(self):
1754 """
1764 """
1755 Splits the diff into chunks of separate --git a/file b/file chunks.
1765 Splits the diff into chunks of separate --git a/file b/file chunks.
1756 To make diffs consistent we must prepend them with \n, and make sure
1766 To make diffs consistent we must prepend them with \n, and make sure
1757 we can detect the last chunk, as it also has a special rule
1767 we can detect the last chunk, as it also has a special rule
1758 """
1768 """
1759
1769
1760 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1770 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1761 header = diff_parts[0]
1771 header = diff_parts[0]
1762
1772
1763 if self._meta_re:
1773 if self._meta_re:
1764 match = self._meta_re.match(header)
1774 match = self._meta_re.match(header)
1765
1775
1766 chunks = diff_parts[1:]
1776 chunks = diff_parts[1:]
1767 total_chunks = len(chunks)
1777 total_chunks = len(chunks)
1768
1778
1769 return (
1779 return (
1770 DiffChunk(chunk, self, cur_chunk == total_chunks)
1780 DiffChunk(chunk, self, cur_chunk == total_chunks)
1771 for cur_chunk, chunk in enumerate(chunks, start=1))
1781 for cur_chunk, chunk in enumerate(chunks, start=1))
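
A hedged sketch of iterating per-file chunks of a diff, using the ``diff()`` helper defined on ``BaseCommit`` earlier in this module; the header keys depend on the backend's ``_header_re``.

    diff = commit.diff(ignore_whitespace=False, context=5)  # backend-specific Diff subclass
    for chunk in diff.chunks():
        sorted(chunk.header)   # group names parsed by the backend's _header_re
        chunk.diff[:80]        # hunk body that follows the per-file header
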
1772
1782
1773
1783
1774 class DiffChunk(object):
1784 class DiffChunk(object):
1775
1785
1776 def __init__(self, chunk, diff, last_chunk):
1786 def __init__(self, chunk, diff, last_chunk):
1777 self._diff = diff
1787 self._diff = diff
1778
1788
1779 # since we split by \ndiff --git that part is lost from original diff
1789 # since we split by \ndiff --git that part is lost from original diff
1780 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1790 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1781 if not last_chunk:
1791 if not last_chunk:
1782 chunk += '\n'
1792 chunk += '\n'
1783
1793
1784 match = self._diff._header_re.match(chunk)
1794 match = self._diff._header_re.match(chunk)
1785 self.header = match.groupdict()
1795 self.header = match.groupdict()
1786 self.diff = chunk[match.end():]
1796 self.diff = chunk[match.end():]
1787 self.raw = chunk
1797 self.raw = chunk
1788
1798
1789
1799
1790 class BasePathPermissionChecker(object):
1800 class BasePathPermissionChecker(object):
1791
1801
1792 @staticmethod
1802 @staticmethod
1793 def create_from_patterns(includes, excludes):
1803 def create_from_patterns(includes, excludes):
1794 if includes and '*' in includes and not excludes:
1804 if includes and '*' in includes and not excludes:
1795 return AllPathPermissionChecker()
1805 return AllPathPermissionChecker()
1796 elif excludes and '*' in excludes:
1806 elif excludes and '*' in excludes:
1797 return NonePathPermissionChecker()
1807 return NonePathPermissionChecker()
1798 else:
1808 else:
1799 return PatternPathPermissionChecker(includes, excludes)
1809 return PatternPathPermissionChecker(includes, excludes)
1800
1810
1801 @property
1811 @property
1802 def has_full_access(self):
1812 def has_full_access(self):
1803 raise NotImplementedError()
1813 raise NotImplementedError()
1804
1814
1805 def has_access(self, path):
1815 def has_access(self, path):
1806 raise NotImplementedError()
1816 raise NotImplementedError()
1807
1817
1808
1818
1809 class AllPathPermissionChecker(BasePathPermissionChecker):
1819 class AllPathPermissionChecker(BasePathPermissionChecker):
1810
1820
1811 @property
1821 @property
1812 def has_full_access(self):
1822 def has_full_access(self):
1813 return True
1823 return True
1814
1824
1815 def has_access(self, path):
1825 def has_access(self, path):
1816 return True
1826 return True
1817
1827
1818
1828
1819 class NonePathPermissionChecker(BasePathPermissionChecker):
1829 class NonePathPermissionChecker(BasePathPermissionChecker):
1820
1830
1821 @property
1831 @property
1822 def has_full_access(self):
1832 def has_full_access(self):
1823 return False
1833 return False
1824
1834
1825 def has_access(self, path):
1835 def has_access(self, path):
1826 return False
1836 return False
1827
1837
1828
1838
1829 class PatternPathPermissionChecker(BasePathPermissionChecker):
1839 class PatternPathPermissionChecker(BasePathPermissionChecker):
1830
1840
1831 def __init__(self, includes, excludes):
1841 def __init__(self, includes, excludes):
1832 self.includes = includes
1842 self.includes = includes
1833 self.excludes = excludes
1843 self.excludes = excludes
1834 self.includes_re = [] if not includes else [
1844 self.includes_re = [] if not includes else [
1835 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1845 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1836 self.excludes_re = [] if not excludes else [
1846 self.excludes_re = [] if not excludes else [
1837 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1847 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1838
1848
1839 @property
1849 @property
1840 def has_full_access(self):
1850 def has_full_access(self):
1841 return '*' in self.includes and not self.excludes
1851 return '*' in self.includes and not self.excludes
1842
1852
1843 def has_access(self, path):
1853 def has_access(self, path):
1844 for regex in self.excludes_re:
1854 for regex in self.excludes_re:
1845 if regex.match(path):
1855 if regex.match(path):
1846 return False
1856 return False
1847 for regex in self.includes_re:
1857 for regex in self.includes_re:
1848 if regex.match(path):
1858 if regex.match(path):
1849 return True
1859 return True
1850 return False
1860 return False
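The checkers above translate shell-style globs with fnmatch.translate and evaluate excludes before includes, with deny as the default. A self-contained sketch of that evaluation order (the pattern lists and paths are made up for illustration):

    import fnmatch
    import re

    def build_checker(includes, excludes):
        includes_re = [re.compile(fnmatch.translate(p)) for p in (includes or [])]
        excludes_re = [re.compile(fnmatch.translate(p)) for p in (excludes or [])]

        def has_access(path):
            # excludes win over includes, anything unmatched is denied
            for regex in excludes_re:
                if regex.match(path):
                    return False
            for regex in includes_re:
                if regex.match(path):
                    return True
            return False

        return has_access

    check = build_checker(includes=['docs/*', '*.py'], excludes=['docs/secret/*'])
    print(check('docs/index.rst'))       # True
    print(check('docs/secret/key.txt'))  # False, exclude wins
    print(check('README'))               # False, not included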
@@ -1,106 +1,104 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT inmemory module
22 GIT inmemory module
23 """
23 """
24
24
25 from rhodecode.lib.datelib import date_to_timestamp_plus_offset
25 from rhodecode.lib.datelib import date_to_timestamp_plus_offset
26 from rhodecode.lib.utils import safe_str
26 from rhodecode.lib.utils import safe_str
27 from rhodecode.lib.vcs.backends import base
27 from rhodecode.lib.vcs.backends import base
28
28
29
29
30 class GitInMemoryCommit(base.BaseInMemoryCommit):
30 class GitInMemoryCommit(base.BaseInMemoryCommit):
31
31
32 def commit(self, message, author, parents=None, branch=None, date=None,
32 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
33 **kwargs):
34 """
33 """
35 Performs in-memory commit (doesn't check workdir in any way) and
34 Performs in-memory commit (doesn't check workdir in any way) and
36 returns newly created `GitCommit`. Updates repository's
35 returns newly created `GitCommit`. Updates repository's
37 `commit_ids`.
36 `commit_ids`.
38
37
39 :param message: message of the commit
38 :param message: message of the commit
40 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
39 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
41 :param parents: single parent or sequence of parents from which commit
40 :param parents: single parent or sequence of parents from which commit
42 would be derived
41 would be derived
43 :param date: `datetime.datetime` instance. Defaults to
42 :param date: `datetime.datetime` instance. Defaults to
44 ``datetime.datetime.now()``.
43 ``datetime.datetime.now()``.
45 :param branch: branch name, as string. If none given, default backend's
44 :param branch: branch name, as string. If none given, default backend's
46 branch would be used.
45 branch would be used.
47
46
48 :raises `CommitError`: if any error occurs while committing
47 :raises `CommitError`: if any error occurs while committing
49 """
48 """
50 self.check_integrity(parents)
49 self.check_integrity(parents)
51 if branch is None:
50 if branch is None:
52 branch = self.repository.DEFAULT_BRANCH_NAME
51 branch = self.repository.DEFAULT_BRANCH_NAME
53
52
54 ENCODING = "UTF-8"
53 ENCODING = "UTF-8"
55
54
56 commit_tree = None
55 commit_tree = None
57 if self.parents[0]:
56 if self.parents[0]:
58 commit_tree = self.parents[0]._commit['tree']
57 commit_tree = self.parents[0]._commit['tree']
59
58
60 updated = []
59 updated = []
61 for node in self.added + self.changed:
60 for node in self.added + self.changed:
62 if not node.is_binary:
61 if not node.is_binary:
63 content = node.content.encode(ENCODING)
62 content = node.content.encode(ENCODING)
64 else:
63 else:
65 content = node.content
64 content = node.content
66 updated.append({
65 updated.append({
67 'path': node.path,
66 'path': node.path,
68 'node_path': node.name.encode(ENCODING),
67 'node_path': node.name.encode(ENCODING),
69 'content': content,
68 'content': content,
70 'mode': node.mode,
69 'mode': node.mode,
71 })
70 })
72
71
73 removed = [node.path for node in self.removed]
72 removed = [node.path for node in self.removed]
74
73
75 date, tz = date_to_timestamp_plus_offset(date)
74 date, tz = date_to_timestamp_plus_offset(date)
76
75
77 # TODO: johbo: Make kwargs explicit and check if this is needed.
76 # TODO: johbo: Make kwargs explicit and check if this is needed.
78 author_time = kwargs.pop('author_time', date)
77 author_time = kwargs.pop('author_time', date)
79 author_tz = kwargs.pop('author_timezone', tz)
78 author_tz = kwargs.pop('author_timezone', tz)
80
79
81 commit_data = {
80 commit_data = {
82 'parents': [p._commit['id'] for p in self.parents if p],
81 'parents': [p._commit['id'] for p in self.parents if p],
83 'author': safe_str(author),
82 'author': safe_str(author),
84 'committer': safe_str(author),
83 'committer': safe_str(author),
85 'encoding': ENCODING,
84 'encoding': ENCODING,
86 'message': safe_str(message),
85 'message': safe_str(message),
87 'commit_time': int(date),
86 'commit_time': int(date),
88 'author_time': int(author_time),
87 'author_time': int(author_time),
89 'commit_timezone': tz,
88 'commit_timezone': tz,
90 'author_timezone': author_tz,
89 'author_timezone': author_tz,
91 }
90 }
92
91
93 commit_id = self.repository._remote.commit(
92 commit_id = self.repository._remote.commit(
94 commit_data, branch, commit_tree, updated, removed)
93 commit_data, branch, commit_tree, updated, removed)
95
94
96 # Update vcs repository object
95 # Update vcs repository object
97 if commit_id not in self.repository.commit_ids:
96 self.repository.append_commit_id(commit_id)
98 self.repository.commit_ids.append(commit_id)
99 self.repository._rebuild_cache(self.repository.commit_ids)
100
97
101 # invalidate parsed refs after commit
98 # invalidate parsed refs after commit
102 self.repository._refs = self.repository._get_refs()
99 self.repository._refs = self.repository._get_refs()
103 self.repository.branches = self.repository._get_branches()
100 self.repository.branches = self.repository._get_branches()
104 tip = self.repository.get_commit()
101 tip = self.repository.get_commit(commit_id)
102
105 self.reset()
103 self.reset()
106 return tip
104 return tip
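A hedged usage sketch of the commit() call above, assuming an already configured GitRepository instance named repo and the FileNode helper from rhodecode.lib.vcs.nodes; file names, message and author are placeholders, not values taken from this change.

    # Sketch only: needs a working RhodeCode VCS setup and a git repository.
    from rhodecode.lib.vcs.nodes import FileNode

    imc = repo.in_memory_commit                 # GitInMemoryCommit instance
    imc.add(FileNode('docs/readme.rst', content='hello\n'))

    tip = imc.commit(
        message=u'Add readme',
        author=u'Joe Doe <joe.doe@example.com>',
        branch='master',                        # defaults to DEFAULT_BRANCH_NAME
    )
    print(tip.raw_id)                           # id of the newly created commit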
@@ -1,1031 +1,1037 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28 import time
28
29
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import CachedProperty
30
32
31 from rhodecode.lib.compat import OrderedDict
33 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
34 from rhodecode.lib.datelib import (
33 utcdate_fromtimestamp, makedate, date_astimestamp)
35 utcdate_fromtimestamp, makedate, date_astimestamp)
34 from rhodecode.lib.utils import safe_unicode, safe_str
36 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.vcs import connection, path as vcspath
37 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs.backends.base import (
38 from rhodecode.lib.vcs.backends.base import (
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 MergeFailureReason, Reference)
40 MergeFailureReason, Reference)
39 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.exceptions import (
44 from rhodecode.lib.vcs.exceptions import (
43 CommitDoesNotExistError, EmptyRepositoryError,
45 CommitDoesNotExistError, EmptyRepositoryError,
44 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
46 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45
47
46
48
47 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
49 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
48
50
49 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
50
52
51
53
52 class GitRepository(BaseRepository):
54 class GitRepository(BaseRepository):
53 """
55 """
54 Git repository backend.
56 Git repository backend.
55 """
57 """
56 DEFAULT_BRANCH_NAME = 'master'
58 DEFAULT_BRANCH_NAME = 'master'
57
59
58 contact = BaseRepository.DEFAULT_CONTACT
60 contact = BaseRepository.DEFAULT_CONTACT
59
61
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
63 do_workspace_checkout=False, with_wire=None, bare=False):
62
64
63 self.path = safe_str(os.path.abspath(repo_path))
65 self.path = safe_str(os.path.abspath(repo_path))
64 self.config = config if config else self.get_default_config()
66 self.config = config if config else self.get_default_config()
65 self.with_wire = with_wire
67 self.with_wire = with_wire
66
68
67 self._init_repo(create, src_url, do_workspace_checkout, bare)
69 self._init_repo(create, src_url, do_workspace_checkout, bare)
68
70
69 # caches
71 # caches
70 self._commit_ids = {}
72 self._commit_ids = {}
71
73
74 # dependent that trigger re-computation of commit_ids
75 self._commit_ids_ver = 0
76
72 @LazyProperty
77 @LazyProperty
73 def _remote(self):
78 def _remote(self):
74 return connection.Git(self.path, self.config, with_wire=self.with_wire)
79 return connection.Git(self.path, self.config, with_wire=self.with_wire)
75
80
76 @LazyProperty
81 @LazyProperty
77 def bare(self):
82 def bare(self):
78 return self._remote.bare()
83 return self._remote.bare()
79
84
80 @LazyProperty
85 @LazyProperty
81 def head(self):
86 def head(self):
82 return self._remote.head()
87 return self._remote.head()
83
88
84 @LazyProperty
89 @CachedProperty('_commit_ids_ver')
85 def commit_ids(self):
90 def commit_ids(self):
86 """
91 """
87 Returns a list of commit ids, in ascending order. Being a lazy
92 Returns a list of commit ids, in ascending order. Being a lazy
88 attribute allows external tools to inject commit ids from cache.
93 attribute allows external tools to inject commit ids from cache.
89 """
94 """
90 commit_ids = self._get_all_commit_ids()
95 commit_ids = self._get_all_commit_ids()
91 self._rebuild_cache(commit_ids)
96 self._rebuild_cache(commit_ids)
92 return commit_ids
97 return commit_ids
93
98
94 def _rebuild_cache(self, commit_ids):
99 def _rebuild_cache(self, commit_ids):
95 self._commit_ids = dict((commit_id, index)
100 self._commit_ids = dict((commit_id, index)
96 for index, commit_id in enumerate(commit_ids))
101 for index, commit_id in enumerate(commit_ids))
97
102
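commit_ids now uses CachedProperty('_commit_ids_ver') instead of LazyProperty, i.e. a cache keyed on a version attribute that is recomputed once the version changes (strip() below bumps _commit_ids_ver for exactly that reason). A plain-Python sketch of the same invalidation pattern, deliberately not using the zope descriptor:

    class VersionCachedRepo(object):
        def __init__(self):
            self._commit_ids_ver = 0
            self._cache = {}                 # version -> computed value

        def _compute_commit_ids(self):
            print('expensive recomputation running')
            return ['deadbeef', 'cafebabe']

        @property
        def commit_ids(self):
            ver = self._commit_ids_ver
            if ver not in self._cache:
                self._cache = {ver: self._compute_commit_ids()}
            return self._cache[ver]

    repo = VersionCachedRepo()
    repo.commit_ids            # computed on first access
    repo.commit_ids            # served from cache
    repo._commit_ids_ver = 1   # e.g. after history was rewritten
    repo.commit_ids            # recomputed for the new version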
98 def run_git_command(self, cmd, **opts):
103 def run_git_command(self, cmd, **opts):
99 """
104 """
100 Runs given ``cmd`` as git command and returns tuple
105 Runs given ``cmd`` as git command and returns tuple
101 (stdout, stderr).
106 (stdout, stderr).
102
107
103 :param cmd: git command to be executed
108 :param cmd: git command to be executed
104 :param opts: env options to pass into Subprocess command
109 :param opts: env options to pass into Subprocess command
105 """
110 """
106 if not isinstance(cmd, list):
111 if not isinstance(cmd, list):
107 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
112 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
108
113
109 skip_stderr_log = opts.pop('skip_stderr_log', False)
114 skip_stderr_log = opts.pop('skip_stderr_log', False)
110 out, err = self._remote.run_git_command(cmd, **opts)
115 out, err = self._remote.run_git_command(cmd, **opts)
111 if err and not skip_stderr_log:
116 if err and not skip_stderr_log:
112 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
117 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
113 return out, err
118 return out, err
114
119
115 @staticmethod
120 @staticmethod
116 def check_url(url, config):
121 def check_url(url, config):
117 """
122 """
118 Checks the given url and tries to verify that it points to a valid
123 Checks the given url and tries to verify that it points to a valid
119 link. Sometimes it may happen that git issues a basic
124 link. Sometimes it may happen that git issues a basic
120 auth request, which can cause the whole API to hang when used from python
125 auth request, which can cause the whole API to hang when used from python
121 or other external calls.
126 or other external calls.
122
127
123 On failure it raises urllib2.HTTPError; the exception is also raised
128 On failure it raises urllib2.HTTPError; the exception is also raised
124 when the return code is not 200
129 when the return code is not 200
125 """
130 """
126 # check first if it's not an url
131 # check first if it's not an url
127 if os.path.isdir(url) or url.startswith('file:'):
132 if os.path.isdir(url) or url.startswith('file:'):
128 return True
133 return True
129
134
130 if '+' in url.split('://', 1)[0]:
135 if '+' in url.split('://', 1)[0]:
131 url = url.split('+', 1)[1]
136 url = url.split('+', 1)[1]
132
137
133 # Request the _remote to verify the url
138 # Request the _remote to verify the url
134 return connection.Git.check_url(url, config.serialize())
139 return connection.Git.check_url(url, config.serialize())
135
140
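A short, hedged example of calling the static check_url above; the URL is illustrative and repo.config stands for whatever Config object the repository already carries.

    # Local paths and file: URLs short-circuit to True; anything else is
    # verified through the remote layer and may raise on HTTP errors.
    url = 'https://example.com/repo.git'
    if GitRepository.check_url(url, repo.config):
        repo.fetch(url)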
136 @staticmethod
141 @staticmethod
137 def is_valid_repository(path):
142 def is_valid_repository(path):
138 if os.path.isdir(os.path.join(path, '.git')):
143 if os.path.isdir(os.path.join(path, '.git')):
139 return True
144 return True
140 # check case of bare repository
145 # check case of bare repository
141 try:
146 try:
142 GitRepository(path)
147 GitRepository(path)
143 return True
148 return True
144 except VCSError:
149 except VCSError:
145 pass
150 pass
146 return False
151 return False
147
152
148 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
153 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
149 bare=False):
154 bare=False):
150 if create and os.path.exists(self.path):
155 if create and os.path.exists(self.path):
151 raise RepositoryError(
156 raise RepositoryError(
152 "Cannot create repository at %s, location already exist"
157 "Cannot create repository at %s, location already exist"
153 % self.path)
158 % self.path)
154
159
155 if bare and do_workspace_checkout:
160 if bare and do_workspace_checkout:
156 raise RepositoryError("Cannot update a bare repository")
161 raise RepositoryError("Cannot update a bare repository")
157 try:
162 try:
158
163
159 if src_url:
164 if src_url:
160 # check URL before any actions
165 # check URL before any actions
161 GitRepository.check_url(src_url, self.config)
166 GitRepository.check_url(src_url, self.config)
162
167
163 if create:
168 if create:
164 os.makedirs(self.path, mode=0o755)
169 os.makedirs(self.path, mode=0o755)
165
170
166 if bare:
171 if bare:
167 self._remote.init_bare()
172 self._remote.init_bare()
168 else:
173 else:
169 self._remote.init()
174 self._remote.init()
170
175
171 if src_url and bare:
176 if src_url and bare:
172 # a bare repository only allows a fetch; a checkout is not allowed
177 # a bare repository only allows a fetch; a checkout is not allowed
173 self.fetch(src_url, commit_ids=None)
178 self.fetch(src_url, commit_ids=None)
174 elif src_url:
179 elif src_url:
175 self.pull(src_url, commit_ids=None,
180 self.pull(src_url, commit_ids=None,
176 update_after=do_workspace_checkout)
181 update_after=do_workspace_checkout)
177
182
178 else:
183 else:
179 if not self._remote.assert_correct_path():
184 if not self._remote.assert_correct_path():
180 raise RepositoryError(
185 raise RepositoryError(
181 'Path "%s" does not contain a Git repository' %
186 'Path "%s" does not contain a Git repository' %
182 (self.path,))
187 (self.path,))
183
188
184 # TODO: johbo: check if we have to translate the OSError here
189 # TODO: johbo: check if we have to translate the OSError here
185 except OSError as err:
190 except OSError as err:
186 raise RepositoryError(err)
191 raise RepositoryError(err)
187
192
188 def _get_all_commit_ids(self, filters=None):
193 def _get_all_commit_ids(self, filters=None):
189 # we must check if this repo is not empty, since the later command
194 # we must check if this repo is not empty, since the later command
190 # fails if it is. And it's cheaper to ask than to catch the subprocess
195 # fails if it is. And it's cheaper to ask than to catch the subprocess
191 # errors
196 # errors
192
197
193 head = self._remote.head(show_exc=False)
198 head = self._remote.head(show_exc=False)
194 if not head:
199 if not head:
195 return []
200 return []
196
201
197 rev_filter = ['--branches', '--tags']
202 rev_filter = ['--branches', '--tags']
198 extra_filter = []
203 extra_filter = []
199
204
200 if filters:
205 if filters:
201 if filters.get('since'):
206 if filters.get('since'):
202 extra_filter.append('--since=%s' % (filters['since']))
207 extra_filter.append('--since=%s' % (filters['since']))
203 if filters.get('until'):
208 if filters.get('until'):
204 extra_filter.append('--until=%s' % (filters['until']))
209 extra_filter.append('--until=%s' % (filters['until']))
205 if filters.get('branch_name'):
210 if filters.get('branch_name'):
206 rev_filter = ['--tags']
211 rev_filter = ['--tags']
207 extra_filter.append(filters['branch_name'])
212 extra_filter.append(filters['branch_name'])
208 rev_filter.extend(extra_filter)
213 rev_filter.extend(extra_filter)
209
214
210 # if filters.get('start') or filters.get('end'):
215 # if filters.get('start') or filters.get('end'):
211 # # skip is offset, max-count is limit
216 # # skip is offset, max-count is limit
212 # if filters.get('start'):
217 # if filters.get('start'):
213 # extra_filter += ' --skip=%s' % filters['start']
218 # extra_filter += ' --skip=%s' % filters['start']
214 # if filters.get('end'):
219 # if filters.get('end'):
215 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
220 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
216
221
217 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
222 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
218 try:
223 try:
219 output, __ = self.run_git_command(cmd)
224 output, __ = self.run_git_command(cmd)
220 except RepositoryError:
225 except RepositoryError:
221 # Can be raised for empty repositories
226 # Can be raised for empty repositories
222 return []
227 return []
223 return output.splitlines()
228 return output.splitlines()
224
229
225 def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
230 def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
226 def is_null(value):
231 def is_null(value):
227 return len(value) == commit_id_or_idx.count('0')
232 return len(value) == commit_id_or_idx.count('0')
228
233
229 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
234 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
230 return self.commit_ids[-1]
235 return self.commit_ids[-1]
231
236
232 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
237 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
233 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
238 if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
234 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
239 or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
235 try:
240 try:
236 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
241 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
237 except Exception:
242 except Exception:
238 msg = "Commit %s does not exist for %s" % (commit_id_or_idx, self.name)
243 msg = "Commit %s does not exist for %s" % (commit_id_or_idx, self.name)
239 raise CommitDoesNotExistError(msg)
244 raise CommitDoesNotExistError(msg)
240
245
241 elif is_bstr:
246 elif is_bstr:
242 # check full path ref, eg. refs/heads/master
247 # check full path ref, eg. refs/heads/master
243 ref_id = self._refs.get(commit_id_or_idx)
248 ref_id = self._refs.get(commit_id_or_idx)
244 if ref_id:
249 if ref_id:
245 return ref_id
250 return ref_id
246
251
247 # check branch name
252 # check branch name
248 branch_ids = self.branches.values()
253 branch_ids = self.branches.values()
249 ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
254 ref_id = self._refs.get('refs/heads/%s' % commit_id_or_idx)
250 if ref_id:
255 if ref_id:
251 return ref_id
256 return ref_id
252
257
253 # check tag name
258 # check tag name
254 ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
259 ref_id = self._refs.get('refs/tags/%s' % commit_id_or_idx)
255 if ref_id:
260 if ref_id:
256 return ref_id
261 return ref_id
257
262
258 if (not SHA_PATTERN.match(commit_id_or_idx) or
263 if (not SHA_PATTERN.match(commit_id_or_idx) or
259 commit_id_or_idx not in self.commit_ids):
264 commit_id_or_idx not in self.commit_ids):
260 msg = "Commit %s does not exist for %s" % (commit_id_or_idx, self.name)
265 msg = "Commit %s does not exist for %s" % (commit_id_or_idx, self.name)
261 raise CommitDoesNotExistError(msg)
266 raise CommitDoesNotExistError(msg)
262
267
263 # Ensure we return full id
268 # Ensure we return full id
264 if not SHA_PATTERN.match(str(commit_id_or_idx)):
269 if not SHA_PATTERN.match(str(commit_id_or_idx)):
265 raise CommitDoesNotExistError(
270 raise CommitDoesNotExistError(
266 "Given commit id %s not recognized" % commit_id_or_idx)
271 "Given commit id %s not recognized" % commit_id_or_idx)
267 return commit_id_or_idx
272 return commit_id_or_idx
268
273
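_lookup_commit above resolves identifiers in a fixed order: numeric indices against commit_ids, full ref paths, branch names, tag names, and finally raw SHAs. A condensed standalone sketch of that resolution order over a made-up refs mapping:

    refs = {
        'refs/heads/master': 'a' * 40,
        'refs/tags/v1.0': 'b' * 40,
    }
    commit_ids = ['a' * 40, 'b' * 40]

    def lookup(ref):
        if isinstance(ref, int) or (isinstance(ref, str) and ref.isdigit()):
            return commit_ids[int(ref)]              # numeric index
        for key in (ref, 'refs/heads/%s' % ref, 'refs/tags/%s' % ref):
            if key in refs:
                return refs[key]                     # full ref, branch or tag
        if ref in commit_ids:
            return ref                               # already a full commit id
        raise KeyError('Commit %s does not exist' % ref)

    print(lookup('master'))
    print(lookup('v1.0'))
    print(lookup(0))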
269 def get_hook_location(self):
274 def get_hook_location(self):
270 """
275 """
271 returns absolute path to location where hooks are stored
276 returns absolute path to location where hooks are stored
272 """
277 """
273 loc = os.path.join(self.path, 'hooks')
278 loc = os.path.join(self.path, 'hooks')
274 if not self.bare:
279 if not self.bare:
275 loc = os.path.join(self.path, '.git', 'hooks')
280 loc = os.path.join(self.path, '.git', 'hooks')
276 return loc
281 return loc
277
282
278 @LazyProperty
283 @LazyProperty
279 def last_change(self):
284 def last_change(self):
280 """
285 """
281 Returns last change made on this repository as
286 Returns last change made on this repository as
282 `datetime.datetime` object.
287 `datetime.datetime` object.
283 """
288 """
284 try:
289 try:
285 return self.get_commit().date
290 return self.get_commit().date
286 except RepositoryError:
291 except RepositoryError:
287 tzoffset = makedate()[1]
292 tzoffset = makedate()[1]
288 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
293 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
289
294
290 def _get_fs_mtime(self):
295 def _get_fs_mtime(self):
291 idx_loc = '' if self.bare else '.git'
296 idx_loc = '' if self.bare else '.git'
292 # fallback to filesystem
297 # fallback to filesystem
293 in_path = os.path.join(self.path, idx_loc, "index")
298 in_path = os.path.join(self.path, idx_loc, "index")
294 he_path = os.path.join(self.path, idx_loc, "HEAD")
299 he_path = os.path.join(self.path, idx_loc, "HEAD")
295 if os.path.exists(in_path):
300 if os.path.exists(in_path):
296 return os.stat(in_path).st_mtime
301 return os.stat(in_path).st_mtime
297 else:
302 else:
298 return os.stat(he_path).st_mtime
303 return os.stat(he_path).st_mtime
299
304
300 @LazyProperty
305 @LazyProperty
301 def description(self):
306 def description(self):
302 description = self._remote.get_description()
307 description = self._remote.get_description()
303 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
308 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
304
309
305 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
310 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
306 if self.is_empty():
311 if self.is_empty():
307 return OrderedDict()
312 return OrderedDict()
308
313
309 result = []
314 result = []
310 for ref, sha in self._refs.iteritems():
315 for ref, sha in self._refs.iteritems():
311 if ref.startswith(prefix):
316 if ref.startswith(prefix):
312 ref_name = ref
317 ref_name = ref
313 if strip_prefix:
318 if strip_prefix:
314 ref_name = ref[len(prefix):]
319 ref_name = ref[len(prefix):]
315 result.append((safe_unicode(ref_name), sha))
320 result.append((safe_unicode(ref_name), sha))
316
321
317 def get_name(entry):
322 def get_name(entry):
318 return entry[0]
323 return entry[0]
319
324
320 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
325 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
321
326
322 def _get_branches(self):
327 def _get_branches(self):
323 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
328 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
324
329
325 @LazyProperty
330 @LazyProperty
326 def branches(self):
331 def branches(self):
327 return self._get_branches()
332 return self._get_branches()
328
333
329 @LazyProperty
334 @LazyProperty
330 def branches_closed(self):
335 def branches_closed(self):
331 return {}
336 return {}
332
337
333 @LazyProperty
338 @LazyProperty
334 def bookmarks(self):
339 def bookmarks(self):
335 return {}
340 return {}
336
341
337 @LazyProperty
342 @LazyProperty
338 def branches_all(self):
343 def branches_all(self):
339 all_branches = {}
344 all_branches = {}
340 all_branches.update(self.branches)
345 all_branches.update(self.branches)
341 all_branches.update(self.branches_closed)
346 all_branches.update(self.branches_closed)
342 return all_branches
347 return all_branches
343
348
344 @LazyProperty
349 @LazyProperty
345 def tags(self):
350 def tags(self):
346 return self._get_tags()
351 return self._get_tags()
347
352
348 def _get_tags(self):
353 def _get_tags(self):
349 return self._get_refs_entries(
354 return self._get_refs_entries(
350 prefix='refs/tags/', strip_prefix=True, reverse=True)
355 prefix='refs/tags/', strip_prefix=True, reverse=True)
351
356
352 def tag(self, name, user, commit_id=None, message=None, date=None,
357 def tag(self, name, user, commit_id=None, message=None, date=None,
353 **kwargs):
358 **kwargs):
354 # TODO: fix this method to apply annotated tags correct with message
359 # TODO: fix this method to apply annotated tags correct with message
355 """
360 """
356 Creates and returns a tag for the given ``commit_id``.
361 Creates and returns a tag for the given ``commit_id``.
357
362
358 :param name: name for new tag
363 :param name: name for new tag
359 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
364 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
360 :param commit_id: commit id for which new tag would be created
365 :param commit_id: commit id for which new tag would be created
361 :param message: message of the tag's commit
366 :param message: message of the tag's commit
362 :param date: date of tag's commit
367 :param date: date of tag's commit
363
368
364 :raises TagAlreadyExistError: if tag with same name already exists
369 :raises TagAlreadyExistError: if tag with same name already exists
365 """
370 """
366 if name in self.tags:
371 if name in self.tags:
367 raise TagAlreadyExistError("Tag %s already exists" % name)
372 raise TagAlreadyExistError("Tag %s already exists" % name)
368 commit = self.get_commit(commit_id=commit_id)
373 commit = self.get_commit(commit_id=commit_id)
369 message = message or "Added tag %s for commit %s" % (
374 message = message or "Added tag %s for commit %s" % (
370 name, commit.raw_id)
375 name, commit.raw_id)
371 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
376 self._remote.set_refs('refs/tags/%s' % name, commit._commit['id'])
372
377
373 self._refs = self._get_refs()
378 self._refs = self._get_refs()
374 self.tags = self._get_tags()
379 self.tags = self._get_tags()
375 return commit
380 return commit
376
381
377 def remove_tag(self, name, user, message=None, date=None):
382 def remove_tag(self, name, user, message=None, date=None):
378 """
383 """
379 Removes tag with the given ``name``.
384 Removes tag with the given ``name``.
380
385
381 :param name: name of the tag to be removed
386 :param name: name of the tag to be removed
382 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
387 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
383 :param message: message of the tag's removal commit
388 :param message: message of the tag's removal commit
384 :param date: date of tag's removal commit
389 :param date: date of tag's removal commit
385
390
386 :raises TagDoesNotExistError: if tag with given name does not exist
391 :raises TagDoesNotExistError: if tag with given name does not exist
387 """
392 """
388 if name not in self.tags:
393 if name not in self.tags:
389 raise TagDoesNotExistError("Tag %s does not exist" % name)
394 raise TagDoesNotExistError("Tag %s does not exist" % name)
390 tagpath = vcspath.join(
395 tagpath = vcspath.join(
391 self._remote.get_refs_path(), 'refs', 'tags', name)
396 self._remote.get_refs_path(), 'refs', 'tags', name)
392 try:
397 try:
393 os.remove(tagpath)
398 os.remove(tagpath)
394 self._refs = self._get_refs()
399 self._refs = self._get_refs()
395 self.tags = self._get_tags()
400 self.tags = self._get_tags()
396 except OSError as e:
401 except OSError as e:
397 raise RepositoryError(e.strerror)
402 raise RepositoryError(e.strerror)
398
403
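A usage sketch for the two tag helpers above, assuming an existing repo with at least one commit; the tag name and username are placeholders.

    commit = repo.tag(
        name='v1.2.0',
        user='Joe Doe <joe.doe@example.com>',
        commit_id=repo.commit_ids[-1],       # tag the current tip
    )
    print('v1.2.0' in repo.tags)             # True

    repo.remove_tag('v1.2.0', user='Joe Doe <joe.doe@example.com>')
    print('v1.2.0' in repo.tags)             # False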
399 def _get_refs(self):
404 def _get_refs(self):
400 return self._remote.get_refs()
405 return self._remote.get_refs()
401
406
402 @LazyProperty
407 @LazyProperty
403 def _refs(self):
408 def _refs(self):
404 return self._get_refs()
409 return self._get_refs()
405
410
406 @property
411 @property
407 def _ref_tree(self):
412 def _ref_tree(self):
408 node = tree = {}
413 node = tree = {}
409 for ref, sha in self._refs.iteritems():
414 for ref, sha in self._refs.iteritems():
410 path = ref.split('/')
415 path = ref.split('/')
411 for bit in path[:-1]:
416 for bit in path[:-1]:
412 node = node.setdefault(bit, {})
417 node = node.setdefault(bit, {})
413 node[path[-1]] = sha
418 node[path[-1]] = sha
414 node = tree
419 node = tree
415 return tree
420 return tree
416
421
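_ref_tree folds the flat ref names into a nested dict keyed on each path segment. The same folding in isolation, over a made-up refs mapping:

    refs = {
        'refs/heads/master': 'a' * 40,
        'refs/heads/feature/x': 'b' * 40,
        'refs/tags/v1.0': 'c' * 40,
    }

    node = tree = {}
    for ref, sha in refs.items():
        path = ref.split('/')
        for bit in path[:-1]:
            node = node.setdefault(bit, {})
        node[path[-1]] = sha
        node = tree           # start from the root again for the next ref

    # {'refs': {'heads': {'master': ..., 'feature': {'x': ...}}, 'tags': {'v1.0': ...}}}
    print(tree)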
417 def get_remote_ref(self, ref_name):
422 def get_remote_ref(self, ref_name):
418 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
423 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
419 try:
424 try:
420 return self._refs[ref_key]
425 return self._refs[ref_key]
421 except Exception:
426 except Exception:
422 return
427 return
423
428
424 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
429 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
425 """
430 """
426 Returns `GitCommit` object representing commit from git repository
431 Returns `GitCommit` object representing commit from git repository
427 at the given `commit_id` or head (most recent commit) if None given.
432 at the given `commit_id` or head (most recent commit) if None given.
428 """
433 """
429 if self.is_empty():
434 if self.is_empty():
430 raise EmptyRepositoryError("There are no commits yet")
435 raise EmptyRepositoryError("There are no commits yet")
431
436
432 if commit_id is not None:
437 if commit_id is not None:
433 self._validate_commit_id(commit_id)
438 self._validate_commit_id(commit_id)
434 try:
439 try:
435 # we have cached idx, use it without contacting the remote
440 # we have cached idx, use it without contacting the remote
436 idx = self._commit_ids[commit_id]
441 idx = self._commit_ids[commit_id]
437 return GitCommit(self, commit_id, idx, pre_load=pre_load)
442 return GitCommit(self, commit_id, idx, pre_load=pre_load)
438 except KeyError:
443 except KeyError:
439 pass
444 pass
440
445
441 elif commit_idx is not None:
446 elif commit_idx is not None:
442 self._validate_commit_idx(commit_idx)
447 self._validate_commit_idx(commit_idx)
443 try:
448 try:
444 _commit_id = self.commit_ids[commit_idx]
449 _commit_id = self.commit_ids[commit_idx]
445 if commit_idx < 0:
450 if commit_idx < 0:
446 commit_idx = self.commit_ids.index(_commit_id)
451 commit_idx = self.commit_ids.index(_commit_id)
447 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
452 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
448 except IndexError:
453 except IndexError:
449 commit_id = commit_idx
454 commit_id = commit_idx
450 else:
455 else:
451 commit_id = "tip"
456 commit_id = "tip"
452
457
453 commit_id = self._lookup_commit(commit_id)
458 commit_id = self._lookup_commit(commit_id)
454 remote_idx = None
459 remote_idx = None
455 if translate_tag:
460 if translate_tag:
456 # Need to call remote to translate id for tagging scenario
461 # Need to call remote to translate id for tagging scenario
457 remote_data = self._remote.get_object(commit_id)
462 remote_data = self._remote.get_object(commit_id)
458 commit_id = remote_data["commit_id"]
463 commit_id = remote_data["commit_id"]
459 remote_idx = remote_data["idx"]
464 remote_idx = remote_data["idx"]
460
465
461 try:
466 try:
462 idx = self._commit_ids[commit_id]
467 idx = self._commit_ids[commit_id]
463 except KeyError:
468 except KeyError:
464 idx = remote_idx or 0
469 idx = remote_idx or 0
465
470
466 return GitCommit(self, commit_id, idx, pre_load=pre_load)
471 return GitCommit(self, commit_id, idx, pre_load=pre_load)
467
472
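Usage sketch for get_commit above, assuming a non-empty repo; the pre_load keys shown are the commonly used attribute names and are an assumption here, not part of this change.

    tip = repo.get_commit()                        # head / most recent commit
    first = repo.get_commit(commit_idx=0)          # by position in commit_ids
    same = repo.get_commit(commit_id=tip.raw_id)   # by full sha, uses cached idx

    # pre_load hints which attributes to fetch in one remote round-trip
    commit = repo.get_commit(commit_id=tip.raw_id, pre_load=['author', 'message'])
    print(commit.message)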
468 def get_commits(
473 def get_commits(
469 self, start_id=None, end_id=None, start_date=None, end_date=None,
474 self, start_id=None, end_id=None, start_date=None, end_date=None,
470 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
475 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
471 """
476 """
472 Returns generator of `GitCommit` objects from start to end (both
477 Returns generator of `GitCommit` objects from start to end (both
473 are inclusive), in ascending date order.
478 are inclusive), in ascending date order.
474
479
475 :param start_id: None, str(commit_id)
480 :param start_id: None, str(commit_id)
476 :param end_id: None, str(commit_id)
481 :param end_id: None, str(commit_id)
477 :param start_date: if specified, commits with commit date less than
482 :param start_date: if specified, commits with commit date less than
478 ``start_date`` would be filtered out from returned set
483 ``start_date`` would be filtered out from returned set
479 :param end_date: if specified, commits with commit date greater than
484 :param end_date: if specified, commits with commit date greater than
480 ``end_date`` would be filtered out from returned set
485 ``end_date`` would be filtered out from returned set
481 :param branch_name: if specified, commits not reachable from given
486 :param branch_name: if specified, commits not reachable from given
482 branch would be filtered out from returned set
487 branch would be filtered out from returned set
483 :param show_hidden: Show hidden commits such as obsolete or hidden from
488 :param show_hidden: Show hidden commits such as obsolete or hidden from
484 Mercurial evolve
489 Mercurial evolve
485 :raise BranchDoesNotExistError: If given `branch_name` does not
490 :raise BranchDoesNotExistError: If given `branch_name` does not
486 exist.
491 exist.
487 :raise CommitDoesNotExistError: If commits for given `start` or
492 :raise CommitDoesNotExistError: If commits for given `start` or
488 `end` could not be found.
493 `end` could not be found.
489
494
490 """
495 """
491 if self.is_empty():
496 if self.is_empty():
492 raise EmptyRepositoryError("There are no commits yet")
497 raise EmptyRepositoryError("There are no commits yet")
493
498
494 self._validate_branch_name(branch_name)
499 self._validate_branch_name(branch_name)
495
500
496 if start_id is not None:
501 if start_id is not None:
497 self._validate_commit_id(start_id)
502 self._validate_commit_id(start_id)
498 if end_id is not None:
503 if end_id is not None:
499 self._validate_commit_id(end_id)
504 self._validate_commit_id(end_id)
500
505
501 start_raw_id = self._lookup_commit(start_id)
506 start_raw_id = self._lookup_commit(start_id)
502 start_pos = self._commit_ids[start_raw_id] if start_id else None
507 start_pos = self._commit_ids[start_raw_id] if start_id else None
503 end_raw_id = self._lookup_commit(end_id)
508 end_raw_id = self._lookup_commit(end_id)
504 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
509 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
505
510
506 if None not in [start_id, end_id] and start_pos > end_pos:
511 if None not in [start_id, end_id] and start_pos > end_pos:
507 raise RepositoryError(
512 raise RepositoryError(
508 "Start commit '%s' cannot be after end commit '%s'" %
513 "Start commit '%s' cannot be after end commit '%s'" %
509 (start_id, end_id))
514 (start_id, end_id))
510
515
511 if end_pos is not None:
516 if end_pos is not None:
512 end_pos += 1
517 end_pos += 1
513
518
514 filter_ = []
519 filter_ = []
515 if branch_name:
520 if branch_name:
516 filter_.append({'branch_name': branch_name})
521 filter_.append({'branch_name': branch_name})
517 if start_date and not end_date:
522 if start_date and not end_date:
518 filter_.append({'since': start_date})
523 filter_.append({'since': start_date})
519 if end_date and not start_date:
524 if end_date and not start_date:
520 filter_.append({'until': end_date})
525 filter_.append({'until': end_date})
521 if start_date and end_date:
526 if start_date and end_date:
522 filter_.append({'since': start_date})
527 filter_.append({'since': start_date})
523 filter_.append({'until': end_date})
528 filter_.append({'until': end_date})
524
529
525 # if start_pos or end_pos:
530 # if start_pos or end_pos:
526 # filter_.append({'start': start_pos})
531 # filter_.append({'start': start_pos})
527 # filter_.append({'end': end_pos})
532 # filter_.append({'end': end_pos})
528
533
529 if filter_:
534 if filter_:
530 revfilters = {
535 revfilters = {
531 'branch_name': branch_name,
536 'branch_name': branch_name,
532 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
537 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
533 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
538 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
534 'start': start_pos,
539 'start': start_pos,
535 'end': end_pos,
540 'end': end_pos,
536 }
541 }
537 commit_ids = self._get_all_commit_ids(filters=revfilters)
542 commit_ids = self._get_all_commit_ids(filters=revfilters)
538
543
539 # pure python stuff, it's slow due to walker walking whole repo
544 # pure python stuff, it's slow due to walker walking whole repo
540 # def get_revs(walker):
545 # def get_revs(walker):
541 # for walker_entry in walker:
546 # for walker_entry in walker:
542 # yield walker_entry.commit.id
547 # yield walker_entry.commit.id
543 # revfilters = {}
548 # revfilters = {}
544 # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
549 # commit_ids = list(reversed(list(get_revs(self._repo.get_walker(**revfilters)))))
545 else:
550 else:
546 commit_ids = self.commit_ids
551 commit_ids = self.commit_ids
547
552
548 if start_pos or end_pos:
553 if start_pos or end_pos:
549 commit_ids = commit_ids[start_pos: end_pos]
554 commit_ids = commit_ids[start_pos: end_pos]
550
555
551 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
556 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
552 translate_tag=translate_tags)
557 translate_tag=translate_tags)
553
558
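Usage sketch of the filters accepted by get_commits above; dates and branch name are illustrative.

    import datetime

    since = datetime.datetime(2019, 1, 1)
    until = datetime.datetime(2019, 6, 30)

    commits = repo.get_commits(
        start_date=since,        # maps to git log --since
        end_date=until,          # maps to git log --until
        branch_name='master',    # only commits reachable from this branch
        pre_load=['author', 'date', 'message'],
    )
    for commit in commits:       # CollectionGenerator yields commits lazily
        print(commit.short_id, commit.message.splitlines()[0])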
554 def get_diff(
559 def get_diff(
555 self, commit1, commit2, path='', ignore_whitespace=False,
560 self, commit1, commit2, path='', ignore_whitespace=False,
556 context=3, path1=None):
561 context=3, path1=None):
557 """
562 """
558 Returns (git like) *diff*, as plain text. Shows changes introduced by
563 Returns (git like) *diff*, as plain text. Shows changes introduced by
559 ``commit2`` since ``commit1``.
564 ``commit2`` since ``commit1``.
560
565
561 :param commit1: Entry point from which diff is shown. Can be
566 :param commit1: Entry point from which diff is shown. Can be
562 ``self.EMPTY_COMMIT`` - in this case, patch showing all
567 ``self.EMPTY_COMMIT`` - in this case, patch showing all
563 the changes since empty state of the repository until ``commit2``
568 the changes since empty state of the repository until ``commit2``
564 :param commit2: Commit until which changes should be shown.
569 :param commit2: Commit until which changes should be shown.
565 :param ignore_whitespace: If set to ``True``, would not show whitespace
570 :param ignore_whitespace: If set to ``True``, would not show whitespace
566 changes. Defaults to ``False``.
571 changes. Defaults to ``False``.
567 :param context: How many lines before/after changed lines should be
572 :param context: How many lines before/after changed lines should be
568 shown. Defaults to ``3``.
573 shown. Defaults to ``3``.
569 """
574 """
570 self._validate_diff_commits(commit1, commit2)
575 self._validate_diff_commits(commit1, commit2)
571 if path1 is not None and path1 != path:
576 if path1 is not None and path1 != path:
572 raise ValueError("Diff of two different paths not supported.")
577 raise ValueError("Diff of two different paths not supported.")
573
578
574 flags = [
579 flags = [
575 '-U%s' % context, '--full-index', '--binary', '-p',
580 '-U%s' % context, '--full-index', '--binary', '-p',
576 '-M', '--abbrev=40']
581 '-M', '--abbrev=40']
577 if ignore_whitespace:
582 if ignore_whitespace:
578 flags.append('-w')
583 flags.append('-w')
579
584
580 if commit1 == self.EMPTY_COMMIT:
585 if commit1 == self.EMPTY_COMMIT:
581 cmd = ['show'] + flags + [commit2.raw_id]
586 cmd = ['show'] + flags + [commit2.raw_id]
582 else:
587 else:
583 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
588 cmd = ['diff'] + flags + [commit1.raw_id, commit2.raw_id]
584
589
585 if path:
590 if path:
586 cmd.extend(['--', path])
591 cmd.extend(['--', path])
587
592
588 stdout, __ = self.run_git_command(cmd)
593 stdout, __ = self.run_git_command(cmd)
589 # If we used 'show' command, strip first few lines (until actual diff
594 # If we used 'show' command, strip first few lines (until actual diff
590 # starts)
595 # starts)
591 if commit1 == self.EMPTY_COMMIT:
596 if commit1 == self.EMPTY_COMMIT:
592 lines = stdout.splitlines()
597 lines = stdout.splitlines()
593 x = 0
598 x = 0
594 for line in lines:
599 for line in lines:
595 if line.startswith('diff'):
600 if line.startswith('diff'):
596 break
601 break
597 x += 1
602 x += 1
598 # Append a new line just like the 'diff' command does
603 # Append a new line just like the 'diff' command does
599 stdout = '\n'.join(lines[x:]) + '\n'
604 stdout = '\n'.join(lines[x:]) + '\n'
600 return GitDiff(stdout)
605 return GitDiff(stdout)
601
606
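Usage sketch of get_diff above; the commits come from the repository itself, EMPTY_COMMIT covers the "everything since the empty repo" case, and the idea that the returned GitDiff keeps the raw text on .raw is an assumption made for illustration.

    new = repo.get_commit()       # tip
    old = new.parents[0] if new.parents else repo.EMPTY_COMMIT

    diff = repo.get_diff(
        old, new,
        path='setup.py',          # optional: limit the diff to one path
        ignore_whitespace=True,   # adds -w
        context=5,                # -U5
    )
    print(diff.raw[:200])         # assumed attribute holding the raw diff text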
602 def strip(self, commit_id, branch_name):
607 def strip(self, commit_id, branch_name):
603 commit = self.get_commit(commit_id=commit_id)
608 commit = self.get_commit(commit_id=commit_id)
604 if commit.merge:
609 if commit.merge:
605 raise Exception('Cannot reset to merge commit')
610 raise Exception('Cannot reset to merge commit')
606
611
607 # parent is going to be the new head now
612 # parent is going to be the new head now
608 commit = commit.parents[0]
613 commit = commit.parents[0]
609 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
614 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
610
615
611 self.commit_ids = self._get_all_commit_ids()
616 self._commit_ids_ver = time.time()
612 self._rebuild_cache(self.commit_ids)
617 # we updated _commit_ids_ver so accessing self.commit_ids will re-compute it
618 return len(self.commit_ids)
613
619
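Sketch of the reworked strip() above: it moves the branch head to the parent commit, bumps _commit_ids_ver so the cached commit_ids property is recomputed on the next access, and now returns the new commit count; the branch name is a placeholder.

    tip = repo.get_commit()
    assert not tip.merge                       # merge commits cannot be stripped

    total = repo.strip(tip.raw_id, 'master')   # branch head becomes tip's parent
    print('commits after recompute:', total)
    print(repo.commit_ids[-1] == tip.raw_id)   # False, the tip is gone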
614 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
620 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
615 if commit_id1 == commit_id2:
621 if commit_id1 == commit_id2:
616 return commit_id1
622 return commit_id1
617
623
618 if self != repo2:
624 if self != repo2:
619 commits = self._remote.get_missing_revs(
625 commits = self._remote.get_missing_revs(
620 commit_id1, commit_id2, repo2.path)
626 commit_id1, commit_id2, repo2.path)
621 if commits:
627 if commits:
622 commit = repo2.get_commit(commits[-1])
628 commit = repo2.get_commit(commits[-1])
623 if commit.parents:
629 if commit.parents:
624 ancestor_id = commit.parents[0].raw_id
630 ancestor_id = commit.parents[0].raw_id
625 else:
631 else:
626 ancestor_id = None
632 ancestor_id = None
627 else:
633 else:
628 # no commits from other repo, ancestor_id is the commit_id2
634 # no commits from other repo, ancestor_id is the commit_id2
629 ancestor_id = commit_id2
635 ancestor_id = commit_id2
630 else:
636 else:
631 output, __ = self.run_git_command(
637 output, __ = self.run_git_command(
632 ['merge-base', commit_id1, commit_id2])
638 ['merge-base', commit_id1, commit_id2])
633 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
639 ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]
634
640
635 return ancestor_id
641 return ancestor_id
636
642
637 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
643 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
638 repo1 = self
644 repo1 = self
639 ancestor_id = None
645 ancestor_id = None
640
646
641 if commit_id1 == commit_id2:
647 if commit_id1 == commit_id2:
642 commits = []
648 commits = []
643 elif repo1 != repo2:
649 elif repo1 != repo2:
644 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
650 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
645 repo2.path)
651 repo2.path)
646 commits = [
652 commits = [
647 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
653 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
648 for commit_id in reversed(missing_ids)]
654 for commit_id in reversed(missing_ids)]
649 else:
655 else:
650 output, __ = repo1.run_git_command(
656 output, __ = repo1.run_git_command(
651 ['log', '--reverse', '--pretty=format: %H', '-s',
657 ['log', '--reverse', '--pretty=format: %H', '-s',
652 '%s..%s' % (commit_id1, commit_id2)])
658 '%s..%s' % (commit_id1, commit_id2)])
653 commits = [
659 commits = [
654 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
660 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
655 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
661 for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]
656
662
657 return commits
663 return commits
658
664
659 @LazyProperty
665 @LazyProperty
660 def in_memory_commit(self):
666 def in_memory_commit(self):
661 """
667 """
662 Returns ``GitInMemoryCommit`` object for this repository.
668 Returns ``GitInMemoryCommit`` object for this repository.
663 """
669 """
664 return GitInMemoryCommit(self)
670 return GitInMemoryCommit(self)
665
671
666 def pull(self, url, commit_ids=None, update_after=False):
672 def pull(self, url, commit_ids=None, update_after=False):
667 """
673 """
668 Pull changes from external location. In GIT, pull is different
674 Pull changes from external location. In GIT, pull is different
669 from fetch since it also does a checkout
675 from fetch since it also does a checkout
670
676
671 :param commit_ids: Optional. Can be set to a list of commit ids
677 :param commit_ids: Optional. Can be set to a list of commit ids
672 which shall be pulled from the other repository.
678 which shall be pulled from the other repository.
673 """
679 """
674 refs = None
680 refs = None
675 if commit_ids is not None:
681 if commit_ids is not None:
676 remote_refs = self._remote.get_remote_refs(url)
682 remote_refs = self._remote.get_remote_refs(url)
677 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
683 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
678 self._remote.pull(url, refs=refs, update_after=update_after)
684 self._remote.pull(url, refs=refs, update_after=update_after)
679 self._remote.invalidate_vcs_cache()
685 self._remote.invalidate_vcs_cache()
680
686
681 def fetch(self, url, commit_ids=None):
687 def fetch(self, url, commit_ids=None):
682 """
688 """
683 Fetch all git objects from external location.
689 Fetch all git objects from external location.
684 """
690 """
685 self._remote.sync_fetch(url, refs=commit_ids)
691 self._remote.sync_fetch(url, refs=commit_ids)
686 self._remote.invalidate_vcs_cache()
692 self._remote.invalidate_vcs_cache()
687
693
688 def push(self, url):
694 def push(self, url):
689 refs = None
695 refs = None
690 self._remote.sync_push(url, refs=refs)
696 self._remote.sync_push(url, refs=refs)
691
697
692 def set_refs(self, ref_name, commit_id):
698 def set_refs(self, ref_name, commit_id):
693 self._remote.set_refs(ref_name, commit_id)
699 self._remote.set_refs(ref_name, commit_id)
694
700
695 def remove_ref(self, ref_name):
701 def remove_ref(self, ref_name):
696 self._remote.remove_ref(ref_name)
702 self._remote.remove_ref(ref_name)
697
703
698 def _update_server_info(self):
704 def _update_server_info(self):
699 """
705 """
700 runs git's update-server-info command in this repo instance
706 runs git's update-server-info command in this repo instance
701 """
707 """
702 self._remote.update_server_info()
708 self._remote.update_server_info()
703
709
704 def _current_branch(self):
710 def _current_branch(self):
705 """
711 """
706 Return the name of the current branch.
712 Return the name of the current branch.
707
713
708 It only works for non bare repositories (i.e. repositories with a
714 It only works for non bare repositories (i.e. repositories with a
709 working copy)
715 working copy)
710 """
716 """
711 if self.bare:
717 if self.bare:
712 raise RepositoryError('Bare git repos do not have active branches')
718 raise RepositoryError('Bare git repos do not have active branches')
713
719
714 if self.is_empty():
720 if self.is_empty():
715 return None
721 return None
716
722
717 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
723 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
718 return stdout.strip()
724 return stdout.strip()
719
725
720 def _checkout(self, branch_name, create=False, force=False):
726 def _checkout(self, branch_name, create=False, force=False):
721 """
727 """
722 Checkout a branch in the working directory.
728 Checkout a branch in the working directory.
723
729
724 It tries to create the branch if create is True, failing if the branch
730 It tries to create the branch if create is True, failing if the branch
725 already exists.
731 already exists.
726
732
727 It only works for non bare repositories (i.e. repositories with a
733 It only works for non bare repositories (i.e. repositories with a
728 working copy)
734 working copy)
729 """
735 """
730 if self.bare:
736 if self.bare:
731 raise RepositoryError('Cannot checkout branches in a bare git repo')
737 raise RepositoryError('Cannot checkout branches in a bare git repo')
732
738
733 cmd = ['checkout']
739 cmd = ['checkout']
734 if force:
740 if force:
735 cmd.append('-f')
741 cmd.append('-f')
736 if create:
742 if create:
737 cmd.append('-b')
743 cmd.append('-b')
738 cmd.append(branch_name)
744 cmd.append(branch_name)
739 self.run_git_command(cmd, fail_on_stderr=False)
745 self.run_git_command(cmd, fail_on_stderr=False)
740
746
741 def _identify(self):
747 def _identify(self):
742 """
748 """
743 Return the current state of the working directory.
749 Return the current state of the working directory.
744 """
750 """
745 if self.bare:
751 if self.bare:
746 raise RepositoryError('Bare git repos do not have active branches')
752 raise RepositoryError('Bare git repos do not have active branches')
747
753
748 if self.is_empty():
754 if self.is_empty():
749 return None
755 return None
750
756
751 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
757 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
752 return stdout.strip()
758 return stdout.strip()
753
759
754 def _local_clone(self, clone_path, branch_name, source_branch=None):
760 def _local_clone(self, clone_path, branch_name, source_branch=None):
755 """
761 """
756 Create a local clone of the current repo.
762 Create a local clone of the current repo.
757 """
763 """
758 # N.B.(skreft): the --branch option is required as otherwise the shallow
764 # N.B.(skreft): the --branch option is required as otherwise the shallow
759 # clone will only fetch the active branch.
765 # clone will only fetch the active branch.
760 cmd = ['clone', '--branch', branch_name,
766 cmd = ['clone', '--branch', branch_name,
761 self.path, os.path.abspath(clone_path)]
767 self.path, os.path.abspath(clone_path)]
762
768
763 self.run_git_command(cmd, fail_on_stderr=False)
769 self.run_git_command(cmd, fail_on_stderr=False)
764
770
765 # if we got a different source branch, make sure we also fetch it for
771 # if we got a different source branch, make sure we also fetch it for
766 # merge conditions
772 # merge conditions
767 if source_branch and source_branch != branch_name:
773 if source_branch and source_branch != branch_name:
768 # check if the ref exists.
774 # check if the ref exists.
769 shadow_repo = GitRepository(os.path.abspath(clone_path))
775 shadow_repo = GitRepository(os.path.abspath(clone_path))
770 if shadow_repo.get_remote_ref(source_branch):
776 if shadow_repo.get_remote_ref(source_branch):
771 cmd = ['fetch', self.path, source_branch]
777 cmd = ['fetch', self.path, source_branch]
772 self.run_git_command(cmd, fail_on_stderr=False)
778 self.run_git_command(cmd, fail_on_stderr=False)
773
779
774 def _local_fetch(self, repository_path, branch_name, use_origin=False):
780 def _local_fetch(self, repository_path, branch_name, use_origin=False):
775 """
781 """
776 Fetch a branch from a local repository.
782 Fetch a branch from a local repository.
777 """
783 """
778 repository_path = os.path.abspath(repository_path)
784 repository_path = os.path.abspath(repository_path)
779 if repository_path == self.path:
785 if repository_path == self.path:
780 raise ValueError('Cannot fetch from the same repository')
786 raise ValueError('Cannot fetch from the same repository')
781
787
782 if use_origin:
788 if use_origin:
783 branch_name = '+{branch}:refs/heads/{branch}'.format(
789 branch_name = '+{branch}:refs/heads/{branch}'.format(
784 branch=branch_name)
790 branch=branch_name)
785
791
786 cmd = ['fetch', '--no-tags', '--update-head-ok',
792 cmd = ['fetch', '--no-tags', '--update-head-ok',
787 repository_path, branch_name]
793 repository_path, branch_name]
788 self.run_git_command(cmd, fail_on_stderr=False)
794 self.run_git_command(cmd, fail_on_stderr=False)
789
795
790 def _local_reset(self, branch_name):
796 def _local_reset(self, branch_name):
791 branch_name = '{}'.format(branch_name)
797 branch_name = '{}'.format(branch_name)
792 cmd = ['reset', '--hard', branch_name, '--']
798 cmd = ['reset', '--hard', branch_name, '--']
793 self.run_git_command(cmd, fail_on_stderr=False)
799 self.run_git_command(cmd, fail_on_stderr=False)
794
800
795 def _last_fetch_heads(self):
801 def _last_fetch_heads(self):
796 """
802 """
797 Return the last fetched heads that need merging.
803 Return the last fetched heads that need merging.
798
804
799 The algorithm is defined at
805 The algorithm is defined at
800 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
806 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
801 """
807 """
802 if not self.bare:
808 if not self.bare:
803 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
809 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
804 else:
810 else:
805 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
811 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
806
812
807 heads = []
813 heads = []
808 with open(fetch_heads_path) as f:
814 with open(fetch_heads_path) as f:
809 for line in f:
815 for line in f:
810 if ' not-for-merge ' in line:
816 if ' not-for-merge ' in line:
811 continue
817 continue
812 line = re.sub('\t.*', '', line, flags=re.DOTALL)
818 line = re.sub('\t.*', '', line, flags=re.DOTALL)
813 heads.append(line)
819 heads.append(line)
814
820
815 return heads
821 return heads
816
822
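A self-contained sketch of the FETCH_HEAD parsing performed by `_last_fetch_heads` above; the sha and repository path in the sample line are invented, and the exact FETCH_HEAD field layout can differ between git versions.

import re

def parse_fetch_heads(fetch_head_lines):
    # keep only entries that are candidates for merging and strip everything
    # after the first tab so that just the sha remains
    heads = []
    for line in fetch_head_lines:
        if ' not-for-merge ' in line:
            continue
        heads.append(re.sub('\t.*', '', line, flags=re.DOTALL))
    return heads

sample = ["9fceb02d0ae598e95dc970b74767f19372d61af8\t\tbranch 'master' of /srv/repos/foo\n"]
print(parse_fetch_heads(sample))
# -> ['9fceb02d0ae598e95dc970b74767f19372d61af8']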
817 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
823 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
818 return GitRepository(shadow_repository_path)
824 return GitRepository(shadow_repository_path)
819
825
820 def _local_pull(self, repository_path, branch_name, ff_only=True):
826 def _local_pull(self, repository_path, branch_name, ff_only=True):
821 """
827 """
822 Pull a branch from a local repository.
828 Pull a branch from a local repository.
823 """
829 """
824 if self.bare:
830 if self.bare:
825 raise RepositoryError('Cannot pull into a bare git repository')
831 raise RepositoryError('Cannot pull into a bare git repository')
826 # N.B.(skreft): The --ff-only option is to make sure this is a
832 # N.B.(skreft): The --ff-only option is to make sure this is a
827 # fast-forward (i.e., we are only pulling new changes and there are no
833 # fast-forward (i.e., we are only pulling new changes and there are no
828 # conflicts with our current branch)
834 # conflicts with our current branch)
829 # Additionally, that option needs to go before --no-tags, otherwise git
835 # Additionally, that option needs to go before --no-tags, otherwise git
830 # pull complains about it being an unknown flag.
836 # pull complains about it being an unknown flag.
831 cmd = ['pull']
837 cmd = ['pull']
832 if ff_only:
838 if ff_only:
833 cmd.append('--ff-only')
839 cmd.append('--ff-only')
834 cmd.extend(['--no-tags', repository_path, branch_name])
840 cmd.extend(['--no-tags', repository_path, branch_name])
835 self.run_git_command(cmd, fail_on_stderr=False)
841 self.run_git_command(cmd, fail_on_stderr=False)
836
842
837 def _local_merge(self, merge_message, user_name, user_email, heads):
843 def _local_merge(self, merge_message, user_name, user_email, heads):
838 """
844 """
839 Merge the given head into the checked out branch.
845 Merge the given head into the checked out branch.
840
846
841 It will force a merge commit.
847 It will force a merge commit.
842
848
843 Currently it raises an error if the repo is empty, as it is not possible
849 Currently it raises an error if the repo is empty, as it is not possible
844 to create a merge commit in an empty repo.
850 to create a merge commit in an empty repo.
845
851
846 :param merge_message: The message to use for the merge commit.
852 :param merge_message: The message to use for the merge commit.
847 :param heads: the heads to merge.
853 :param heads: the heads to merge.
848 """
854 """
849 if self.bare:
855 if self.bare:
850 raise RepositoryError('Cannot merge into a bare git repository')
856 raise RepositoryError('Cannot merge into a bare git repository')
851
857
852 if not heads:
858 if not heads:
853 return
859 return
854
860
855 if self.is_empty():
861 if self.is_empty():
856 # TODO(skreft): do something more robust in this case.
862 # TODO(skreft): do something more robust in this case.
857 raise RepositoryError(
863 raise RepositoryError(
858 'Do not know how to merge into empty repositories yet')
864 'Do not know how to merge into empty repositories yet')
859
865
860 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
866 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
861 # merge commit. We also specify the user who is doing the merge.
867 # merge commit. We also specify the user who is doing the merge.
862 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
868 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
863 '-c', 'user.email=%s' % safe_str(user_email),
869 '-c', 'user.email=%s' % safe_str(user_email),
864 'merge', '--no-ff', '-m', safe_str(merge_message)]
870 'merge', '--no-ff', '-m', safe_str(merge_message)]
865 cmd.extend(heads)
871 cmd.extend(heads)
866 try:
872 try:
867 output = self.run_git_command(cmd, fail_on_stderr=False)
873 output = self.run_git_command(cmd, fail_on_stderr=False)
868 except RepositoryError:
874 except RepositoryError:
869 # Cleanup any merge leftovers
875 # Cleanup any merge leftovers
870 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
876 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
871 raise
877 raise
872
878
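A standalone sketch of the merge invocation assembled by `_local_merge` above; the user identity, message and head sha are illustrative. The committer identity is injected through git's `-c` options and `--no-ff` guarantees a real merge commit even when a fast-forward would be possible.

def merge_args(merge_message, user_name, user_email, heads):
    # mirrors the command built in _local_merge above
    cmd = ['-c', 'user.name="%s"' % user_name,
           '-c', 'user.email=%s' % user_email,
           'merge', '--no-ff', '-m', merge_message]
    cmd.extend(heads)
    return cmd

print(merge_args('merged source into target', 'Joe Doe', 'joe.doe@example.com',
                 ['9fceb02d0ae598e95dc970b74767f19372d61af8']))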
873 def _local_push(
879 def _local_push(
874 self, source_branch, repository_path, target_branch,
880 self, source_branch, repository_path, target_branch,
875 enable_hooks=False, rc_scm_data=None):
881 enable_hooks=False, rc_scm_data=None):
876 """
882 """
877 Push the source_branch to the given repository and target_branch.
883 Push the source_branch to the given repository and target_branch.
878
884
879 Currently, if the target_branch is not master and the target repo is
885 Currently, if the target_branch is not master and the target repo is
880 empty, the push will work, but then GitRepository won't be able to find
886 empty, the push will work, but then GitRepository won't be able to find
881 the pushed branch or the commits, as the HEAD will be corrupted (i.e.,
887 the pushed branch or the commits, as the HEAD will be corrupted (i.e.,
882 pointing to master, which does not exist).
888 pointing to master, which does not exist).
883
889
884 It does not run the hooks in the target repo.
890 It does not run the hooks in the target repo.
885 """
891 """
886 # TODO(skreft): deal with the case in which the target repo is empty,
892 # TODO(skreft): deal with the case in which the target repo is empty,
887 # and the target_branch is not master.
893 # and the target_branch is not master.
888 target_repo = GitRepository(repository_path)
894 target_repo = GitRepository(repository_path)
889 if (not target_repo.bare and
895 if (not target_repo.bare and
890 target_repo._current_branch() == target_branch):
896 target_repo._current_branch() == target_branch):
891 # Git prevents pushing to the checked out branch, so simulate it by
897 # Git prevents pushing to the checked out branch, so simulate it by
892 # pulling into the target repository.
898 # pulling into the target repository.
893 target_repo._local_pull(self.path, source_branch)
899 target_repo._local_pull(self.path, source_branch)
894 else:
900 else:
895 cmd = ['push', os.path.abspath(repository_path),
901 cmd = ['push', os.path.abspath(repository_path),
896 '%s:%s' % (source_branch, target_branch)]
902 '%s:%s' % (source_branch, target_branch)]
897 gitenv = {}
903 gitenv = {}
898 if rc_scm_data:
904 if rc_scm_data:
899 gitenv.update({'RC_SCM_DATA': rc_scm_data})
905 gitenv.update({'RC_SCM_DATA': rc_scm_data})
900
906
901 if not enable_hooks:
907 if not enable_hooks:
902 gitenv['RC_SKIP_HOOKS'] = '1'
908 gitenv['RC_SKIP_HOOKS'] = '1'
903 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
909 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
904
910
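A condensed, hedged sketch of the decision `_local_push` makes; all paths and branch names are hypothetical. Git refuses to push into the branch that is currently checked out in a non-bare repository, so that case is simulated with a fast-forward pull run inside the target working copy; otherwise a plain `source:target` refspec push is used.

def plan_push(source_repo_path, source_branch, target_repo_path, target_branch,
              target_is_bare, target_current_branch):
    # mirrors the branching logic of _local_push above
    if not target_is_bare and target_current_branch == target_branch:
        return ('run in target repo',
                ['pull', '--ff-only', '--no-tags', source_repo_path, source_branch])
    return ('run in source repo',
            ['push', target_repo_path, '%s:%s' % (source_branch, target_branch)])

print(plan_push('/srv/repos/shadow', 'pr_feature-master_0',
                '/srv/repos/target.git', 'master', True, None))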
905 def _get_new_pr_branch(self, source_branch, target_branch):
911 def _get_new_pr_branch(self, source_branch, target_branch):
906 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
912 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
907 pr_branches = []
913 pr_branches = []
908 for branch in self.branches:
914 for branch in self.branches:
909 if branch.startswith(prefix):
915 if branch.startswith(prefix):
910 pr_branches.append(int(branch[len(prefix):]))
916 pr_branches.append(int(branch[len(prefix):]))
911
917
912 if not pr_branches:
918 if not pr_branches:
913 branch_id = 0
919 branch_id = 0
914 else:
920 else:
915 branch_id = max(pr_branches) + 1
921 branch_id = max(pr_branches) + 1
916
922
917 return '%s%d' % (prefix, branch_id)
923 return '%s%d' % (prefix, branch_id)
918
924
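A runnable sketch of the pull-request branch naming implemented by `_get_new_pr_branch` above; the branch names are made up. Every merge attempt gets `pr_<source>-<target>_<n>`, where `n` is one past the highest suffix already taken.

def new_pr_branch(existing_branches, source_branch, target_branch):
    prefix = 'pr_%s-%s_' % (source_branch, target_branch)
    taken = [int(b[len(prefix):]) for b in existing_branches if b.startswith(prefix)]
    branch_id = max(taken) + 1 if taken else 0
    return '%s%d' % (prefix, branch_id)

print(new_pr_branch(['master', 'pr_feature-master_0', 'pr_feature-master_1'],
                    'feature', 'master'))
# -> pr_feature-master_2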
919 def _maybe_prepare_merge_workspace(
925 def _maybe_prepare_merge_workspace(
920 self, repo_id, workspace_id, target_ref, source_ref):
926 self, repo_id, workspace_id, target_ref, source_ref):
921 shadow_repository_path = self._get_shadow_repository_path(
927 shadow_repository_path = self._get_shadow_repository_path(
922 repo_id, workspace_id)
928 repo_id, workspace_id)
923 if not os.path.exists(shadow_repository_path):
929 if not os.path.exists(shadow_repository_path):
924 self._local_clone(
930 self._local_clone(
925 shadow_repository_path, target_ref.name, source_ref.name)
931 shadow_repository_path, target_ref.name, source_ref.name)
926 log.debug(
932 log.debug(
927 'Prepared shadow repository in %s', shadow_repository_path)
933 'Prepared shadow repository in %s', shadow_repository_path)
928
934
929 return shadow_repository_path
935 return shadow_repository_path
930
936
931 def _merge_repo(self, repo_id, workspace_id, target_ref,
937 def _merge_repo(self, repo_id, workspace_id, target_ref,
932 source_repo, source_ref, merge_message,
938 source_repo, source_ref, merge_message,
933 merger_name, merger_email, dry_run=False,
939 merger_name, merger_email, dry_run=False,
934 use_rebase=False, close_branch=False):
940 use_rebase=False, close_branch=False):
935
941
936 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
942 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
937 'rebase' if use_rebase else 'merge', dry_run)
943 'rebase' if use_rebase else 'merge', dry_run)
938 if target_ref.commit_id != self.branches[target_ref.name]:
944 if target_ref.commit_id != self.branches[target_ref.name]:
939 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
945 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
940 target_ref.commit_id, self.branches[target_ref.name])
946 target_ref.commit_id, self.branches[target_ref.name])
941 return MergeResponse(
947 return MergeResponse(
942 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
948 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
943 metadata={'target_ref': target_ref})
949 metadata={'target_ref': target_ref})
944
950
945 shadow_repository_path = self._maybe_prepare_merge_workspace(
951 shadow_repository_path = self._maybe_prepare_merge_workspace(
946 repo_id, workspace_id, target_ref, source_ref)
952 repo_id, workspace_id, target_ref, source_ref)
947 shadow_repo = self._get_shadow_instance(shadow_repository_path)
953 shadow_repo = self._get_shadow_instance(shadow_repository_path)
948
954
949 # checkout source, if it's different. Otherwise we could not
955 # checkout source, if it's different. Otherwise we could not
950 # fetch proper commits for merge testing
956 # fetch proper commits for merge testing
951 if source_ref.name != target_ref.name:
957 if source_ref.name != target_ref.name:
952 if shadow_repo.get_remote_ref(source_ref.name):
958 if shadow_repo.get_remote_ref(source_ref.name):
953 shadow_repo._checkout(source_ref.name, force=True)
959 shadow_repo._checkout(source_ref.name, force=True)
954
960
955 # checkout target, and fetch changes
961 # checkout target, and fetch changes
956 shadow_repo._checkout(target_ref.name, force=True)
962 shadow_repo._checkout(target_ref.name, force=True)
957
963
958 # fetch and hard-reset the target, in case it has changed;
964 # fetch and hard-reset the target, in case it has changed;
959 # this handles even forced changes
965 # this handles even forced changes
960 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
966 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
961 shadow_repo._local_reset(target_ref.name)
967 shadow_repo._local_reset(target_ref.name)
962
968
963 # Need to reload repo to invalidate the cache, or otherwise we cannot
969 # Need to reload repo to invalidate the cache, or otherwise we cannot
964 # retrieve the last target commit.
970 # retrieve the last target commit.
965 shadow_repo = self._get_shadow_instance(shadow_repository_path)
971 shadow_repo = self._get_shadow_instance(shadow_repository_path)
966 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
972 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
967 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
973 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
968 target_ref, target_ref.commit_id,
974 target_ref, target_ref.commit_id,
969 shadow_repo.branches[target_ref.name])
975 shadow_repo.branches[target_ref.name])
970 return MergeResponse(
976 return MergeResponse(
971 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
977 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
972 metadata={'target_ref': target_ref})
978 metadata={'target_ref': target_ref})
973
979
974 # calculate new branch
980 # calculate new branch
975 pr_branch = shadow_repo._get_new_pr_branch(
981 pr_branch = shadow_repo._get_new_pr_branch(
976 source_ref.name, target_ref.name)
982 source_ref.name, target_ref.name)
977 log.debug('using pull-request merge branch: `%s`', pr_branch)
983 log.debug('using pull-request merge branch: `%s`', pr_branch)
978 # checkout to temp branch, and fetch changes
984 # checkout to temp branch, and fetch changes
979 shadow_repo._checkout(pr_branch, create=True)
985 shadow_repo._checkout(pr_branch, create=True)
980 try:
986 try:
981 shadow_repo._local_fetch(source_repo.path, source_ref.name)
987 shadow_repo._local_fetch(source_repo.path, source_ref.name)
982 except RepositoryError:
988 except RepositoryError:
983 log.exception('Failure when doing local fetch on '
989 log.exception('Failure when doing local fetch on '
984 'shadow repo: %s', shadow_repo)
990 'shadow repo: %s', shadow_repo)
985 return MergeResponse(
991 return MergeResponse(
986 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
992 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
987 metadata={'source_ref': source_ref})
993 metadata={'source_ref': source_ref})
988
994
989 merge_ref = None
995 merge_ref = None
990 merge_failure_reason = MergeFailureReason.NONE
996 merge_failure_reason = MergeFailureReason.NONE
991 metadata = {}
997 metadata = {}
992 try:
998 try:
993 shadow_repo._local_merge(merge_message, merger_name, merger_email,
999 shadow_repo._local_merge(merge_message, merger_name, merger_email,
994 [source_ref.commit_id])
1000 [source_ref.commit_id])
995 merge_possible = True
1001 merge_possible = True
996
1002
997 # Need to reload repo to invalidate the cache, or otherwise we
1003 # Need to reload repo to invalidate the cache, or otherwise we
998 # cannot retrieve the merge commit.
1004 # cannot retrieve the merge commit.
999 shadow_repo = GitRepository(shadow_repository_path)
1005 shadow_repo = GitRepository(shadow_repository_path)
1000 merge_commit_id = shadow_repo.branches[pr_branch]
1006 merge_commit_id = shadow_repo.branches[pr_branch]
1001
1007
1002 # Set a reference pointing to the merge commit. This reference may
1008 # Set a reference pointing to the merge commit. This reference may
1003 # be used to easily identify the last successful merge commit in
1009 # be used to easily identify the last successful merge commit in
1004 # the shadow repository.
1010 # the shadow repository.
1005 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1011 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1006 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1012 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1007 except RepositoryError:
1013 except RepositoryError:
1008 log.exception('Failure when doing local merge on git shadow repo')
1014 log.exception('Failure when doing local merge on git shadow repo')
1009 merge_possible = False
1015 merge_possible = False
1010 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1016 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1011
1017
1012 if merge_possible and not dry_run:
1018 if merge_possible and not dry_run:
1013 try:
1019 try:
1014 shadow_repo._local_push(
1020 shadow_repo._local_push(
1015 pr_branch, self.path, target_ref.name, enable_hooks=True,
1021 pr_branch, self.path, target_ref.name, enable_hooks=True,
1016 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1022 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1017 merge_succeeded = True
1023 merge_succeeded = True
1018 except RepositoryError:
1024 except RepositoryError:
1019 log.exception(
1025 log.exception(
1020 'Failure when doing local push from the shadow '
1026 'Failure when doing local push from the shadow '
1021 'repository to the target repository at %s.', self.path)
1027 'repository to the target repository at %s.', self.path)
1022 merge_succeeded = False
1028 merge_succeeded = False
1023 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1029 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1024 metadata['target'] = 'git shadow repo'
1030 metadata['target'] = 'git shadow repo'
1025 metadata['merge_commit'] = pr_branch
1031 metadata['merge_commit'] = pr_branch
1026 else:
1032 else:
1027 merge_succeeded = False
1033 merge_succeeded = False
1028
1034
1029 return MergeResponse(
1035 return MergeResponse(
1030 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1036 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1031 metadata=metadata)
1037 metadata=metadata)
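In outline, `_merge_repo` above performs the whole test merge inside a disposable shadow clone. The sketch below restates only the happy path with hypothetical argument names; it leaves out the initial source checkout, the cache reloads, the TARGET_IS_NOT_HEAD checks and the MergeResponse bookkeeping shown above.

def shadow_merge_outline(target_repo, shadow_repo, source_repo,
                         source_ref, target_ref, merge_message,
                         merger_name, merger_email, dry_run):
    # bring the shadow clone's target branch up to date, even after force pushes
    shadow_repo._checkout(target_ref.name, force=True)
    shadow_repo._local_fetch(target_repo.path, target_ref.name, use_origin=True)
    shadow_repo._local_reset(target_ref.name)

    # merge the source into a throwaway pr_* branch
    pr_branch = shadow_repo._get_new_pr_branch(source_ref.name, target_ref.name)
    shadow_repo._checkout(pr_branch, create=True)
    shadow_repo._local_fetch(source_repo.path, source_ref.name)
    shadow_repo._local_merge(merge_message, merger_name, merger_email,
                             [source_ref.commit_id])

    if not dry_run:
        # push the merge result back into the real target repository
        shadow_repo._local_push(pr_branch, target_repo.path, target_ref.name,
                                enable_hooks=True)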
@@ -1,98 +1,95 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG inmemory module
22 HG inmemory module
23 """
23 """
24
24
25 from rhodecode.lib.datelib import date_to_timestamp_plus_offset
25 from rhodecode.lib.datelib import date_to_timestamp_plus_offset
26 from rhodecode.lib.utils import safe_str
26 from rhodecode.lib.utils import safe_str
27 from rhodecode.lib.vcs.backends.base import BaseInMemoryCommit
27 from rhodecode.lib.vcs.backends.base import BaseInMemoryCommit
28 from rhodecode.lib.vcs.exceptions import RepositoryError
28 from rhodecode.lib.vcs.exceptions import RepositoryError
29
29
30
30
31 class MercurialInMemoryCommit(BaseInMemoryCommit):
31 class MercurialInMemoryCommit(BaseInMemoryCommit):
32
32
33 def commit(self, message, author, parents=None, branch=None, date=None,
33 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
34 **kwargs):
35 """
34 """
36 Performs in-memory commit (doesn't check workdir in any way) and
35 Performs in-memory commit (doesn't check workdir in any way) and
37 returns newly created `MercurialCommit`. Updates repository's
36 returns newly created `MercurialCommit`. Updates repository's
38 `commit_ids`.
37 `commit_ids`.
39
38
40 :param message: message of the commit
39 :param message: message of the commit
41 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
40 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
42 :param parents: single parent or sequence of parents from which commit
41 :param parents: single parent or sequence of parents from which commit
43 would be derived
42 would be derived
44 :param date: `datetime.datetime` instance. Defaults to
43 :param date: `datetime.datetime` instance. Defaults to
45 ``datetime.datetime.now()``.
44 ``datetime.datetime.now()``.
46 :param branch: Optional. Branch name as unicode. Will use the backend's
45 :param branch: Optional. Branch name as unicode. Will use the backend's
47 default if not given.
46 default if not given.
48
47
49 :raises `RepositoryError`: if any error occurs while committing
48 :raises `RepositoryError`: if any error occurs while committing
50 """
49 """
51 self.check_integrity(parents)
50 self.check_integrity(parents)
52
51
53 if not isinstance(message, unicode) or not isinstance(author, unicode):
52 if not isinstance(message, unicode) or not isinstance(author, unicode):
54 # TODO: johbo: Should be a TypeError
53 # TODO: johbo: Should be a TypeError
55 raise RepositoryError('Given message and author need to be '
54 raise RepositoryError('Given message and author need to be '
56 '<unicode> instances, got %r & %r instead'
55 '<unicode> instances, got %r & %r instead'
57 % (type(message), type(author)))
56 % (type(message), type(author)))
58
57
59 if branch is None:
58 if branch is None:
60 branch = self.repository.DEFAULT_BRANCH_NAME
59 branch = self.repository.DEFAULT_BRANCH_NAME
61 kwargs['branch'] = safe_str(branch)
60 kwargs['branch'] = safe_str(branch)
62
61
63 message = safe_str(message)
62 message = safe_str(message)
64 author = safe_str(author)
63 author = safe_str(author)
65
64
66 parent_ids = [p.raw_id if p else None for p in self.parents]
65 parent_ids = [p.raw_id if p else None for p in self.parents]
67
66
68 ENCODING = "UTF-8"
67 ENCODING = "UTF-8"
69
68
70 updated = []
69 updated = []
71 for node in self.added + self.changed:
70 for node in self.added + self.changed:
72 if node.is_binary:
71 if node.is_binary:
73 content = node.content
72 content = node.content
74 else:
73 else:
75 content = node.content.encode(ENCODING)
74 content = node.content.encode(ENCODING)
76 updated.append({
75 updated.append({
77 'path': node.path,
76 'path': node.path,
78 'content': content,
77 'content': content,
79 'mode': node.mode,
78 'mode': node.mode,
80 })
79 })
81
80
82 removed = [node.path for node in self.removed]
81 removed = [node.path for node in self.removed]
83
82
84 date, tz = date_to_timestamp_plus_offset(date)
83 date, tz = date_to_timestamp_plus_offset(date)
85
84
86 commit_id = self.repository._remote.commitctx(
85 commit_id = self.repository._remote.commitctx(
87 message=message, parents=parent_ids,
86 message=message, parents=parent_ids,
88 commit_time=date, commit_timezone=tz, user=author,
87 commit_time=date, commit_timezone=tz, user=author,
89 files=self.get_paths(), extra=kwargs, removed=removed,
88 files=self.get_paths(), extra=kwargs, removed=removed,
90 updated=updated)
89 updated=updated)
91 if commit_id not in self.repository.commit_ids:
90 self.repository.append_commit_id(commit_id)
92 self.repository.commit_ids.append(commit_id)
93 self.repository._rebuild_cache(self.repository.commit_ids)
94
91
95 self.repository.branches = self.repository._get_branches()
92 self.repository.branches = self.repository._get_branches()
96 tip = self.repository.get_commit()
93 tip = self.repository.get_commit(commit_id)
97 self.reset()
94 self.reset()
98 return tip
95 return tip
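A hedged usage sketch of the in-memory commit API above. It assumes the repository object exposes `in_memory_commit` (shown in the repository module below) and that content is staged through the `FileNode`/`add()` helpers that the vcs node module usually provides; the path, file content, message and author are made up, and message/author are unicode as the check above requires.

from rhodecode.lib.vcs.nodes import FileNode  # assumed helper location

repo = MercurialRepository('/srv/repos/example-hg')   # hypothetical path
imc = repo.in_memory_commit
imc.add(FileNode('docs/readme.rst', content='hello\n'))
tip = imc.commit(
    message=u'add readme',
    author=u'Joe Doe <joe.doe@example.com>')
print(tip.raw_id)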
@@ -1,942 +1,949 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import time
27 import urllib
28 import urllib
28
29
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import CachedProperty
30
32
31 from rhodecode.lib.compat import OrderedDict
33 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
34 from rhodecode.lib.datelib import (
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 from rhodecode.lib.utils import safe_unicode, safe_str
36 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.vcs import connection, exceptions
37 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs.backends.base import (
38 from rhodecode.lib.vcs.backends.base import (
37 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 MergeFailureReason, Reference, BasePathPermissionChecker)
40 MergeFailureReason, Reference, BasePathPermissionChecker)
39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.exceptions import (
44 from rhodecode.lib.vcs.exceptions import (
43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 from rhodecode.lib.vcs.compat import configparser
47 from rhodecode.lib.vcs.compat import configparser
46
48
47 hexlify = binascii.hexlify
49 hexlify = binascii.hexlify
48 nullid = "\0" * 20
50 nullid = "\0" * 20
49
51
50 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
51
53
52
54
53 class MercurialRepository(BaseRepository):
55 class MercurialRepository(BaseRepository):
54 """
56 """
55 Mercurial repository backend
57 Mercurial repository backend
56 """
58 """
57 DEFAULT_BRANCH_NAME = 'default'
59 DEFAULT_BRANCH_NAME = 'default'
58
60
59 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 do_workspace_checkout=False, with_wire=None, bare=False):
62 do_workspace_checkout=False, with_wire=None, bare=False):
61 """
63 """
62 Raises RepositoryError if repository could not be found at the given
64 Raises RepositoryError if repository could not be found at the given
63 ``repo_path``.
65 ``repo_path``.
64
66
65 :param repo_path: local path of the repository
67 :param repo_path: local path of the repository
66 :param config: config object containing the repo configuration
68 :param config: config object containing the repo configuration
67 :param create=False: if set to True, would try to create repository if
69 :param create=False: if set to True, would try to create repository if
68 it does not exist rather than raising exception
70 it does not exist rather than raising exception
69 :param src_url=None: would try to clone repository from given location
71 :param src_url=None: would try to clone repository from given location
70 :param do_workspace_checkout=False: sets update of working copy after
72 :param do_workspace_checkout=False: sets update of working copy after
71 making a clone
73 making a clone
72 :param bare: not used, compatible with other VCS
74 :param bare: not used, compatible with other VCS
73 """
75 """
74
76
75 self.path = safe_str(os.path.abspath(repo_path))
77 self.path = safe_str(os.path.abspath(repo_path))
76 # mercurial since 4.4.X requires certain configuration to be present
78 # mercurial since 4.4.X requires certain configuration to be present
77 # because sometimes we init the repos with config we need to meet
79 # because sometimes we init the repos with config we need to meet
78 # special requirements
80 # special requirements
79 self.config = config if config else self.get_default_config(
81 self.config = config if config else self.get_default_config(
80 default=[('extensions', 'largefiles', '1')])
82 default=[('extensions', 'largefiles', '1')])
81 self.with_wire = with_wire
83 self.with_wire = with_wire
82
84
83 self._init_repo(create, src_url, do_workspace_checkout)
85 self._init_repo(create, src_url, do_workspace_checkout)
84
86
85 # caches
87 # caches
86 self._commit_ids = {}
88 self._commit_ids = {}
87
89
90 # dependency that triggers re-computation of commit_ids
91 self._commit_ids_ver = 0
92
88 @LazyProperty
93 @LazyProperty
89 def _remote(self):
94 def _remote(self):
90 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
95 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
91
96
92 @LazyProperty
97 @CachedProperty('_commit_ids_ver')
93 def commit_ids(self):
98 def commit_ids(self):
94 """
99 """
95 Returns list of commit ids, in ascending order. Being a lazy
100 Returns list of commit ids, in ascending order. Being a lazy
96 attribute allows external tools to inject shas from cache.
101 attribute allows external tools to inject shas from cache.
97 """
102 """
98 commit_ids = self._get_all_commit_ids()
103 commit_ids = self._get_all_commit_ids()
99 self._rebuild_cache(commit_ids)
104 self._rebuild_cache(commit_ids)
100 return commit_ids
105 return commit_ids
101
106
102 def _rebuild_cache(self, commit_ids):
107 def _rebuild_cache(self, commit_ids):
103 self._commit_ids = dict((commit_id, index)
108 self._commit_ids = dict((commit_id, index)
104 for index, commit_id in enumerate(commit_ids))
109 for index, commit_id in enumerate(commit_ids))
105
110
106 @LazyProperty
111 @LazyProperty
107 def branches(self):
112 def branches(self):
108 return self._get_branches()
113 return self._get_branches()
109
114
110 @LazyProperty
115 @LazyProperty
111 def branches_closed(self):
116 def branches_closed(self):
112 return self._get_branches(active=False, closed=True)
117 return self._get_branches(active=False, closed=True)
113
118
114 @LazyProperty
119 @LazyProperty
115 def branches_all(self):
120 def branches_all(self):
116 all_branches = {}
121 all_branches = {}
117 all_branches.update(self.branches)
122 all_branches.update(self.branches)
118 all_branches.update(self.branches_closed)
123 all_branches.update(self.branches_closed)
119 return all_branches
124 return all_branches
120
125
121 def _get_branches(self, active=True, closed=False):
126 def _get_branches(self, active=True, closed=False):
122 """
127 """
123 Gets branches for this repository
128 Gets branches for this repository
124 Returns only active branches that are not closed, by default
129 Returns only active branches that are not closed, by default
125
130
126 :param active: return also active branches
131 :param active: return also active branches
127 :param closed: return also closed branches
132 :param closed: return also closed branches
128
133
129 """
134 """
130 if self.is_empty():
135 if self.is_empty():
131 return {}
136 return {}
132
137
133 def get_name(ctx):
138 def get_name(ctx):
134 return ctx[0]
139 return ctx[0]
135
140
136 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
141 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
137 self._remote.branches(active, closed).items()]
142 self._remote.branches(active, closed).items()]
138
143
139 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
144 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
140
145
141 @LazyProperty
146 @LazyProperty
142 def tags(self):
147 def tags(self):
143 """
148 """
144 Gets tags for this repository
149 Gets tags for this repository
145 """
150 """
146 return self._get_tags()
151 return self._get_tags()
147
152
148 def _get_tags(self):
153 def _get_tags(self):
149 if self.is_empty():
154 if self.is_empty():
150 return {}
155 return {}
151
156
152 def get_name(ctx):
157 def get_name(ctx):
153 return ctx[0]
158 return ctx[0]
154
159
155 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
160 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
156 self._remote.tags().items()]
161 self._remote.tags().items()]
157
162
158 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
163 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
159
164
160 def tag(self, name, user, commit_id=None, message=None, date=None,
165 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
161 **kwargs):
162 """
166 """
163 Creates and returns a tag for the given ``commit_id``.
167 Creates and returns a tag for the given ``commit_id``.
164
168
165 :param name: name for new tag
169 :param name: name for new tag
166 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
170 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 :param commit_id: commit id for which new tag would be created
171 :param commit_id: commit id for which new tag would be created
168 :param message: message of the tag's commit
172 :param message: message of the tag's commit
169 :param date: date of tag's commit
173 :param date: date of tag's commit
170
174
171 :raises TagAlreadyExistError: if tag with same name already exists
175 :raises TagAlreadyExistError: if tag with same name already exists
172 """
176 """
173 if name in self.tags:
177 if name in self.tags:
174 raise TagAlreadyExistError("Tag %s already exists" % name)
178 raise TagAlreadyExistError("Tag %s already exists" % name)
179
175 commit = self.get_commit(commit_id=commit_id)
180 commit = self.get_commit(commit_id=commit_id)
176 local = kwargs.setdefault('local', False)
181 local = kwargs.setdefault('local', False)
177
182
178 if message is None:
183 if message is None:
179 message = "Added tag %s for commit %s" % (name, commit.short_id)
184 message = "Added tag %s for commit %s" % (name, commit.short_id)
180
185
181 date, tz = date_to_timestamp_plus_offset(date)
186 date, tz = date_to_timestamp_plus_offset(date)
182
187
183 self._remote.tag(
188 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
184 name, commit.raw_id, message, local, user, date, tz)
185 self._remote.invalidate_vcs_cache()
189 self._remote.invalidate_vcs_cache()
186
190
187 # Reinitialize tags
191 # Reinitialize tags
188 self.tags = self._get_tags()
192 self.tags = self._get_tags()
189 tag_id = self.tags[name]
193 tag_id = self.tags[name]
190
194
191 return self.get_commit(commit_id=tag_id)
195 return self.get_commit(commit_id=tag_id)
192
196
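A short, hedged usage example of `tag()` above; the repository path, tag name and user are invented.

repo = MercurialRepository('/srv/repos/example-hg')   # hypothetical path
tagged_commit = repo.tag(
    name='v1.2.3',
    user='Joe Doe <joe.doe@example.com>',
    commit_id=repo.commit_ids[-1])   # tag the current tip
print(tagged_commit.raw_id)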
193 def remove_tag(self, name, user, message=None, date=None):
197 def remove_tag(self, name, user, message=None, date=None):
194 """
198 """
195 Removes tag with the given `name`.
199 Removes tag with the given `name`.
196
200
197 :param name: name of the tag to be removed
201 :param name: name of the tag to be removed
198 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
202 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 :param message: message of the tag's removal commit
203 :param message: message of the tag's removal commit
200 :param date: date of tag's removal commit
204 :param date: date of tag's removal commit
201
205
202 :raises TagDoesNotExistError: if tag with given name does not exists
206 :raises TagDoesNotExistError: if tag with given name does not exists
203 """
207 """
204 if name not in self.tags:
208 if name not in self.tags:
205 raise TagDoesNotExistError("Tag %s does not exist" % name)
209 raise TagDoesNotExistError("Tag %s does not exist" % name)
210
206 if message is None:
211 if message is None:
207 message = "Removed tag %s" % name
212 message = "Removed tag %s" % name
208 local = False
213 local = False
209
214
210 date, tz = date_to_timestamp_plus_offset(date)
215 date, tz = date_to_timestamp_plus_offset(date)
211
216
212 self._remote.tag(name, nullid, message, local, user, date, tz)
217 self._remote.tag(name, nullid, message, local, user, date, tz)
213 self._remote.invalidate_vcs_cache()
218 self._remote.invalidate_vcs_cache()
214 self.tags = self._get_tags()
219 self.tags = self._get_tags()
215
220
216 @LazyProperty
221 @LazyProperty
217 def bookmarks(self):
222 def bookmarks(self):
218 """
223 """
219 Gets bookmarks for this repository
224 Gets bookmarks for this repository
220 """
225 """
221 return self._get_bookmarks()
226 return self._get_bookmarks()
222
227
223 def _get_bookmarks(self):
228 def _get_bookmarks(self):
224 if self.is_empty():
229 if self.is_empty():
225 return {}
230 return {}
226
231
227 def get_name(ctx):
232 def get_name(ctx):
228 return ctx[0]
233 return ctx[0]
229
234
230 _bookmarks = [
235 _bookmarks = [
231 (safe_unicode(n), hexlify(h)) for n, h in
236 (safe_unicode(n), hexlify(h)) for n, h in
232 self._remote.bookmarks().items()]
237 self._remote.bookmarks().items()]
233
238
234 return OrderedDict(sorted(_bookmarks, key=get_name))
239 return OrderedDict(sorted(_bookmarks, key=get_name))
235
240
236 def _get_all_commit_ids(self):
241 def _get_all_commit_ids(self):
237 return self._remote.get_all_commit_ids('visible')
242 return self._remote.get_all_commit_ids('visible')
238
243
239 def get_diff(
244 def get_diff(
240 self, commit1, commit2, path='', ignore_whitespace=False,
245 self, commit1, commit2, path='', ignore_whitespace=False,
241 context=3, path1=None):
246 context=3, path1=None):
242 """
247 """
243 Returns (git like) *diff*, as plain text. Shows changes introduced by
248 Returns (git like) *diff*, as plain text. Shows changes introduced by
244 `commit2` since `commit1`.
249 `commit2` since `commit1`.
245
250
246 :param commit1: Entry point from which diff is shown. Can be
251 :param commit1: Entry point from which diff is shown. Can be
247 ``self.EMPTY_COMMIT`` - in this case, patch showing all
252 ``self.EMPTY_COMMIT`` - in this case, patch showing all
248 the changes since empty state of the repository until `commit2`
253 the changes since empty state of the repository until `commit2`
249 :param commit2: Until which commit changes should be shown.
254 :param commit2: Until which commit changes should be shown.
250 :param ignore_whitespace: If set to ``True``, would not show whitespace
255 :param ignore_whitespace: If set to ``True``, would not show whitespace
251 changes. Defaults to ``False``.
256 changes. Defaults to ``False``.
252 :param context: How many lines before/after changed lines should be
257 :param context: How many lines before/after changed lines should be
253 shown. Defaults to ``3``.
258 shown. Defaults to ``3``.
254 """
259 """
255 self._validate_diff_commits(commit1, commit2)
260 self._validate_diff_commits(commit1, commit2)
256 if path1 is not None and path1 != path:
261 if path1 is not None and path1 != path:
257 raise ValueError("Diff of two different paths not supported.")
262 raise ValueError("Diff of two different paths not supported.")
258
263
259 if path:
264 if path:
260 file_filter = [self.path, path]
265 file_filter = [self.path, path]
261 else:
266 else:
262 file_filter = None
267 file_filter = None
263
268
264 diff = self._remote.diff(
269 diff = self._remote.diff(
265 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
270 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
266 opt_git=True, opt_ignorews=ignore_whitespace,
271 opt_git=True, opt_ignorews=ignore_whitespace,
267 context=context)
272 context=context)
268 return MercurialDiff(diff)
273 return MercurialDiff(diff)
269
274
270 def strip(self, commit_id, branch=None):
275 def strip(self, commit_id, branch=None):
271 self._remote.strip(commit_id, update=False, backup="none")
276 self._remote.strip(commit_id, update=False, backup="none")
272
277
273 self._remote.invalidate_vcs_cache()
278 self._remote.invalidate_vcs_cache()
274 self.commit_ids = self._get_all_commit_ids()
279 self._commit_ids_ver = time.time()
275 self._rebuild_cache(self.commit_ids)
280 # we updated _commit_ids_ver so accessing self.commit_ids will re-compute it
281 return len(self.commit_ids)
276
282
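The `_commit_ids_ver` bump in `strip()` above is what invalidates the `commit_ids` CachedProperty. The toy class below sketches the same version-counter idea in plain Python, without depending on zope.cachedescriptors, purely to illustrate the pattern.

class VersionCachedCommitIds(object):
    # toy re-implementation of the "recompute when the version changes" idea
    def __init__(self):
        self._ver = 0
        self._cached = None
        self._cached_ver = None

    @property
    def commit_ids(self):
        if self._cached is None or self._cached_ver != self._ver:
            self._cached = self._compute()       # stands in for the expensive call
            self._cached_ver = self._ver
        return self._cached

    def _compute(self):
        return ['9fceb02d0ae598e95dc970b74767f19372d61af8']   # placeholder data

    def invalidate(self):
        # mimics self._commit_ids_ver = time.time() in strip() above
        self._ver += 1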
277 def verify(self):
283 def verify(self):
278 verify = self._remote.verify()
284 verify = self._remote.verify()
279
285
280 self._remote.invalidate_vcs_cache()
286 self._remote.invalidate_vcs_cache()
281 return verify
287 return verify
282
288
283 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
289 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
284 if commit_id1 == commit_id2:
290 if commit_id1 == commit_id2:
285 return commit_id1
291 return commit_id1
286
292
287 ancestors = self._remote.revs_from_revspec(
293 ancestors = self._remote.revs_from_revspec(
288 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
294 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
289 other_path=repo2.path)
295 other_path=repo2.path)
290 return repo2[ancestors[0]].raw_id if ancestors else None
296 return repo2[ancestors[0]].raw_id if ancestors else None
291
297
292 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
298 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
293 if commit_id1 == commit_id2:
299 if commit_id1 == commit_id2:
294 commits = []
300 commits = []
295 else:
301 else:
296 if merge:
302 if merge:
297 indexes = self._remote.revs_from_revspec(
303 indexes = self._remote.revs_from_revspec(
298 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
304 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
299 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
305 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
300 else:
306 else:
301 indexes = self._remote.revs_from_revspec(
307 indexes = self._remote.revs_from_revspec(
302 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
308 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
303 commit_id1, other_path=repo2.path)
309 commit_id1, other_path=repo2.path)
304
310
305 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
311 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
306 for idx in indexes]
312 for idx in indexes]
307
313
308 return commits
314 return commits
309
315
310 @staticmethod
316 @staticmethod
311 def check_url(url, config):
317 def check_url(url, config):
312 """
318 """
313 Function will check the given url and try to verify if it's a valid
319 Function will check the given url and try to verify if it's a valid
314 link. Sometimes it may happen that mercurial will issue a basic
320 link. Sometimes it may happen that mercurial will issue a basic
315 auth request that can cause the whole API to hang when used from python
321 auth request that can cause the whole API to hang when used from python
316 or other external calls.
322 or other external calls.
317
323
318 On failures it'll raise urllib2.HTTPError; the exception is also thrown
324 On failures it'll raise urllib2.HTTPError; the exception is also thrown
319 when the return code is not 200
325 when the return code is not 200
320 """
326 """
321 # check first if it's not a local url
327 # check first if it's not a local url
322 if os.path.isdir(url) or url.startswith('file:'):
328 if os.path.isdir(url) or url.startswith('file:'):
323 return True
329 return True
324
330
325 # Request the _remote to verify the url
331 # Request the _remote to verify the url
326 return connection.Hg.check_url(url, config.serialize())
332 return connection.Hg.check_url(url, config.serialize())
327
333
328 @staticmethod
334 @staticmethod
329 def is_valid_repository(path):
335 def is_valid_repository(path):
330 return os.path.isdir(os.path.join(path, '.hg'))
336 return os.path.isdir(os.path.join(path, '.hg'))
331
337
332 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
338 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
333 """
339 """
334 Function will check for mercurial repository in given path. If there
340 Function will check for mercurial repository in given path. If there
335 is no repository in that path it will raise an exception unless
341 is no repository in that path it will raise an exception unless
336 `create` parameter is set to True - in that case repository would
342 `create` parameter is set to True - in that case repository would
337 be created.
343 be created.
338
344
339 If `src_url` is given, would try to clone repository from the
345 If `src_url` is given, would try to clone repository from the
340 location at given clone_point. Additionally it'll update the
346 location at given clone_point. Additionally it'll update the
341 working copy according to the `do_workspace_checkout` flag.
347 working copy according to the `do_workspace_checkout` flag.
342 """
348 """
343 if create and os.path.exists(self.path):
349 if create and os.path.exists(self.path):
344 raise RepositoryError(
350 raise RepositoryError(
345 "Cannot create repository at %s, location already exist"
351 "Cannot create repository at %s, location already exist"
346 % self.path)
352 % self.path)
347
353
348 if src_url:
354 if src_url:
349 url = str(self._get_url(src_url))
355 url = str(self._get_url(src_url))
350 MercurialRepository.check_url(url, self.config)
356 MercurialRepository.check_url(url, self.config)
351
357
352 self._remote.clone(url, self.path, do_workspace_checkout)
358 self._remote.clone(url, self.path, do_workspace_checkout)
353
359
354 # Don't try to create if we've already cloned repo
360 # Don't try to create if we've already cloned repo
355 create = False
361 create = False
356
362
357 if create:
363 if create:
358 os.makedirs(self.path, mode=0o755)
364 os.makedirs(self.path, mode=0o755)
359
365
360 self._remote.localrepository(create)
366 self._remote.localrepository(create)
361
367
362 @LazyProperty
368 @LazyProperty
363 def in_memory_commit(self):
369 def in_memory_commit(self):
364 return MercurialInMemoryCommit(self)
370 return MercurialInMemoryCommit(self)
365
371
366 @LazyProperty
372 @LazyProperty
367 def description(self):
373 def description(self):
368 description = self._remote.get_config_value(
374 description = self._remote.get_config_value(
369 'web', 'description', untrusted=True)
375 'web', 'description', untrusted=True)
370 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
376 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
371
377
372 @LazyProperty
378 @LazyProperty
373 def contact(self):
379 def contact(self):
374 contact = (
380 contact = (
375 self._remote.get_config_value("web", "contact") or
381 self._remote.get_config_value("web", "contact") or
376 self._remote.get_config_value("ui", "username"))
382 self._remote.get_config_value("ui", "username"))
377 return safe_unicode(contact or self.DEFAULT_CONTACT)
383 return safe_unicode(contact or self.DEFAULT_CONTACT)
378
384
379 @LazyProperty
385 @LazyProperty
380 def last_change(self):
386 def last_change(self):
381 """
387 """
382 Returns last change made on this repository as
388 Returns last change made on this repository as
383 `datetime.datetime` object.
389 `datetime.datetime` object.
384 """
390 """
385 try:
391 try:
386 return self.get_commit().date
392 return self.get_commit().date
387 except RepositoryError:
393 except RepositoryError:
388 tzoffset = makedate()[1]
394 tzoffset = makedate()[1]
389 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
395 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
390
396
391 def _get_fs_mtime(self):
397 def _get_fs_mtime(self):
392 # fallback to filesystem
398 # fallback to filesystem
393 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
399 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
394 st_path = os.path.join(self.path, '.hg', "store")
400 st_path = os.path.join(self.path, '.hg', "store")
395 if os.path.exists(cl_path):
401 if os.path.exists(cl_path):
396 return os.stat(cl_path).st_mtime
402 return os.stat(cl_path).st_mtime
397 else:
403 else:
398 return os.stat(st_path).st_mtime
404 return os.stat(st_path).st_mtime
399
405
400 def _get_url(self, url):
406 def _get_url(self, url):
401 """
407 """
402 Returns normalized url. If schema is not given, would fall back
408 Returns normalized url. If schema is not given, would fall back
403 to the filesystem
409 to the filesystem
404 (``file:///``) schema.
410 (``file:///``) schema.
405 """
411 """
406 url = url.encode('utf8')
412 url = url.encode('utf8')
407 if url != 'default' and '://' not in url:
413 if url != 'default' and '://' not in url:
408 url = "file:" + urllib.pathname2url(url)
414 url = "file:" + urllib.pathname2url(url)
409 return url
415 return url
410
416
411 def get_hook_location(self):
417 def get_hook_location(self):
412 """
418 """
413 returns absolute path to location where hooks are stored
419 returns absolute path to location where hooks are stored
414 """
420 """
415 return os.path.join(self.path, '.hg', '.hgrc')
421 return os.path.join(self.path, '.hg', '.hgrc')
416
422
417 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
423 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
418 """
424 """
419 Returns ``MercurialCommit`` object representing repository's
425 Returns ``MercurialCommit`` object representing repository's
420 commit at the given `commit_id` or `commit_idx`.
426 commit at the given `commit_id` or `commit_idx`.
421 """
427 """
422 if self.is_empty():
428 if self.is_empty():
423 raise EmptyRepositoryError("There are no commits yet")
429 raise EmptyRepositoryError("There are no commits yet")
424
430
425 if commit_id is not None:
431 if commit_id is not None:
426 self._validate_commit_id(commit_id)
432 self._validate_commit_id(commit_id)
427 try:
433 try:
428 # we have cached idx, use it without contacting the remote
434 # we have cached idx, use it without contacting the remote
429 idx = self._commit_ids[commit_id]
435 idx = self._commit_ids[commit_id]
430 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
436 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
431 except KeyError:
437 except KeyError:
432 pass
438 pass
433
439
434 elif commit_idx is not None:
440 elif commit_idx is not None:
435 self._validate_commit_idx(commit_idx)
441 self._validate_commit_idx(commit_idx)
436 try:
442 try:
437 _commit_id = self.commit_ids[commit_idx]
443 _commit_id = self.commit_ids[commit_idx]
438 if commit_idx < 0:
444 if commit_idx < 0:
439 commit_idx = self.commit_ids.index(_commit_id)
445 commit_idx = self.commit_ids.index(_commit_id)
440
446
441 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
447 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
442 except IndexError:
448 except IndexError:
443 commit_id = commit_idx
449 commit_id = commit_idx
444 else:
450 else:
445 commit_id = "tip"
451 commit_id = "tip"
446
452
447 if isinstance(commit_id, unicode):
453 if isinstance(commit_id, unicode):
448 commit_id = safe_str(commit_id)
454 commit_id = safe_str(commit_id)
449
455
450 try:
456 try:
451 raw_id, idx = self._remote.lookup(commit_id, both=True)
457 raw_id, idx = self._remote.lookup(commit_id, both=True)
452 except CommitDoesNotExistError:
458 except CommitDoesNotExistError:
453 msg = "Commit %s does not exist for %s" % (commit_id, self.name)
459 msg = "Commit {} does not exist for {}".format(
460 *map(safe_str, [commit_id, self.name]))
454 raise CommitDoesNotExistError(msg)
461 raise CommitDoesNotExistError(msg)
455
462
456 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
463 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
457
464
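A hedged usage sketch of `get_commit()` above; the repository path is hypothetical. A commit can be addressed by full commit id, by index into `commit_ids`, or left unset to resolve the `tip`.

repo = MercurialRepository('/srv/repos/example-hg')   # hypothetical path
tip = repo.get_commit()                            # defaults to 'tip'
first = repo.get_commit(commit_idx=0)              # by index into commit_ids
same_tip = repo.get_commit(commit_id=tip.raw_id)   # by full commit id
print(tip.raw_id == same_tip.raw_id)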
458 def get_commits(
465 def get_commits(
459 self, start_id=None, end_id=None, start_date=None, end_date=None,
466 self, start_id=None, end_id=None, start_date=None, end_date=None,
460 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
467 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
461 """
468 """
462 Returns generator of ``MercurialCommit`` objects from start to end
469 Returns generator of ``MercurialCommit`` objects from start to end
463 (both are inclusive)
470 (both are inclusive)
464
471
465 :param start_id: None, str(commit_id)
472 :param start_id: None, str(commit_id)
466 :param end_id: None, str(commit_id)
473 :param end_id: None, str(commit_id)
467 :param start_date: if specified, commits with commit date less than
474 :param start_date: if specified, commits with commit date less than
468 ``start_date`` would be filtered out from returned set
475 ``start_date`` would be filtered out from returned set
469 :param end_date: if specified, commits with commit date greater than
476 :param end_date: if specified, commits with commit date greater than
470 ``end_date`` would be filtered out from returned set
477 ``end_date`` would be filtered out from returned set
471 :param branch_name: if specified, commits not reachable from given
478 :param branch_name: if specified, commits not reachable from given
472 branch would be filtered out from returned set
479 branch would be filtered out from returned set
473 :param show_hidden: Show hidden commits such as obsolete or hidden from
480 :param show_hidden: Show hidden commits such as obsolete or hidden from
474 Mercurial evolve
481 Mercurial evolve
475 :raise BranchDoesNotExistError: If given ``branch_name`` does not
482 :raise BranchDoesNotExistError: If given ``branch_name`` does not
476 exist.
483 exist.
477 :raise CommitDoesNotExistError: If commit for given ``start`` or
484 :raise CommitDoesNotExistError: If commit for given ``start`` or
478 ``end`` could not be found.
485 ``end`` could not be found.
479 """
486 """
480 # actually we should check now if it's not an empty repo
487 # actually we should check now if it's not an empty repo
481 if self.is_empty():
488 if self.is_empty():
482 raise EmptyRepositoryError("There are no commits yet")
489 raise EmptyRepositoryError("There are no commits yet")
483 self._validate_branch_name(branch_name)
490 self._validate_branch_name(branch_name)
484
491
485 branch_ancestors = False
492 branch_ancestors = False
486 if start_id is not None:
493 if start_id is not None:
487 self._validate_commit_id(start_id)
494 self._validate_commit_id(start_id)
488 c_start = self.get_commit(commit_id=start_id)
495 c_start = self.get_commit(commit_id=start_id)
489 start_pos = self._commit_ids[c_start.raw_id]
496 start_pos = self._commit_ids[c_start.raw_id]
490 else:
497 else:
491 start_pos = None
498 start_pos = None
492
499
493 if end_id is not None:
500 if end_id is not None:
494 self._validate_commit_id(end_id)
501 self._validate_commit_id(end_id)
495 c_end = self.get_commit(commit_id=end_id)
502 c_end = self.get_commit(commit_id=end_id)
496 end_pos = max(0, self._commit_ids[c_end.raw_id])
503 end_pos = max(0, self._commit_ids[c_end.raw_id])
497 else:
504 else:
498 end_pos = None
505 end_pos = None
499
506
500 if None not in [start_id, end_id] and start_pos > end_pos:
507 if None not in [start_id, end_id] and start_pos > end_pos:
501 raise RepositoryError(
508 raise RepositoryError(
502 "Start commit '%s' cannot be after end commit '%s'" %
509 "Start commit '%s' cannot be after end commit '%s'" %
503 (start_id, end_id))
510 (start_id, end_id))
504
511
505 if end_pos is not None:
512 if end_pos is not None:
506 end_pos += 1
513 end_pos += 1
507
514
508 commit_filter = []
515 commit_filter = []
509
516
510 if branch_name and not branch_ancestors:
517 if branch_name and not branch_ancestors:
511 commit_filter.append('branch("%s")' % (branch_name,))
518 commit_filter.append('branch("%s")' % (branch_name,))
512 elif branch_name and branch_ancestors:
519 elif branch_name and branch_ancestors:
513 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
520 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
514
521
515 if start_date and not end_date:
522 if start_date and not end_date:
516 commit_filter.append('date(">%s")' % (start_date,))
523 commit_filter.append('date(">%s")' % (start_date,))
517 if end_date and not start_date:
524 if end_date and not start_date:
518 commit_filter.append('date("<%s")' % (end_date,))
525 commit_filter.append('date("<%s")' % (end_date,))
519 if start_date and end_date:
526 if start_date and end_date:
520 commit_filter.append(
527 commit_filter.append(
521 'date(">%s") and date("<%s")' % (start_date, end_date))
528 'date(">%s") and date("<%s")' % (start_date, end_date))
522
529
523 if not show_hidden:
530 if not show_hidden:
524 commit_filter.append('not obsolete()')
531 commit_filter.append('not obsolete()')
525 commit_filter.append('not hidden()')
532 commit_filter.append('not hidden()')
526
533
527 # TODO: johbo: Figure out a simpler way for this solution
534 # TODO: johbo: Figure out a simpler way for this solution
528 collection_generator = CollectionGenerator
535 collection_generator = CollectionGenerator
529 if commit_filter:
536 if commit_filter:
530 commit_filter = ' and '.join(map(safe_str, commit_filter))
537 commit_filter = ' and '.join(map(safe_str, commit_filter))
531 revisions = self._remote.rev_range([commit_filter])
538 revisions = self._remote.rev_range([commit_filter])
532 collection_generator = MercurialIndexBasedCollectionGenerator
539 collection_generator = MercurialIndexBasedCollectionGenerator
533 else:
540 else:
534 revisions = self.commit_ids
541 revisions = self.commit_ids
535
542
536 if start_pos or end_pos:
543 if start_pos or end_pos:
537 revisions = revisions[start_pos:end_pos]
544 revisions = revisions[start_pos:end_pos]
538
545
539 return collection_generator(self, revisions, pre_load=pre_load)
546 return collection_generator(self, revisions, pre_load=pre_load)
540
547
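# Simplified, standalone version of the revset filter that get_commits()
# above assembles before calling self._remote.rev_range(); names here are
# illustrative only.
def build_hg_revset(branch_name=None, start_date=None, end_date=None,
                    show_hidden=False):
    parts = []
    if branch_name:
        parts.append('branch("%s")' % branch_name)
    if start_date and not end_date:
        parts.append('date(">%s")' % start_date)
    if end_date and not start_date:
        parts.append('date("<%s")' % end_date)
    if start_date and end_date:
        parts.append('date(">%s") and date("<%s")' % (start_date, end_date))
    if not show_hidden:
        parts.append('not obsolete()')
        parts.append('not hidden()')
    return ' and '.join(parts)

# build_hg_revset('default')
# -> 'branch("default") and not obsolete() and not hidden()'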
541 def pull(self, url, commit_ids=None):
548 def pull(self, url, commit_ids=None):
542 """
549 """
543 Pull changes from external location.
550 Pull changes from external location.
544
551
545 :param commit_ids: Optional. Can be set to a list of commit ids
552 :param commit_ids: Optional. Can be set to a list of commit ids
546 which shall be pulled from the other repository.
553 which shall be pulled from the other repository.
547 """
554 """
548 url = self._get_url(url)
555 url = self._get_url(url)
549 self._remote.pull(url, commit_ids=commit_ids)
556 self._remote.pull(url, commit_ids=commit_ids)
550 self._remote.invalidate_vcs_cache()
557 self._remote.invalidate_vcs_cache()
551
558
552 def fetch(self, url, commit_ids=None):
559 def fetch(self, url, commit_ids=None):
553 """
560 """
554 Backward compatibility with GIT fetch==pull
561 Backward compatibility with GIT fetch==pull
555 """
562 """
556 return self.pull(url, commit_ids=commit_ids)
563 return self.pull(url, commit_ids=commit_ids)
557
564
558 def push(self, url):
565 def push(self, url):
559 url = self._get_url(url)
566 url = self._get_url(url)
560 self._remote.sync_push(url)
567 self._remote.sync_push(url)
561
568
562 def _local_clone(self, clone_path):
569 def _local_clone(self, clone_path):
563 """
570 """
564 Create a local clone of the current repo.
571 Create a local clone of the current repo.
565 """
572 """
566 self._remote.clone(self.path, clone_path, update_after_clone=True,
573 self._remote.clone(self.path, clone_path, update_after_clone=True,
567 hooks=False)
574 hooks=False)
568
575
569 def _update(self, revision, clean=False):
576 def _update(self, revision, clean=False):
570 """
577 """
571 Update the working copy to the specified revision.
578 Update the working copy to the specified revision.
572 """
579 """
573 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
580 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
574 self._remote.update(revision, clean=clean)
581 self._remote.update(revision, clean=clean)
575
582
576 def _identify(self):
583 def _identify(self):
577 """
584 """
578 Return the current state of the working directory.
585 Return the current state of the working directory.
579 """
586 """
580 return self._remote.identify().strip().rstrip('+')
587 return self._remote.identify().strip().rstrip('+')
581
588
582 def _heads(self, branch=None):
589 def _heads(self, branch=None):
583 """
590 """
584 Return the commit ids of the repository heads.
591 Return the commit ids of the repository heads.
585 """
592 """
586 return self._remote.heads(branch=branch).strip().split(' ')
593 return self._remote.heads(branch=branch).strip().split(' ')
587
594
588 def _ancestor(self, revision1, revision2):
595 def _ancestor(self, revision1, revision2):
589 """
596 """
590 Return the common ancestor of the two revisions.
597 Return the common ancestor of the two revisions.
591 """
598 """
592 return self._remote.ancestor(revision1, revision2)
599 return self._remote.ancestor(revision1, revision2)
593
600
594 def _local_push(
601 def _local_push(
595 self, revision, repository_path, push_branches=False,
602 self, revision, repository_path, push_branches=False,
596 enable_hooks=False):
603 enable_hooks=False):
597 """
604 """
598 Push the given revision to the specified repository.
605 Push the given revision to the specified repository.
599
606
600 :param push_branches: allow to create branches in the target repo.
607 :param push_branches: allow to create branches in the target repo.
601 """
608 """
602 self._remote.push(
609 self._remote.push(
603 [revision], repository_path, hooks=enable_hooks,
610 [revision], repository_path, hooks=enable_hooks,
604 push_branches=push_branches)
611 push_branches=push_branches)
605
612
606 def _local_merge(self, target_ref, merge_message, user_name, user_email,
613 def _local_merge(self, target_ref, merge_message, user_name, user_email,
607 source_ref, use_rebase=False, dry_run=False):
614 source_ref, use_rebase=False, dry_run=False):
608 """
615 """
609 Merge the given source_revision into the checked out revision.
616 Merge the given source_revision into the checked out revision.
610
617
611 Returns the commit id of the merge and a boolean indicating if the
618 Returns the commit id of the merge and a boolean indicating if the
612 commit needs to be pushed.
619 commit needs to be pushed.
613 """
620 """
614 self._update(target_ref.commit_id, clean=True)
621 self._update(target_ref.commit_id, clean=True)
615
622
616 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
623 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
617 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
624 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
618
625
619 if ancestor == source_ref.commit_id:
626 if ancestor == source_ref.commit_id:
620 # Nothing to do, the changes were already integrated
627 # Nothing to do, the changes were already integrated
621 return target_ref.commit_id, False
628 return target_ref.commit_id, False
622
629
623 elif ancestor == target_ref.commit_id and is_the_same_branch:
630 elif ancestor == target_ref.commit_id and is_the_same_branch:
624 # In this case we should force a commit message
631 # In this case we should force a commit message
625 return source_ref.commit_id, True
632 return source_ref.commit_id, True
626
633
627 if use_rebase:
634 if use_rebase:
628 try:
635 try:
629 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
636 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
630 target_ref.commit_id)
637 target_ref.commit_id)
631 self.bookmark(bookmark_name, revision=source_ref.commit_id)
638 self.bookmark(bookmark_name, revision=source_ref.commit_id)
632 self._remote.rebase(
639 self._remote.rebase(
633 source=source_ref.commit_id, dest=target_ref.commit_id)
640 source=source_ref.commit_id, dest=target_ref.commit_id)
634 self._remote.invalidate_vcs_cache()
641 self._remote.invalidate_vcs_cache()
635 self._update(bookmark_name, clean=True)
642 self._update(bookmark_name, clean=True)
636 return self._identify(), True
643 return self._identify(), True
637 except RepositoryError:
644 except RepositoryError:
638 # The rebase-abort may raise another exception which 'hides'
645 # The rebase-abort may raise another exception which 'hides'
639 # the original one, therefore we log it here.
646 # the original one, therefore we log it here.
640 log.exception('Error while rebasing shadow repo during merge.')
647 log.exception('Error while rebasing shadow repo during merge.')
641
648
642 # Cleanup any rebase leftovers
649 # Cleanup any rebase leftovers
643 self._remote.invalidate_vcs_cache()
650 self._remote.invalidate_vcs_cache()
644 self._remote.rebase(abort=True)
651 self._remote.rebase(abort=True)
645 self._remote.invalidate_vcs_cache()
652 self._remote.invalidate_vcs_cache()
646 self._remote.update(clean=True)
653 self._remote.update(clean=True)
647 raise
654 raise
648 else:
655 else:
649 try:
656 try:
650 self._remote.merge(source_ref.commit_id)
657 self._remote.merge(source_ref.commit_id)
651 self._remote.invalidate_vcs_cache()
658 self._remote.invalidate_vcs_cache()
652 self._remote.commit(
659 self._remote.commit(
653 message=safe_str(merge_message),
660 message=safe_str(merge_message),
654 username=safe_str('%s <%s>' % (user_name, user_email)))
661 username=safe_str('%s <%s>' % (user_name, user_email)))
655 self._remote.invalidate_vcs_cache()
662 self._remote.invalidate_vcs_cache()
656 return self._identify(), True
663 return self._identify(), True
657 except RepositoryError:
664 except RepositoryError:
658 # Cleanup any merge leftovers
665 # Cleanup any merge leftovers
659 self._remote.update(clean=True)
666 self._remote.update(clean=True)
660 raise
667 raise
661
668
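# Standalone restatement of the ancestor checks in _local_merge() above
# (illustration only, not a drop-in replacement).
def classify_local_merge(ancestor, target_id, source_id, same_branch):
    if ancestor == source_id:
        return 'already-integrated'   # keep the target commit, nothing to push
    if ancestor == target_id and same_branch:
        return 'fast-forward'         # reuse the source commit, needs a push
    return 'merge-or-rebase'          # a new commit has to be created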
662 def _local_close(self, target_ref, user_name, user_email,
669 def _local_close(self, target_ref, user_name, user_email,
663 source_ref, close_message=''):
670 source_ref, close_message=''):
664 """
671 """
665 Close the branch of the given source_revision
672 Close the branch of the given source_revision
666
673
667 Returns the commit id of the close and a boolean indicating if the
674 Returns the commit id of the close and a boolean indicating if the
668 commit needs to be pushed.
675 commit needs to be pushed.
669 """
676 """
670 self._update(source_ref.commit_id)
677 self._update(source_ref.commit_id)
671 message = close_message or "Closing branch: `{}`".format(source_ref.name)
678 message = close_message or "Closing branch: `{}`".format(source_ref.name)
672 try:
679 try:
673 self._remote.commit(
680 self._remote.commit(
674 message=safe_str(message),
681 message=safe_str(message),
675 username=safe_str('%s <%s>' % (user_name, user_email)),
682 username=safe_str('%s <%s>' % (user_name, user_email)),
676 close_branch=True)
683 close_branch=True)
677 self._remote.invalidate_vcs_cache()
684 self._remote.invalidate_vcs_cache()
678 return self._identify(), True
685 return self._identify(), True
679 except RepositoryError:
686 except RepositoryError:
680 # Cleanup any commit leftovers
687 # Cleanup any commit leftovers
681 self._remote.update(clean=True)
688 self._remote.update(clean=True)
682 raise
689 raise
683
690
684 def _is_the_same_branch(self, target_ref, source_ref):
691 def _is_the_same_branch(self, target_ref, source_ref):
685 return (
692 return (
686 self._get_branch_name(target_ref) ==
693 self._get_branch_name(target_ref) ==
687 self._get_branch_name(source_ref))
694 self._get_branch_name(source_ref))
688
695
689 def _get_branch_name(self, ref):
696 def _get_branch_name(self, ref):
690 if ref.type == 'branch':
697 if ref.type == 'branch':
691 return ref.name
698 return ref.name
692 return self._remote.ctx_branch(ref.commit_id)
699 return self._remote.ctx_branch(ref.commit_id)
693
700
694 def _maybe_prepare_merge_workspace(
701 def _maybe_prepare_merge_workspace(
695 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
702 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
696 shadow_repository_path = self._get_shadow_repository_path(
703 shadow_repository_path = self._get_shadow_repository_path(
697 repo_id, workspace_id)
704 repo_id, workspace_id)
698 if not os.path.exists(shadow_repository_path):
705 if not os.path.exists(shadow_repository_path):
699 self._local_clone(shadow_repository_path)
706 self._local_clone(shadow_repository_path)
700 log.debug(
707 log.debug(
701 'Prepared shadow repository in %s', shadow_repository_path)
708 'Prepared shadow repository in %s', shadow_repository_path)
702
709
703 return shadow_repository_path
710 return shadow_repository_path
704
711
705 def _merge_repo(self, repo_id, workspace_id, target_ref,
712 def _merge_repo(self, repo_id, workspace_id, target_ref,
706 source_repo, source_ref, merge_message,
713 source_repo, source_ref, merge_message,
707 merger_name, merger_email, dry_run=False,
714 merger_name, merger_email, dry_run=False,
708 use_rebase=False, close_branch=False):
715 use_rebase=False, close_branch=False):
709
716
710 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
717 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
711 'rebase' if use_rebase else 'merge', dry_run)
718 'rebase' if use_rebase else 'merge', dry_run)
712 if target_ref.commit_id not in self._heads():
719 if target_ref.commit_id not in self._heads():
713 return MergeResponse(
720 return MergeResponse(
714 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
721 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
715 metadata={'target_ref': target_ref})
722 metadata={'target_ref': target_ref})
716
723
717 try:
724 try:
718 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
725 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
719 heads = '\n,'.join(self._heads(target_ref.name))
726 heads = '\n,'.join(self._heads(target_ref.name))
720 metadata = {
727 metadata = {
721 'target_ref': target_ref,
728 'target_ref': target_ref,
722 'source_ref': source_ref,
729 'source_ref': source_ref,
723 'heads': heads
730 'heads': heads
724 }
731 }
725 return MergeResponse(
732 return MergeResponse(
726 False, False, None,
733 False, False, None,
727 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
734 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
728 metadata=metadata)
735 metadata=metadata)
729 except CommitDoesNotExistError:
736 except CommitDoesNotExistError:
730 log.exception('Failure when looking up branch heads on hg target')
737 log.exception('Failure when looking up branch heads on hg target')
731 return MergeResponse(
738 return MergeResponse(
732 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
739 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
733 metadata={'target_ref': target_ref})
740 metadata={'target_ref': target_ref})
734
741
735 shadow_repository_path = self._maybe_prepare_merge_workspace(
742 shadow_repository_path = self._maybe_prepare_merge_workspace(
736 repo_id, workspace_id, target_ref, source_ref)
743 repo_id, workspace_id, target_ref, source_ref)
737 shadow_repo = self._get_shadow_instance(shadow_repository_path)
744 shadow_repo = self._get_shadow_instance(shadow_repository_path)
738
745
739 log.debug('Pulling in target reference %s', target_ref)
746 log.debug('Pulling in target reference %s', target_ref)
740 self._validate_pull_reference(target_ref)
747 self._validate_pull_reference(target_ref)
741 shadow_repo._local_pull(self.path, target_ref)
748 shadow_repo._local_pull(self.path, target_ref)
742
749
743 try:
750 try:
744 log.debug('Pulling in source reference %s', source_ref)
751 log.debug('Pulling in source reference %s', source_ref)
745 source_repo._validate_pull_reference(source_ref)
752 source_repo._validate_pull_reference(source_ref)
746 shadow_repo._local_pull(source_repo.path, source_ref)
753 shadow_repo._local_pull(source_repo.path, source_ref)
747 except CommitDoesNotExistError:
754 except CommitDoesNotExistError:
748 log.exception('Failure when doing local pull on hg shadow repo')
755 log.exception('Failure when doing local pull on hg shadow repo')
749 return MergeResponse(
756 return MergeResponse(
750 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
757 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
751 metadata={'source_ref': source_ref})
758 metadata={'source_ref': source_ref})
752
759
753 merge_ref = None
760 merge_ref = None
754 merge_commit_id = None
761 merge_commit_id = None
755 close_commit_id = None
762 close_commit_id = None
756 merge_failure_reason = MergeFailureReason.NONE
763 merge_failure_reason = MergeFailureReason.NONE
757 metadata = {}
764 metadata = {}
758
765
759 # enforce that close branch should be used only in case we source from
766 # enforce that close branch should be used only in case we source from
760 # an actual Branch
767 # an actual Branch
761 close_branch = close_branch and source_ref.type == 'branch'
768 close_branch = close_branch and source_ref.type == 'branch'
762
769
763 # don't allow to close branch if source and target are the same
770 # don't allow to close branch if source and target are the same
764 close_branch = close_branch and source_ref.name != target_ref.name
771 close_branch = close_branch and source_ref.name != target_ref.name
765
772
766 needs_push_on_close = False
773 needs_push_on_close = False
767 if close_branch and not use_rebase and not dry_run:
774 if close_branch and not use_rebase and not dry_run:
768 try:
775 try:
769 close_commit_id, needs_push_on_close = shadow_repo._local_close(
776 close_commit_id, needs_push_on_close = shadow_repo._local_close(
770 target_ref, merger_name, merger_email, source_ref)
777 target_ref, merger_name, merger_email, source_ref)
771 merge_possible = True
778 merge_possible = True
772 except RepositoryError:
779 except RepositoryError:
773 log.exception('Failure when doing close branch on '
780 log.exception('Failure when doing close branch on '
774 'shadow repo: %s', shadow_repo)
781 'shadow repo: %s', shadow_repo)
775 merge_possible = False
782 merge_possible = False
776 merge_failure_reason = MergeFailureReason.MERGE_FAILED
783 merge_failure_reason = MergeFailureReason.MERGE_FAILED
777 else:
784 else:
778 merge_possible = True
785 merge_possible = True
779
786
780 needs_push = False
787 needs_push = False
781 if merge_possible:
788 if merge_possible:
782 try:
789 try:
783 merge_commit_id, needs_push = shadow_repo._local_merge(
790 merge_commit_id, needs_push = shadow_repo._local_merge(
784 target_ref, merge_message, merger_name, merger_email,
791 target_ref, merge_message, merger_name, merger_email,
785 source_ref, use_rebase=use_rebase, dry_run=dry_run)
792 source_ref, use_rebase=use_rebase, dry_run=dry_run)
786 merge_possible = True
793 merge_possible = True
787
794
788 # read the state of the close action, since it
795 # read the state of the close action, since it
789 # may have required a push
796 # may have required a push
790 needs_push = needs_push or needs_push_on_close
797 needs_push = needs_push or needs_push_on_close
791
798
792 # Set a bookmark pointing to the merge commit. This bookmark
799 # Set a bookmark pointing to the merge commit. This bookmark
793 # may be used to easily identify the last successful merge
800 # may be used to easily identify the last successful merge
794 # commit in the shadow repository.
801 # commit in the shadow repository.
795 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
802 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
796 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
803 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
797 except SubrepoMergeError:
804 except SubrepoMergeError:
798 log.exception(
805 log.exception(
799 'Subrepo merge error during local merge on hg shadow repo.')
806 'Subrepo merge error during local merge on hg shadow repo.')
800 merge_possible = False
807 merge_possible = False
801 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
808 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
802 needs_push = False
809 needs_push = False
803 except RepositoryError:
810 except RepositoryError:
804 log.exception('Failure when doing local merge on hg shadow repo')
811 log.exception('Failure when doing local merge on hg shadow repo')
805 merge_possible = False
812 merge_possible = False
806 merge_failure_reason = MergeFailureReason.MERGE_FAILED
813 merge_failure_reason = MergeFailureReason.MERGE_FAILED
807 needs_push = False
814 needs_push = False
808
815
809 if merge_possible and not dry_run:
816 if merge_possible and not dry_run:
810 if needs_push:
817 if needs_push:
811 # In case the target is a bookmark, update it, so after pushing
818 # In case the target is a bookmark, update it, so after pushing
812 # the bookmark is also updated in the target.
819 # the bookmark is also updated in the target.
813 if target_ref.type == 'book':
820 if target_ref.type == 'book':
814 shadow_repo.bookmark(
821 shadow_repo.bookmark(
815 target_ref.name, revision=merge_commit_id)
822 target_ref.name, revision=merge_commit_id)
816 try:
823 try:
817 shadow_repo_with_hooks = self._get_shadow_instance(
824 shadow_repo_with_hooks = self._get_shadow_instance(
818 shadow_repository_path,
825 shadow_repository_path,
819 enable_hooks=True)
826 enable_hooks=True)
820 # This is the actual merge action, we push from shadow
827 # This is the actual merge action, we push from shadow
821 # into origin.
828 # into origin.
822 # Note: the push_branches option will push any new branch
829 # Note: the push_branches option will push any new branch
823 # defined in the source repository to the target. This may
830 # defined in the source repository to the target. This may
824 # be dangerous as branches are permanent in Mercurial.
831 # be dangerous as branches are permanent in Mercurial.
825 # This feature was requested in issue #441.
832 # This feature was requested in issue #441.
826 shadow_repo_with_hooks._local_push(
833 shadow_repo_with_hooks._local_push(
827 merge_commit_id, self.path, push_branches=True,
834 merge_commit_id, self.path, push_branches=True,
828 enable_hooks=True)
835 enable_hooks=True)
829
836
830 # maybe we also need to push the close_commit_id
837 # maybe we also need to push the close_commit_id
831 if close_commit_id:
838 if close_commit_id:
832 shadow_repo_with_hooks._local_push(
839 shadow_repo_with_hooks._local_push(
833 close_commit_id, self.path, push_branches=True,
840 close_commit_id, self.path, push_branches=True,
834 enable_hooks=True)
841 enable_hooks=True)
835 merge_succeeded = True
842 merge_succeeded = True
836 except RepositoryError:
843 except RepositoryError:
837 log.exception(
844 log.exception(
838 'Failure when doing local push from the shadow '
845 'Failure when doing local push from the shadow '
839 'repository to the target repository at %s.', self.path)
846 'repository to the target repository at %s.', self.path)
840 merge_succeeded = False
847 merge_succeeded = False
841 merge_failure_reason = MergeFailureReason.PUSH_FAILED
848 merge_failure_reason = MergeFailureReason.PUSH_FAILED
842 metadata['target'] = 'hg shadow repo'
849 metadata['target'] = 'hg shadow repo'
843 metadata['merge_commit'] = merge_commit_id
850 metadata['merge_commit'] = merge_commit_id
844 else:
851 else:
845 merge_succeeded = True
852 merge_succeeded = True
846 else:
853 else:
847 merge_succeeded = False
854 merge_succeeded = False
848
855
849 return MergeResponse(
856 return MergeResponse(
850 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
857 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
851 metadata=metadata)
858 metadata=metadata)
852
859
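# The guards applied to `close_branch` inside _merge_repo() above, pulled out
# as a standalone helper for illustration (not part of the changeset).
def close_branch_applies(close_branch, source_ref, target_ref, use_rebase, dry_run):
    close_branch = close_branch and source_ref.type == 'branch'
    close_branch = close_branch and source_ref.name != target_ref.name
    return close_branch and not use_rebase and not dry_run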
853 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
860 def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
854 config = self.config.copy()
861 config = self.config.copy()
855 if not enable_hooks:
862 if not enable_hooks:
856 config.clear_section('hooks')
863 config.clear_section('hooks')
857 return MercurialRepository(shadow_repository_path, config)
864 return MercurialRepository(shadow_repository_path, config)
858
865
859 def _validate_pull_reference(self, reference):
866 def _validate_pull_reference(self, reference):
860 if not (reference.name in self.bookmarks or
867 if not (reference.name in self.bookmarks or
861 reference.name in self.branches or
868 reference.name in self.branches or
862 self.get_commit(reference.commit_id)):
869 self.get_commit(reference.commit_id)):
863 raise CommitDoesNotExistError(
870 raise CommitDoesNotExistError(
864 'Unknown branch, bookmark or commit id')
871 'Unknown branch, bookmark or commit id')
865
872
866 def _local_pull(self, repository_path, reference):
873 def _local_pull(self, repository_path, reference):
867 """
874 """
868 Fetch a branch, bookmark or commit from a local repository.
875 Fetch a branch, bookmark or commit from a local repository.
869 """
876 """
870 repository_path = os.path.abspath(repository_path)
877 repository_path = os.path.abspath(repository_path)
871 if repository_path == self.path:
878 if repository_path == self.path:
872 raise ValueError('Cannot pull from the same repository')
879 raise ValueError('Cannot pull from the same repository')
873
880
874 reference_type_to_option_name = {
881 reference_type_to_option_name = {
875 'book': 'bookmark',
882 'book': 'bookmark',
876 'branch': 'branch',
883 'branch': 'branch',
877 }
884 }
878 option_name = reference_type_to_option_name.get(
885 option_name = reference_type_to_option_name.get(
879 reference.type, 'revision')
886 reference.type, 'revision')
880
887
881 if option_name == 'revision':
888 if option_name == 'revision':
882 ref = reference.commit_id
889 ref = reference.commit_id
883 else:
890 else:
884 ref = reference.name
891 ref = reference.name
885
892
886 options = {option_name: [ref]}
893 options = {option_name: [ref]}
887 self._remote.pull_cmd(repository_path, hooks=False, **options)
894 self._remote.pull_cmd(repository_path, hooks=False, **options)
888 self._remote.invalidate_vcs_cache()
895 self._remote.invalidate_vcs_cache()
889
896
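# How _local_pull() above maps a Reference onto pull_cmd options
# (illustrative values, not from the original sources):
#   Reference('book',   'pr-merge', <sha>)  -> {'bookmark': ['pr-merge']}
#   Reference('branch', 'default',  <sha>)  -> {'branch':   ['default']}
#   any other type, e.g. a bare commit      -> {'revision': [<sha>]}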
890 def bookmark(self, bookmark, revision=None):
897 def bookmark(self, bookmark, revision=None):
891 if isinstance(bookmark, unicode):
898 if isinstance(bookmark, unicode):
892 bookmark = safe_str(bookmark)
899 bookmark = safe_str(bookmark)
893 self._remote.bookmark(bookmark, revision=revision)
900 self._remote.bookmark(bookmark, revision=revision)
894 self._remote.invalidate_vcs_cache()
901 self._remote.invalidate_vcs_cache()
895
902
896 def get_path_permissions(self, username):
903 def get_path_permissions(self, username):
897 hgacl_file = os.path.join(self.path, '.hg/hgacl')
904 hgacl_file = os.path.join(self.path, '.hg/hgacl')
898
905
899 def read_patterns(suffix):
906 def read_patterns(suffix):
900 svalue = None
907 svalue = None
901 for section, option in [
908 for section, option in [
902 ('narrowacl', username + suffix),
909 ('narrowacl', username + suffix),
903 ('narrowacl', 'default' + suffix),
910 ('narrowacl', 'default' + suffix),
904 ('narrowhgacl', username + suffix),
911 ('narrowhgacl', username + suffix),
905 ('narrowhgacl', 'default' + suffix)
912 ('narrowhgacl', 'default' + suffix)
906 ]:
913 ]:
907 try:
914 try:
908 svalue = hgacl.get(section, option)
915 svalue = hgacl.get(section, option)
909 break # stop at the first value we find
916 break # stop at the first value we find
910 except configparser.NoOptionError:
917 except configparser.NoOptionError:
911 pass
918 pass
912 if not svalue:
919 if not svalue:
913 return None
920 return None
914 result = ['/']
921 result = ['/']
915 for pattern in svalue.split():
922 for pattern in svalue.split():
916 result.append(pattern)
923 result.append(pattern)
917 if '*' not in pattern and '?' not in pattern:
924 if '*' not in pattern and '?' not in pattern:
918 result.append(pattern + '/*')
925 result.append(pattern + '/*')
919 return result
926 return result
920
927
921 if os.path.exists(hgacl_file):
928 if os.path.exists(hgacl_file):
922 try:
929 try:
923 hgacl = configparser.RawConfigParser()
930 hgacl = configparser.RawConfigParser()
924 hgacl.read(hgacl_file)
931 hgacl.read(hgacl_file)
925
932
926 includes = read_patterns('.includes')
933 includes = read_patterns('.includes')
927 excludes = read_patterns('.excludes')
934 excludes = read_patterns('.excludes')
928 return BasePathPermissionChecker.create_from_patterns(
935 return BasePathPermissionChecker.create_from_patterns(
929 includes, excludes)
936 includes, excludes)
930 except BaseException as e:
937 except BaseException as e:
931 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
938 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
932 hgacl_file, self.name, e)
939 hgacl_file, self.name, e)
933 raise exceptions.RepositoryRequirementError(msg)
940 raise exceptions.RepositoryRequirementError(msg)
934 else:
941 else:
935 return None
942 return None
936
943
937
944
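# Standalone version of the pattern expansion done by read_patterns() above
# (illustration only; the example value is made up).
def expand_acl_patterns(svalue):
    result = ['/']
    for pattern in svalue.split():
        result.append(pattern)
        if '*' not in pattern and '?' not in pattern:
            result.append(pattern + '/*')
    return result

# expand_acl_patterns('docs src/*') -> ['/', 'docs', 'docs/*', 'src/*']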
938 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
945 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
939
946
940 def _commit_factory(self, commit_id):
947 def _commit_factory(self, commit_id):
941 return self.repo.get_commit(
948 return self.repo.get_commit(
942 commit_idx=commit_id, pre_load=self.pre_load)
949 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,81 +1,79 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 SVN inmemory module
23 SVN inmemory module
24 """
24 """
25
25
26 from rhodecode.lib.datelib import date_astimestamp
26 from rhodecode.lib.datelib import date_astimestamp
27 from rhodecode.lib.utils import safe_str
27 from rhodecode.lib.utils import safe_str
28 from rhodecode.lib.vcs.backends import base
28 from rhodecode.lib.vcs.backends import base
29
29
30
30
31 class SubversionInMemoryCommit(base.BaseInMemoryCommit):
31 class SubversionInMemoryCommit(base.BaseInMemoryCommit):
32
32
33 def commit(self, message, author, parents=None, branch=None, date=None,
33 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
34 **kwargs):
35 if branch not in (None, self.repository.DEFAULT_BRANCH_NAME):
34 if branch not in (None, self.repository.DEFAULT_BRANCH_NAME):
36 raise NotImplementedError("Branches are not yet supported")
35 raise NotImplementedError("Branches are not yet supported")
37
36
38 self.check_integrity(parents)
37 self.check_integrity(parents)
39
38
40 message = safe_str(message)
39 message = safe_str(message)
41 author = safe_str(author)
40 author = safe_str(author)
42
41
43 updated = []
42 updated = []
44 for node in self.added:
43 for node in self.added:
45 node_data = {
44 node_data = {
46 'path': node.path,
45 'path': node.path,
47 'content': safe_str(node.content),
46 'content': safe_str(node.content),
48 'mode': node.mode,
47 'mode': node.mode,
49 }
48 }
50 if node.is_binary:
49 if node.is_binary:
51 node_data['properties'] = {
50 node_data['properties'] = {
52 'svn:mime-type': 'application/octet-stream'
51 'svn:mime-type': 'application/octet-stream'
53 }
52 }
54 updated.append(node_data)
53 updated.append(node_data)
55 for node in self.changed:
54 for node in self.changed:
56 updated.append({
55 updated.append({
57 'path': node.path,
56 'path': node.path,
58 'content': safe_str(node.content),
57 'content': safe_str(node.content),
59 'mode': node.mode,
58 'mode': node.mode,
60 })
59 })
61
60
62 removed = []
61 removed = []
63 for node in self.removed:
62 for node in self.removed:
64 removed.append({
63 removed.append({
65 'path': node.path,
64 'path': node.path,
66 })
65 })
67
66
68 timestamp = date_astimestamp(date) if date else None
67 timestamp = date_astimestamp(date) if date else None
69 svn_rev = self.repository._remote.commit(
68 svn_rev = self.repository._remote.commit(
70 message=message, author=author, timestamp=timestamp,
69 message=message, author=author, timestamp=timestamp,
71 updated=updated, removed=removed)
70 updated=updated, removed=removed)
72
71
73 # TODO: Find a nicer way. If commit_ids is not yet evaluated, then
72 # TODO: Find a nicer way. If commit_ids is not yet evaluated, then
74 # we should not add the commit_id, if it is already evaluated, it
73 # we should not add the commit_id, if it is already evaluated, it
75 # will not be evaluated again.
74 # will not be evaluated again.
76 commit_id = str(svn_rev)
75 commit_id = str(svn_rev)
77 if commit_id not in self.repository.commit_ids:
76 self.repository.append_commit_id(commit_id)
78 self.repository.commit_ids.append(commit_id)
79 tip = self.repository.get_commit()
77 tip = self.repository.get_commit()
80 self.reset()
78 self.reset()
81 return tip
79 return tip
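# Shape of the payload that SubversionInMemoryCommit.commit() above hands to
# the remote (values, including the modes, are made up for illustration):
updated_example = [
    {'path': 'docs/readme.txt', 'content': 'hello world\n', 'mode': 0100644},
    {'path': 'images/logo.png', 'content': '<binary data>', 'mode': 0100644,
     'properties': {'svn:mime-type': 'application/octet-stream'}},
]
removed_example = [
    {'path': 'obsolete/file.txt'},
]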
@@ -1,360 +1,367 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SVN repository module
22 SVN repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import CachedProperty
30
31
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import date_astimestamp
33 from rhodecode.lib.datelib import date_astimestamp
33 from rhodecode.lib.utils import safe_str, safe_unicode
34 from rhodecode.lib.utils import safe_str, safe_unicode
34 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.vcs.backends.svn.commit import (
37 from rhodecode.lib.vcs.backends.svn.commit import (
37 SubversionCommit, _date_from_svn_properties)
38 SubversionCommit, _date_from_svn_properties)
38 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
39 from rhodecode.lib.vcs.backends.svn.diff import SubversionDiff
39 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
40 from rhodecode.lib.vcs.backends.svn.inmemory import SubversionInMemoryCommit
40 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
42 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
43 CommitDoesNotExistError, EmptyRepositoryError, RepositoryError,
43 VCSError, NodeDoesNotExistError)
44 VCSError, NodeDoesNotExistError)
44
45
45
46
46 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
47
48
48
49
49 class SubversionRepository(base.BaseRepository):
50 class SubversionRepository(base.BaseRepository):
50 """
51 """
51 Subversion backend implementation
52 Subversion backend implementation
52
53
53 .. important::
54 .. important::
54
55
55 It is very important to distinguish the commit index and the commit id
56 It is very important to distinguish the commit index and the commit id
56 which is assigned by Subversion. The first one is always handled as an
57 which is assigned by Subversion. The first one is always handled as an
57 `int` by this implementation. The commit id assigned by Subversion on
58 `int` by this implementation. The commit id assigned by Subversion on
58 the other side will always be a `str`.
59 the other side will always be a `str`.
59
60
60 There is a specific trap since the first commit will have the index
61 There is a specific trap since the first commit will have the index
61 ``0`` but the svn id will be ``"1"``.
62 ``0`` but the svn id will be ``"1"``.
62
63
63 """
64 """
64
65
65 # Note: Subversion does not really have a default branch name.
66 # Note: Subversion does not really have a default branch name.
66 DEFAULT_BRANCH_NAME = None
67 DEFAULT_BRANCH_NAME = None
67
68
68 contact = base.BaseRepository.DEFAULT_CONTACT
69 contact = base.BaseRepository.DEFAULT_CONTACT
69 description = base.BaseRepository.DEFAULT_DESCRIPTION
70 description = base.BaseRepository.DEFAULT_DESCRIPTION
70
71
71 def __init__(self, repo_path, config=None, create=False, src_url=None, bare=False,
72 def __init__(self, repo_path, config=None, create=False, src_url=None, bare=False,
72 **kwargs):
73 **kwargs):
73 self.path = safe_str(os.path.abspath(repo_path))
74 self.path = safe_str(os.path.abspath(repo_path))
74 self.config = config if config else self.get_default_config()
75 self.config = config if config else self.get_default_config()
75
76
76 self._init_repo(create, src_url)
77 self._init_repo(create, src_url)
77
78
79 # dependent that trigger re-computation of commit_ids
80 self._commit_ids_ver = 0
81
78 @LazyProperty
82 @LazyProperty
79 def _remote(self):
83 def _remote(self):
80 return connection.Svn(self.path, self.config)
84 return connection.Svn(self.path, self.config)
81
85
82 def _init_repo(self, create, src_url):
86 def _init_repo(self, create, src_url):
83 if create and os.path.exists(self.path):
87 if create and os.path.exists(self.path):
84 raise RepositoryError(
88 raise RepositoryError(
85 "Cannot create repository at %s, location already exist"
89 "Cannot create repository at %s, location already exist"
86 % self.path)
90 % self.path)
87
91
88 if create:
92 if create:
89 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
93 self._remote.create_repository(settings.SVN_COMPATIBLE_VERSION)
90 if src_url:
94 if src_url:
91 src_url = _sanitize_url(src_url)
95 src_url = _sanitize_url(src_url)
92 self._remote.import_remote_repository(src_url)
96 self._remote.import_remote_repository(src_url)
93 else:
97 else:
94 self._check_path()
98 self._check_path()
95
99
96 @LazyProperty
100 @CachedProperty('_commit_ids_ver')
97 def commit_ids(self):
101 def commit_ids(self):
98 head = self._remote.lookup(None)
102 head = self._remote.lookup(None)
99 return [str(r) for r in xrange(1, head + 1)]
103 return [str(r) for r in xrange(1, head + 1)]
100
104
105 def _rebuild_cache(self, commit_ids):
106 pass
107
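# Minimal sketch (not from the changeset) of how the CachedProperty used for
# commit_ids above behaves: the value is recomputed only after the dependency
# attribute (_commit_ids_ver here, _ver below) changes.
from zope.cachedescriptors.property import CachedProperty

class _CacheDemo(object):
    def __init__(self):
        self._ver = 0
        self._computed = 0

    @CachedProperty('_ver')
    def value(self):
        self._computed += 1
        return self._computed

# d = _CacheDemo()
# d.value, d.value   -> 1, 1   (second access served from the cache)
# d._ver += 1
# d.value            -> 2      (dependency changed, so it is recomputed)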
101 def run_svn_command(self, cmd, **opts):
108 def run_svn_command(self, cmd, **opts):
102 """
109 """
103 Runs given ``cmd`` as svn command and returns tuple
110 Runs given ``cmd`` as svn command and returns tuple
104 (stdout, stderr).
111 (stdout, stderr).
105
112
106 :param cmd: full svn command to be executed
113 :param cmd: full svn command to be executed
107 :param opts: env options to pass into Subprocess command
114 :param opts: env options to pass into Subprocess command
108 """
115 """
109 if not isinstance(cmd, list):
116 if not isinstance(cmd, list):
110 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
117 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
111
118
112 skip_stderr_log = opts.pop('skip_stderr_log', False)
119 skip_stderr_log = opts.pop('skip_stderr_log', False)
113 out, err = self._remote.run_svn_command(cmd, **opts)
120 out, err = self._remote.run_svn_command(cmd, **opts)
114 if err and not skip_stderr_log:
121 if err and not skip_stderr_log:
115 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
122 log.debug('Stderr output of svn command "%s":\n%s', cmd, err)
116 return out, err
123 return out, err
117
124
118 @LazyProperty
125 @LazyProperty
119 def branches(self):
126 def branches(self):
120 return self._tags_or_branches('vcs_svn_branch')
127 return self._tags_or_branches('vcs_svn_branch')
121
128
122 @LazyProperty
129 @LazyProperty
123 def branches_closed(self):
130 def branches_closed(self):
124 return {}
131 return {}
125
132
126 @LazyProperty
133 @LazyProperty
127 def bookmarks(self):
134 def bookmarks(self):
128 return {}
135 return {}
129
136
130 @LazyProperty
137 @LazyProperty
131 def branches_all(self):
138 def branches_all(self):
132 # TODO: johbo: Implement proper branch support
139 # TODO: johbo: Implement proper branch support
133 all_branches = {}
140 all_branches = {}
134 all_branches.update(self.branches)
141 all_branches.update(self.branches)
135 all_branches.update(self.branches_closed)
142 all_branches.update(self.branches_closed)
136 return all_branches
143 return all_branches
137
144
138 @LazyProperty
145 @LazyProperty
139 def tags(self):
146 def tags(self):
140 return self._tags_or_branches('vcs_svn_tag')
147 return self._tags_or_branches('vcs_svn_tag')
141
148
142 def _tags_or_branches(self, config_section):
149 def _tags_or_branches(self, config_section):
143 found_items = {}
150 found_items = {}
144
151
145 if self.is_empty():
152 if self.is_empty():
146 return {}
153 return {}
147
154
148 for pattern in self._patterns_from_section(config_section):
155 for pattern in self._patterns_from_section(config_section):
149 pattern = vcspath.sanitize(pattern)
156 pattern = vcspath.sanitize(pattern)
150 tip = self.get_commit()
157 tip = self.get_commit()
151 try:
158 try:
152 if pattern.endswith('*'):
159 if pattern.endswith('*'):
153 basedir = tip.get_node(vcspath.dirname(pattern))
160 basedir = tip.get_node(vcspath.dirname(pattern))
154 directories = basedir.dirs
161 directories = basedir.dirs
155 else:
162 else:
156 directories = (tip.get_node(pattern), )
163 directories = (tip.get_node(pattern), )
157 except NodeDoesNotExistError:
164 except NodeDoesNotExistError:
158 continue
165 continue
159 found_items.update(
166 found_items.update(
160 (safe_unicode(n.path),
167 (safe_unicode(n.path),
161 self.commit_ids[-1])
168 self.commit_ids[-1])
162 for n in directories)
169 for n in directories)
163
170
164 def get_name(item):
171 def get_name(item):
165 return item[0]
172 return item[0]
166
173
167 return OrderedDict(sorted(found_items.items(), key=get_name))
174 return OrderedDict(sorted(found_items.items(), key=get_name))
168
175
169 def _patterns_from_section(self, section):
176 def _patterns_from_section(self, section):
170 return (pattern for key, pattern in self.config.items(section))
177 return (pattern for key, pattern in self.config.items(section))
171
178
172 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
179 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
173 if self != repo2:
180 if self != repo2:
174 raise ValueError(
181 raise ValueError(
175 "Subversion does not support getting common ancestor of"
182 "Subversion does not support getting common ancestor of"
176 " different repositories.")
183 " different repositories.")
177
184
178 if int(commit_id1) < int(commit_id2):
185 if int(commit_id1) < int(commit_id2):
179 return commit_id1
186 return commit_id1
180 return commit_id2
187 return commit_id2
181
188
182 def verify(self):
189 def verify(self):
183 verify = self._remote.verify()
190 verify = self._remote.verify()
184
191
185 self._remote.invalidate_vcs_cache()
192 self._remote.invalidate_vcs_cache()
186 return verify
193 return verify
187
194
188 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
195 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
189 # TODO: johbo: Implement better comparison, this is a very naive
196 # TODO: johbo: Implement better comparison, this is a very naive
190 # version which does not allow to compare branches, tags or folders
197 # version which does not allow to compare branches, tags or folders
191 # at all.
198 # at all.
192 if repo2 != self:
199 if repo2 != self:
193 raise ValueError(
200 raise ValueError(
194 "Subversion does not support comparison of of different "
201 "Subversion does not support comparison of of different "
195 "repositories.")
202 "repositories.")
196
203
197 if commit_id1 == commit_id2:
204 if commit_id1 == commit_id2:
198 return []
205 return []
199
206
200 commit_idx1 = self._get_commit_idx(commit_id1)
207 commit_idx1 = self._get_commit_idx(commit_id1)
201 commit_idx2 = self._get_commit_idx(commit_id2)
208 commit_idx2 = self._get_commit_idx(commit_id2)
202
209
203 commits = [
210 commits = [
204 self.get_commit(commit_idx=idx)
211 self.get_commit(commit_idx=idx)
205 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
212 for idx in range(commit_idx1 + 1, commit_idx2 + 1)]
206
213
207 return commits
214 return commits
208
215
209 def _get_commit_idx(self, commit_id):
216 def _get_commit_idx(self, commit_id):
210 try:
217 try:
211 svn_rev = int(commit_id)
218 svn_rev = int(commit_id)
212 except:
219 except:
213 # TODO: johbo: this might be only one case, HEAD, check this
220 # TODO: johbo: this might be only one case, HEAD, check this
214 svn_rev = self._remote.lookup(commit_id)
221 svn_rev = self._remote.lookup(commit_id)
215 commit_idx = svn_rev - 1
222 commit_idx = svn_rev - 1
216 if commit_idx >= len(self.commit_ids):
223 if commit_idx >= len(self.commit_ids):
217 raise CommitDoesNotExistError(
224 raise CommitDoesNotExistError(
218 "Commit at index %s does not exist." % (commit_idx, ))
225 "Commit at index %s does not exist." % (commit_idx, ))
219 return commit_idx
226 return commit_idx
220
227
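# The mapping performed by _get_commit_idx() above, for a repository with
# ten commits (illustrative values):
#   _get_commit_idx('1')    -> 0
#   _get_commit_idx('10')   -> 9
#   _get_commit_idx('11')   -> raises CommitDoesNotExistError
#   _get_commit_idx('HEAD') -> resolved via self._remote.lookup() first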
221 @staticmethod
228 @staticmethod
222 def check_url(url, config):
229 def check_url(url, config):
223 """
230 """
224 Check if `url` is a valid source to import a Subversion repository.
231 Check if `url` is a valid source to import a Subversion repository.
225 """
232 """
226 # convert to URL if it's a local directory
233 # convert to URL if it's a local directory
227 if os.path.isdir(url):
234 if os.path.isdir(url):
228 url = 'file://' + urllib.pathname2url(url)
235 url = 'file://' + urllib.pathname2url(url)
229 return connection.Svn.check_url(url, config.serialize())
236 return connection.Svn.check_url(url, config.serialize())
230
237
231 @staticmethod
238 @staticmethod
232 def is_valid_repository(path):
239 def is_valid_repository(path):
233 try:
240 try:
234 SubversionRepository(path)
241 SubversionRepository(path)
235 return True
242 return True
236 except VCSError:
243 except VCSError:
237 pass
244 pass
238 return False
245 return False
239
246
240 def _check_path(self):
247 def _check_path(self):
241 if not os.path.exists(self.path):
248 if not os.path.exists(self.path):
242 raise VCSError('Path "%s" does not exist!' % (self.path, ))
249 raise VCSError('Path "%s" does not exist!' % (self.path, ))
243 if not self._remote.is_path_valid_repository(self.path):
250 if not self._remote.is_path_valid_repository(self.path):
244 raise VCSError(
251 raise VCSError(
245 'Path "%s" does not contain a Subversion repository' %
252 'Path "%s" does not contain a Subversion repository' %
246 (self.path, ))
253 (self.path, ))
247
254
248 @LazyProperty
255 @LazyProperty
249 def last_change(self):
256 def last_change(self):
250 """
257 """
251 Returns last change made on this repository as
258 Returns last change made on this repository as
252 `datetime.datetime` object.
259 `datetime.datetime` object.
253 """
260 """
254 # Subversion always has a first commit which has id "0" and contains
261 # Subversion always has a first commit which has id "0" and contains
255 # what we are looking for.
262 # what we are looking for.
256 last_id = len(self.commit_ids)
263 last_id = len(self.commit_ids)
257 properties = self._remote.revision_properties(last_id)
264 properties = self._remote.revision_properties(last_id)
258 return _date_from_svn_properties(properties)
265 return _date_from_svn_properties(properties)
259
266
260 @LazyProperty
267 @LazyProperty
261 def in_memory_commit(self):
268 def in_memory_commit(self):
262 return SubversionInMemoryCommit(self)
269 return SubversionInMemoryCommit(self)
263
270
264 def get_hook_location(self):
271 def get_hook_location(self):
265 """
272 """
266 returns absolute path to location where hooks are stored
273 returns absolute path to location where hooks are stored
267 """
274 """
268 return os.path.join(self.path, 'hooks')
275 return os.path.join(self.path, 'hooks')
269
276
270 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
277 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
271 if self.is_empty():
278 if self.is_empty():
272 raise EmptyRepositoryError("There are no commits yet")
279 raise EmptyRepositoryError("There are no commits yet")
273 if commit_id is not None:
280 if commit_id is not None:
274 self._validate_commit_id(commit_id)
281 self._validate_commit_id(commit_id)
275 elif commit_idx is not None:
282 elif commit_idx is not None:
276 self._validate_commit_idx(commit_idx)
283 self._validate_commit_idx(commit_idx)
277 try:
284 try:
278 commit_id = self.commit_ids[commit_idx]
285 commit_id = self.commit_ids[commit_idx]
279 except IndexError:
286 except IndexError:
280 raise CommitDoesNotExistError('No commit with idx: {}'.format(commit_idx))
287 raise CommitDoesNotExistError('No commit with idx: {}'.format(commit_idx))
281
288
282 commit_id = self._sanitize_commit_id(commit_id)
289 commit_id = self._sanitize_commit_id(commit_id)
283 commit = SubversionCommit(repository=self, commit_id=commit_id)
290 commit = SubversionCommit(repository=self, commit_id=commit_id)
284 return commit
291 return commit
285
292
286 def get_commits(
293 def get_commits(
287 self, start_id=None, end_id=None, start_date=None, end_date=None,
294 self, start_id=None, end_id=None, start_date=None, end_date=None,
288 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
295 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
289 if self.is_empty():
296 if self.is_empty():
290 raise EmptyRepositoryError("There are no commit_ids yet")
297 raise EmptyRepositoryError("There are no commit_ids yet")
291 self._validate_branch_name(branch_name)
298 self._validate_branch_name(branch_name)
292
299
293 if start_id is not None:
300 if start_id is not None:
294 self._validate_commit_id(start_id)
301 self._validate_commit_id(start_id)
295 if end_id is not None:
302 if end_id is not None:
296 self._validate_commit_id(end_id)
303 self._validate_commit_id(end_id)
297
304
298 start_raw_id = self._sanitize_commit_id(start_id)
305 start_raw_id = self._sanitize_commit_id(start_id)
299 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
306 start_pos = self.commit_ids.index(start_raw_id) if start_id else None
300 end_raw_id = self._sanitize_commit_id(end_id)
307 end_raw_id = self._sanitize_commit_id(end_id)
301 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
308 end_pos = max(0, self.commit_ids.index(end_raw_id)) if end_id else None
302
309
303 if None not in [start_id, end_id] and start_pos > end_pos:
310 if None not in [start_id, end_id] and start_pos > end_pos:
304 raise RepositoryError(
311 raise RepositoryError(
305 "Start commit '%s' cannot be after end commit '%s'" %
312 "Start commit '%s' cannot be after end commit '%s'" %
306 (start_id, end_id))
313 (start_id, end_id))
307 if end_pos is not None:
314 if end_pos is not None:
308 end_pos += 1
315 end_pos += 1
309
316
310 # Date based filtering
317 # Date based filtering
311 if start_date or end_date:
318 if start_date or end_date:
312 start_raw_id, end_raw_id = self._remote.lookup_interval(
319 start_raw_id, end_raw_id = self._remote.lookup_interval(
313 date_astimestamp(start_date) if start_date else None,
320 date_astimestamp(start_date) if start_date else None,
314 date_astimestamp(end_date) if end_date else None)
321 date_astimestamp(end_date) if end_date else None)
315 start_pos = start_raw_id - 1
322 start_pos = start_raw_id - 1
316 end_pos = end_raw_id
323 end_pos = end_raw_id
317
324
318 commit_ids = self.commit_ids
325 commit_ids = self.commit_ids
319
326
320 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
327 # TODO: johbo: Reconsider impact of DEFAULT_BRANCH_NAME here
321 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
328 if branch_name not in [None, self.DEFAULT_BRANCH_NAME]:
322 svn_rev = long(self.commit_ids[-1])
329 svn_rev = long(self.commit_ids[-1])
323 commit_ids = self._remote.node_history(
330 commit_ids = self._remote.node_history(
324 path=branch_name, revision=svn_rev, limit=None)
331 path=branch_name, revision=svn_rev, limit=None)
325 commit_ids = [str(i) for i in reversed(commit_ids)]
332 commit_ids = [str(i) for i in reversed(commit_ids)]
326
333
327 if start_pos or end_pos:
334 if start_pos or end_pos:
328 commit_ids = commit_ids[start_pos:end_pos]
335 commit_ids = commit_ids[start_pos:end_pos]
329 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
336 return base.CollectionGenerator(self, commit_ids, pre_load=pre_load)
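As a usage sketch of the filtering above (hypothetical only: `repo` stands for a SubversionRepository opened elsewhere, and the dates are invented), the date bounds are translated into revision positions and the resulting commits are produced lazily by the CollectionGenerator:

    import datetime

    # Iterate over commits made in the first half of 2019 on the default branch.
    for commit in repo.get_commits(
            start_date=datetime.datetime(2019, 1, 1),
            end_date=datetime.datetime(2019, 6, 30)):
        print(commit.raw_id, commit.message)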
330
337
331 def _sanitize_commit_id(self, commit_id):
338 def _sanitize_commit_id(self, commit_id):
332 if commit_id and commit_id.isdigit():
339 if commit_id and commit_id.isdigit():
333 if int(commit_id) <= len(self.commit_ids):
340 if int(commit_id) <= len(self.commit_ids):
334 return commit_id
341 return commit_id
335 else:
342 else:
336 raise CommitDoesNotExistError(
343 raise CommitDoesNotExistError(
337 "Commit %s does not exist." % (commit_id, ))
344 "Commit %s does not exist." % (commit_id, ))
338 if commit_id not in [
345 if commit_id not in [
339 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
346 None, 'HEAD', 'tip', self.DEFAULT_BRANCH_NAME]:
340 raise CommitDoesNotExistError(
347 raise CommitDoesNotExistError(
341 "Commit id %s not understood." % (commit_id, ))
348 "Commit id %s not understood." % (commit_id, ))
342 svn_rev = self._remote.lookup('HEAD')
349 svn_rev = self._remote.lookup('HEAD')
343 return str(svn_rev)
350 return str(svn_rev)
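To make the rules above concrete, an illustrative sketch with made-up revision numbers, assuming a repository that currently holds 120 commits:

    repo._sanitize_commit_id('57')    # '57'  - numeric and within range, passed through
    repo._sanitize_commit_id('HEAD')  # '120' - symbolic names resolve via the remote lookup
    repo._sanitize_commit_id('999')   # raises CommitDoesNotExistError - out of range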
344
351
345 def get_diff(
352 def get_diff(
346 self, commit1, commit2, path=None, ignore_whitespace=False,
353 self, commit1, commit2, path=None, ignore_whitespace=False,
347 context=3, path1=None):
354 context=3, path1=None):
348 self._validate_diff_commits(commit1, commit2)
355 self._validate_diff_commits(commit1, commit2)
349 svn_rev1 = long(commit1.raw_id)
356 svn_rev1 = long(commit1.raw_id)
350 svn_rev2 = long(commit2.raw_id)
357 svn_rev2 = long(commit2.raw_id)
351 diff = self._remote.diff(
358 diff = self._remote.diff(
352 svn_rev1, svn_rev2, path1=path1, path2=path,
359 svn_rev1, svn_rev2, path1=path1, path2=path,
353 ignore_whitespace=ignore_whitespace, context=context)
360 ignore_whitespace=ignore_whitespace, context=context)
354 return SubversionDiff(diff)
361 return SubversionDiff(diff)
355
362
356
363
357 def _sanitize_url(url):
364 def _sanitize_url(url):
358 if '://' not in url:
365 if '://' not in url:
359 url = 'file://' + urllib.pathname2url(url)
366 url = 'file://' + urllib.pathname2url(url)
360 return url
367 return url
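For illustration (the paths are made up), the helper leaves values that already carry a scheme untouched and wraps bare filesystem paths into file:// URLs via Python 2's urllib:

    import urllib  # Python 2 module used by _sanitize_url above

    print(_sanitize_url('/srv/svn/my-repo'))    # file:///srv/svn/my-repo
    print(_sanitize_url('svn://host/my-repo'))  # already a URL, returned unchanged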
@@ -1,151 +1,151 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import datetime
21 import datetime
22 import os
22 import os
23 import shutil
23 import shutil
24 import tarfile
24 import tarfile
25 import tempfile
25 import tempfile
26 import zipfile
26 import zipfile
27 import StringIO
27 import StringIO
28
28
29 import mock
29 import mock
30 import pytest
30 import pytest
31
31
32 from rhodecode.lib.vcs.backends import base
32 from rhodecode.lib.vcs.backends import base
33 from rhodecode.lib.vcs.exceptions import ImproperArchiveTypeError, VCSError
33 from rhodecode.lib.vcs.exceptions import ImproperArchiveTypeError, VCSError
34 from rhodecode.lib.vcs.nodes import FileNode
34 from rhodecode.lib.vcs.nodes import FileNode
35 from rhodecode.tests.vcs.conftest import BackendTestMixin
35 from rhodecode.tests.vcs.conftest import BackendTestMixin
36
36
37
37
38 @pytest.mark.usefixtures("vcs_repository_support")
38 @pytest.mark.usefixtures("vcs_repository_support")
39 class TestArchives(BackendTestMixin):
39 class TestArchives(BackendTestMixin):
40
40
41 @pytest.fixture(autouse=True)
41 @pytest.fixture(autouse=True)
42 def tempfile(self, request):
42 def tempfile(self, request):
43 self.temp_file = tempfile.mkstemp()[1]
43 self.temp_file = tempfile.mkstemp()[1]
44
44
45 @request.addfinalizer
45 @request.addfinalizer
46 def cleanup():
46 def cleanup():
47 os.remove(self.temp_file)
47 os.remove(self.temp_file)
48
48
49 @classmethod
49 @classmethod
50 def _get_commits(cls):
50 def _get_commits(cls):
51 start_date = datetime.datetime(2010, 1, 1, 20)
51 start_date = datetime.datetime(2010, 1, 1, 20)
52 for x in range(5):
52 for x in range(5):
53 yield {
53 yield {
54 'message': 'Commit %d' % x,
54 'message': 'Commit %d' % x,
55 'author': 'Joe Doe <joe.doe@example.com>',
55 'author': 'Joe Doe <joe.doe@example.com>',
56 'date': start_date + datetime.timedelta(hours=12 * x),
56 'date': start_date + datetime.timedelta(hours=12 * x),
57 'added': [
57 'added': [
58 FileNode(
58 FileNode(
59 '%d/file_%d.txt' % (x, x), content='Foobar %d' % x),
59 '%d/file_%d.txt' % (x, x), content='Foobar %d' % x),
60 ],
60 ],
61 }
61 }
62
62
63 @pytest.mark.parametrize('compressor', ['gz', 'bz2'])
63 @pytest.mark.parametrize('compressor', ['gz', 'bz2'])
64 def test_archive_tar(self, compressor):
64 def test_archive_tar(self, compressor):
65 self.tip.archive_repo(
65 self.tip.archive_repo(
66 self.temp_file, kind='t' + compressor, prefix='repo')
66 self.temp_file, kind='t' + compressor, prefix='repo')
67 out_dir = tempfile.mkdtemp()
67 out_dir = tempfile.mkdtemp()
68 out_file = tarfile.open(self.temp_file, 'r|' + compressor)
68 out_file = tarfile.open(self.temp_file, 'r|' + compressor)
69 out_file.extractall(out_dir)
69 out_file.extractall(out_dir)
70 out_file.close()
70 out_file.close()
71
71
72 for x in range(5):
72 for x in range(5):
73 node_path = '%d/file_%d.txt' % (x, x)
73 node_path = '%d/file_%d.txt' % (x, x)
74 with open(os.path.join(out_dir, 'repo/' + node_path)) as f:
74 with open(os.path.join(out_dir, 'repo/' + node_path)) as f:
75 file_content = f.read()
75 file_content = f.read()
76 assert file_content == self.tip.get_node(node_path).content
76 assert file_content == self.tip.get_node(node_path).content
77
77
78 shutil.rmtree(out_dir)
78 shutil.rmtree(out_dir)
79
79
80 def test_archive_zip(self):
80 def test_archive_zip(self):
81 self.tip.archive_repo(self.temp_file, kind='zip', prefix='repo')
81 self.tip.archive_repo(self.temp_file, kind='zip', prefix='repo')
82 out = zipfile.ZipFile(self.temp_file)
82 out = zipfile.ZipFile(self.temp_file)
83
83
84 for x in range(5):
84 for x in range(5):
85 node_path = '%d/file_%d.txt' % (x, x)
85 node_path = '%d/file_%d.txt' % (x, x)
86 decompressed = StringIO.StringIO()
86 decompressed = StringIO.StringIO()
87 decompressed.write(out.read('repo/' + node_path))
87 decompressed.write(out.read('repo/' + node_path))
88 assert decompressed.getvalue() == \
88 assert decompressed.getvalue() == \
89 self.tip.get_node(node_path).content
89 self.tip.get_node(node_path).content
90 decompressed.close()
90 decompressed.close()
91
91
92 def test_archive_zip_with_metadata(self):
92 def test_archive_zip_with_metadata(self):
93 self.tip.archive_repo(self.temp_file, kind='zip',
93 self.tip.archive_repo(self.temp_file, kind='zip',
94 prefix='repo', write_metadata=True)
94 prefix='repo', write_metadata=True)
95
95
96 out = zipfile.ZipFile(self.temp_file)
96 out = zipfile.ZipFile(self.temp_file)
97 metafile = out.read('.archival.txt')
97 metafile = out.read('.archival.txt')
98
98
99 raw_id = self.tip.raw_id
99 raw_id = self.tip.raw_id
100 assert 'rev:%s' % raw_id in metafile
100 assert 'commit_id:%s' % raw_id in metafile
101
101
102 for x in range(5):
102 for x in range(5):
103 node_path = '%d/file_%d.txt' % (x, x)
103 node_path = '%d/file_%d.txt' % (x, x)
104 decompressed = StringIO.StringIO()
104 decompressed = StringIO.StringIO()
105 decompressed.write(out.read('repo/' + node_path))
105 decompressed.write(out.read('repo/' + node_path))
106 assert decompressed.getvalue() == \
106 assert decompressed.getvalue() == \
107 self.tip.get_node(node_path).content
107 self.tip.get_node(node_path).content
108 decompressed.close()
108 decompressed.close()
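Note that this test pins the metadata key renamed in this commit from rev: to commit_id:. A hypothetical way to inspect that file outside the test (the archive path is invented; only the commit_id line is asserted here):

    import zipfile

    archive = zipfile.ZipFile('/tmp/repo-archive.zip')
    print(archive.read('.archival.txt'))  # expected to contain 'commit_id:<raw_id>'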
109
109
110 def test_archive_wrong_kind(self):
110 def test_archive_wrong_kind(self):
111 with pytest.raises(ImproperArchiveTypeError):
111 with pytest.raises(ImproperArchiveTypeError):
112 self.tip.archive_repo(self.temp_file, kind='wrong kind')
112 self.tip.archive_repo(self.temp_file, kind='wrong kind')
113
113
114
114
115 @pytest.fixture
115 @pytest.fixture
116 def base_commit():
116 def base_commit():
117 """
117 """
118 Prepare a `base.BaseCommit` just enough for `_validate_archive_prefix`.
118 Prepare a `base.BaseCommit` just enough for `_validate_archive_prefix`.
119 """
119 """
120 commit = base.BaseCommit()
120 commit = base.BaseCommit()
121 commit.repository = mock.Mock()
121 commit.repository = mock.Mock()
122 commit.repository.name = u'fake_repo'
122 commit.repository.name = u'fake_repo'
123 commit.short_id = 'fake_id'
123 commit.short_id = 'fake_id'
124 return commit
124 return commit
125
125
126
126
127 @pytest.mark.parametrize("prefix", [u"unicode-prefix", u"Ünïcâdë"])
127 @pytest.mark.parametrize("prefix", [u"unicode-prefix", u"Ünïcâdë"])
128 def test_validate_archive_prefix_enforces_bytes_as_prefix(prefix, base_commit):
128 def test_validate_archive_prefix_enforces_bytes_as_prefix(prefix, base_commit):
129 with pytest.raises(ValueError):
129 with pytest.raises(ValueError):
130 base_commit._validate_archive_prefix(prefix)
130 base_commit._validate_archive_prefix(prefix)
131
131
132
132
133 def test_validate_archive_prefix_empty_prefix(base_commit):
133 def test_validate_archive_prefix_empty_prefix(base_commit):
134 # TODO: johbo: Should raise a ValueError here.
134 # TODO: johbo: Should raise a ValueError here.
135 with pytest.raises(VCSError):
135 with pytest.raises(VCSError):
136 base_commit._validate_archive_prefix('')
136 base_commit._validate_archive_prefix('')
137
137
138
138
139 def test_validate_archive_prefix_with_leading_slash(base_commit):
139 def test_validate_archive_prefix_with_leading_slash(base_commit):
140 # TODO: johbo: Should raise a ValueError here.
140 # TODO: johbo: Should raise a ValueError here.
141 with pytest.raises(VCSError):
141 with pytest.raises(VCSError):
142 base_commit._validate_archive_prefix('/any')
142 base_commit._validate_archive_prefix('/any')
143
143
144
144
145 def test_validate_archive_prefix_falls_back_to_repository_name(base_commit):
145 def test_validate_archive_prefix_falls_back_to_repository_name(base_commit):
146 prefix = base_commit._validate_archive_prefix(None)
146 prefix = base_commit._validate_archive_prefix(None)
147 expected_prefix = base_commit._ARCHIVE_PREFIX_TEMPLATE.format(
147 expected_prefix = base_commit._ARCHIVE_PREFIX_TEMPLATE.format(
148 repo_name='fake_repo',
148 repo_name='fake_repo',
149 short_id='fake_id')
149 short_id='fake_id')
150 assert isinstance(prefix, str)
150 assert isinstance(prefix, str)
151 assert prefix == expected_prefix
151 assert prefix == expected_prefix
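Taken together, the tests above pin the prefix rules roughly as follows (illustrative calls against the fake commit fixture; the exact fallback string comes from _ARCHIVE_PREFIX_TEMPLATE and is not spelled out here):

    base_commit._validate_archive_prefix(None)        # falls back to the template value
    base_commit._validate_archive_prefix(u'Ünïcâdë')  # raises ValueError (bytes required)
    base_commit._validate_archive_prefix('')          # raises VCSError
    base_commit._validate_archive_prefix('/any')      # raises VCSError (no leading slash)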
@@ -1,183 +1,186 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2019 RhodeCode GmbH
3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import pytest
21 import pytest
22
22
23 from mock import call, patch
23 from mock import call, patch
24
24
25 from rhodecode.lib.vcs.backends.base import Reference
25 from rhodecode.lib.vcs.backends.base import Reference
26
26
27
27
28 class TestMercurialRemoteRepoInvalidation(object):
28 class TestMercurialRemoteRepoInvalidation(object):
29 """
29 """
30 If the VCSServer is running with multiple processes and/or instances,
30 If the VCSServer is running with multiple processes and/or instances,
31 operations on repositories are potentially handled by different processes
31 operations on repositories are potentially handled by different processes
32 in a random fashion. The Mercurial repository objects used in the VCSServer
32 in a random fashion. The Mercurial repository objects used in the VCSServer
33 cache the commits of the repo, so we have to invalidate the
33 cache the commits of the repo, so we have to invalidate the
34 VCSServer's cache of these objects after a writing operation.
34 VCSServer's cache of these objects after a writing operation.
35 """
35 """
36
36
37 # Default reference used as a dummy during tests.
37 # Default reference used as a dummy during tests.
38 default_ref = Reference('branch', 'default', None)
38 default_ref = Reference('branch', 'default', None)
39
39
40 # Methods of vcsserver.hg.HgRemote that are "writing" operations.
40 # Methods of vcsserver.hg.HgRemote that are "writing" operations.
41 writing_methods = [
41 writing_methods = [
42 'bookmark',
42 'bookmark',
43 'commit',
43 'commit',
44 'merge',
44 'merge',
45 'pull',
45 'pull',
46 'pull_cmd',
46 'pull_cmd',
47 'rebase',
47 'rebase',
48 'strip',
48 'strip',
49 'tag',
49 'tag',
50 ]
50 ]
51
51
52 @pytest.mark.parametrize('method_name, method_args', [
52 @pytest.mark.parametrize('method_name, method_args', [
53 ('_local_merge', [default_ref, None, None, None, default_ref]),
53 ('_local_merge', [default_ref, None, None, None, default_ref]),
54 ('_local_pull', ['', default_ref]),
54 ('_local_pull', ['', default_ref]),
55 ('bookmark', [None]),
55 ('bookmark', [None]),
56 ('pull', ['', default_ref]),
56 ('pull', ['', default_ref]),
57 ('remove_tag', ['mytag', None]),
57 ('remove_tag', ['mytag', None]),
58 ('strip', [None]),
58 ('strip', [None]),
59 ('tag', ['newtag', None]),
59 ('tag', ['newtag', None]),
60 ])
60 ])
61 def test_method_invokes_invalidate_on_remote_repo(
61 def test_method_invokes_invalidate_on_remote_repo(
62 self, method_name, method_args, backend_hg):
62 self, method_name, method_args, backend_hg):
63 """
63 """
64 Check that the listed methods are invalidating the VCSServer cache
64 Check that the listed methods are invalidating the VCSServer cache
65 after invoking a writing method of their remote repository object.
65 after invoking a writing method of their remote repository object.
66 """
66 """
67 tags = {'mytag': 'mytag-id'}
67 tags = {'mytag': 'mytag-id'}
68
68
69 def add_tag(name, raw_id, *args, **kwds):
69 def add_tag(name, raw_id, *args, **kwds):
70 tags[name] = raw_id
70 tags[name] = raw_id
71
71
72 repo = backend_hg.repo.scm_instance()
72 repo = backend_hg.repo.scm_instance()
73
73 with patch.object(repo, '_remote') as remote:
74 with patch.object(repo, '_remote') as remote:
75 repo.tags = tags
74 remote.lookup.return_value = ('commit-id', 'commit-idx')
76 remote.lookup.return_value = ('commit-id', 'commit-idx')
75 remote.tags.return_value = tags
77 remote.tags.return_value = tags
76 remote._get_tags.return_value = tags
78 remote._get_tags.return_value = tags
79 remote.is_empty.return_value = False
77 remote.tag.side_effect = add_tag
80 remote.tag.side_effect = add_tag
78
81
79 # Invoke method.
82 # Invoke method.
80 method = getattr(repo, method_name)
83 method = getattr(repo, method_name)
81 method(*method_args)
84 method(*method_args)
82
85
83 # Assert that every "writing" method is followed by an invocation
86 # Assert that every "writing" method is followed by an invocation
84 # of the cache invalidation method.
87 # of the cache invalidation method.
85 for counter, method_call in enumerate(remote.method_calls):
88 for counter, method_call in enumerate(remote.method_calls):
86 call_name = method_call[0]
89 call_name = method_call[0]
87 if call_name in self.writing_methods:
90 if call_name in self.writing_methods:
88 next_call = remote.method_calls[counter + 1]
91 next_call = remote.method_calls[counter + 1]
89 assert next_call == call.invalidate_vcs_cache()
92 assert next_call == call.invalidate_vcs_cache()
90
93
91 def _prepare_shadow_repo(self, pull_request):
94 def _prepare_shadow_repo(self, pull_request):
92 """
95 """
93 Helper that creates a shadow repo that can be used to reproduce the
96 Helper that creates a shadow repo that can be used to reproduce the
94 CommitDoesNotExistError when pulling in from target and source
97 CommitDoesNotExistError when pulling in from target and source
95 references.
98 references.
96 """
99 """
97 from rhodecode.model.pull_request import PullRequestModel
100 from rhodecode.model.pull_request import PullRequestModel
98 repo_id = pull_request.target_repo.repo_id
101 repo_id = pull_request.target_repo.repo_id
99 target_vcs = pull_request.target_repo.scm_instance()
102 target_vcs = pull_request.target_repo.scm_instance()
100 target_ref = pull_request.target_ref_parts
103 target_ref = pull_request.target_ref_parts
101 source_ref = pull_request.source_ref_parts
104 source_ref = pull_request.source_ref_parts
102
105
103 # Create shadow repository.
106 # Create shadow repository.
104 pr = PullRequestModel()
107 pr = PullRequestModel()
105 workspace_id = pr._workspace_id(pull_request)
108 workspace_id = pr._workspace_id(pull_request)
106 shadow_repository_path = target_vcs._maybe_prepare_merge_workspace(
109 shadow_repository_path = target_vcs._maybe_prepare_merge_workspace(
107 repo_id, workspace_id, target_ref, source_ref)
110 repo_id, workspace_id, target_ref, source_ref)
108 shadow_repo = target_vcs._get_shadow_instance(shadow_repository_path)
111 shadow_repo = target_vcs._get_shadow_instance(shadow_repository_path)
109
112
110 # This will populate the cache of the mercurial repository object
113 # This will populate the cache of the mercurial repository object
111 # inside of the VCSServer.
114 # inside of the VCSServer.
112 shadow_repo.get_commit()
115 shadow_repo.get_commit()
113
116
114 return shadow_repo, source_ref, target_ref
117 return shadow_repo, source_ref, target_ref
115
118
116 @pytest.mark.backends('hg')
119 @pytest.mark.backends('hg')
117 def test_commit_does_not_exist_error_happens(self, pr_util, app):
120 def test_commit_does_not_exist_error_happens(self, pr_util, app):
118 """
121 """
119 This test is somewhat special. It does not really test the system;
122 This test is somewhat special. It does not really test the system;
120 instead it is more or less a precondition for the
123 instead it is more or less a precondition for the
121 "test_commit_does_not_exist_error_does_not_happen". It deactivates the
124 "test_commit_does_not_exist_error_does_not_happen". It deactivates the
122 cache invalidation and asserts that the error occurs.
125 cache invalidation and asserts that the error occurs.
123 """
126 """
124 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
127 from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError
125
128
126 pull_request = pr_util.create_pull_request()
129 pull_request = pr_util.create_pull_request()
127 target_vcs = pull_request.target_repo.scm_instance()
130 target_vcs = pull_request.target_repo.scm_instance()
128 source_vcs = pull_request.source_repo.scm_instance()
131 source_vcs = pull_request.source_repo.scm_instance()
129 shadow_repo, source_ref, target_ref = self._prepare_shadow_repo(
132 shadow_repo, source_ref, target_ref = self._prepare_shadow_repo(
130 pull_request)
133 pull_request)
131
134
132 # Pull from target and source references but without invalidation of
135 # Pull from target and source references but without invalidation of
133 # RemoteRepo objects and without VCSServer caching of mercurial
136 # RemoteRepo objects and without VCSServer caching of mercurial
134 # repository objects.
137 # repository objects.
135 with patch.object(shadow_repo._remote, 'invalidate_vcs_cache'):
138 with patch.object(shadow_repo._remote, 'invalidate_vcs_cache'):
136 # NOTE: Do not use patch.dict() to disable the cache because it
139 # NOTE: Do not use patch.dict() to disable the cache because it
137 # restores the WHOLE dict and not only the patched keys.
140 # restores the WHOLE dict and not only the patched keys.
138 shadow_repo._remote._wire['cache'] = False
141 shadow_repo._remote._wire['cache'] = False
139 shadow_repo._local_pull(target_vcs.path, target_ref)
142 shadow_repo._local_pull(target_vcs.path, target_ref)
140 shadow_repo._local_pull(source_vcs.path, source_ref)
143 shadow_repo._local_pull(source_vcs.path, source_ref)
141 shadow_repo._remote._wire.pop('cache')
144 shadow_repo._remote._wire.pop('cache')
142
145
143 # Try to look up the target_ref in the shadow repo. This should work because
146 # Try to look up the target_ref in the shadow repo. This should work because
144 # the shadow repo is a clone of the target and always contains all of
147 # the shadow repo is a clone of the target and always contains all of
145 # its commits in the initial cache.
148 # its commits in the initial cache.
146 shadow_repo.get_commit(target_ref.commit_id)
149 shadow_repo.get_commit(target_ref.commit_id)
147
150
148 # If we try to look up the source_ref it should fail because the shadow
151 # If we try to look up the source_ref it should fail because the shadow
149 # repo commit cache doesn't get invalidated. (Due to patched
152 # repo commit cache doesn't get invalidated. (Due to patched
150 # invalidation and caching above).
153 # invalidation and caching above).
151 with pytest.raises(CommitDoesNotExistError):
154 with pytest.raises(CommitDoesNotExistError):
152 shadow_repo.get_commit(source_ref.commit_id)
155 shadow_repo.get_commit(source_ref.commit_id)
153
156
154 @pytest.mark.backends('hg')
157 @pytest.mark.backends('hg')
155 def test_commit_does_not_exist_error_does_not_happen(self, pr_util, app):
158 def test_commit_does_not_exist_error_does_not_happen(self, pr_util, app):
156 """
159 """
157 This test simulates a pull request merge in which the pull operations
160 This test simulates a pull request merge in which the pull operations
158 are handled by a different VCSServer process than all other operations.
161 are handled by a different VCSServer process than all other operations.
159 Without correct cache invalidation this leads to an error when
162 Without correct cache invalidation this leads to an error when
160 retrieving the pulled commits afterwards.
163 retrieving the pulled commits afterwards.
161 """
164 """
162
165
163 pull_request = pr_util.create_pull_request()
166 pull_request = pr_util.create_pull_request()
164 target_vcs = pull_request.target_repo.scm_instance()
167 target_vcs = pull_request.target_repo.scm_instance()
165 source_vcs = pull_request.source_repo.scm_instance()
168 source_vcs = pull_request.source_repo.scm_instance()
166 shadow_repo, source_ref, target_ref = self._prepare_shadow_repo(
169 shadow_repo, source_ref, target_ref = self._prepare_shadow_repo(
167 pull_request)
170 pull_request)
168
171
169 # Pull from target and source references without VCSServer
172 # Pull from target and source references without VCSServer
170 # caching of mercurial repository objects but with active invalidation
173 # caching of mercurial repository objects but with active invalidation
171 # of RemoteRepo objects.
174 # of RemoteRepo objects.
172 # NOTE: Do not use patch.dict() to disable the cache because it
175 # NOTE: Do not use patch.dict() to disable the cache because it
173 # restores the WHOLE dict and not only the patched keys.
176 # restores the WHOLE dict and not only the patched keys.
174 shadow_repo._remote._wire['cache'] = False
177 shadow_repo._remote._wire['cache'] = False
175 shadow_repo._local_pull(target_vcs.path, target_ref)
178 shadow_repo._local_pull(target_vcs.path, target_ref)
176 shadow_repo._local_pull(source_vcs.path, source_ref)
179 shadow_repo._local_pull(source_vcs.path, source_ref)
177 shadow_repo._remote._wire.pop('cache')
180 shadow_repo._remote._wire.pop('cache')
178
181
179 # Try to look up the target and source references in the shadow repo. This
182 # Try to look up the target and source references in the shadow repo. This
180 # should work because the RemoteRepo object gets invalidated during the
183 # should work because the RemoteRepo object gets invalidated during the
181 # above pull operations.
184 # above pull operations.
182 shadow_repo.get_commit(target_ref.commit_id)
185 shadow_repo.get_commit(target_ref.commit_id)
183 shadow_repo.get_commit(source_ref.commit_id)
186 shadow_repo.get_commit(source_ref.commit_id)
@@ -1,345 +1,353 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Tests the so-called "in memory commits" commit API of vcs.
22 Tests the so-called "in memory commits" commit API of vcs.
23 """
23 """
24 import datetime
24 import datetime
25
25
26 import pytest
26 import pytest
27
27
28 from rhodecode.lib.utils2 import safe_unicode
28 from rhodecode.lib.utils2 import safe_unicode
29 from rhodecode.lib.vcs.exceptions import (
29 from rhodecode.lib.vcs.exceptions import (
30 EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyExistsError,
30 EmptyRepositoryError, NodeAlreadyAddedError, NodeAlreadyExistsError,
31 NodeAlreadyRemovedError, NodeAlreadyChangedError, NodeDoesNotExistError,
31 NodeAlreadyRemovedError, NodeAlreadyChangedError, NodeDoesNotExistError,
32 NodeNotChangedError)
32 NodeNotChangedError)
33 from rhodecode.lib.vcs.nodes import DirNode, FileNode
33 from rhodecode.lib.vcs.nodes import DirNode, FileNode
34 from rhodecode.tests.vcs.conftest import BackendTestMixin
34 from rhodecode.tests.vcs.conftest import BackendTestMixin
35
35
36
36
37 @pytest.fixture
37 @pytest.fixture
38 def nodes():
38 def nodes():
39 nodes = [
39 nodes = [
40 FileNode('foobar', content='Foo & bar'),
40 FileNode('foobar', content='Foo & bar'),
41 FileNode('foobar2', content='Foo & bar, doubled!'),
41 FileNode('foobar2', content='Foo & bar, doubled!'),
42 FileNode('foo bar with spaces', content=''),
42 FileNode('foo bar with spaces', content=''),
43 FileNode('foo/bar/baz', content='Inside'),
43 FileNode('foo/bar/baz', content='Inside'),
44 FileNode(
44 FileNode(
45 'foo/bar/file.bin',
45 'foo/bar/file.bin',
46 content=(
46 content=(
47 '\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1\x00\x00\x00\x00\x00\x00'
47 '\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1\x00\x00\x00\x00\x00\x00'
48 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00;\x00\x03\x00\xfe'
48 '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00;\x00\x03\x00\xfe'
49 '\xff\t\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
49 '\xff\t\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
50 '\x01\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00'
50 '\x01\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00'
51 '\x00\x18\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00'
51 '\x00\x18\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00'
52 '\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff'
52 '\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff'
53 '\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
53 '\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
54 )
54 )
55 ),
55 ),
56 ]
56 ]
57 return nodes
57 return nodes
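These nodes are fed to the in-memory commit API exercised by the tests below. As a rough usage sketch (assuming `repo` is any backend repository instance; the file name and message are made up):

    imc = repo.in_memory_commit
    imc.add(FileNode('docs/readme.txt', content='hello'))
    commit = imc.commit(message=u'Add readme',
                        author=u'Joe Doe <joe.doe@example.com>')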
58
58
59
59
60 @pytest.mark.usefixtures("vcs_repository_support")
60 @pytest.mark.usefixtures("vcs_repository_support")
61 class TestInMemoryCommit(BackendTestMixin):
61 class TestInMemoryCommit(BackendTestMixin):
62 """
62 """
63 This is a backend-independent test case class which should be created
63 This is a backend-independent test case class which should be created
64 with the ``type`` method.
64 with the ``type`` method.
65
65
66 Subclasses are required to set the following attributes:
66 Subclasses are required to set the following attributes:
67
67
68 - ``backend_alias``: alias of used backend (see ``vcs.BACKENDS``)
68 - ``backend_alias``: alias of used backend (see ``vcs.BACKENDS``)
69 """
69 """
70
70
71 @classmethod
71 @classmethod
72 def _get_commits(cls):
72 def _get_commits(cls):
73 return []
73 return []
74
74
75 def test_add(self, nodes):
75 def test_add(self, nodes):
76 for node in nodes:
76 for node in nodes:
77 self.imc.add(node)
77 self.imc.add(node)
78
78
79 self.commit()
79 self.commit()
80 self.assert_succesful_commit(nodes)
80 self.assert_succesful_commit(nodes)
81
81
82 @pytest.mark.skip_backends(
82 @pytest.mark.backends("hg")
83 'svn', reason="Svn does not support commits on branches.")
83 def test_add_on_branch_hg(self, nodes):
84 def test_add_on_branch(self, nodes):
84 for node in nodes:
85 self.imc.add(node)
86 self.commit(branch=u'stable')
87 self.assert_succesful_commit(nodes)
88
89 @pytest.mark.backends("git")
90 def test_add_on_branch_git(self, nodes):
91 self.repo._checkout('stable', create=True)
92
85 for node in nodes:
93 for node in nodes:
86 self.imc.add(node)
94 self.imc.add(node)
87 self.commit(branch=u'stable')
95 self.commit(branch=u'stable')
88 self.assert_succesful_commit(nodes)
96 self.assert_succesful_commit(nodes)
89
97
90 def test_add_in_bulk(self, nodes):
98 def test_add_in_bulk(self, nodes):
91 self.imc.add(*nodes)
99 self.imc.add(*nodes)
92
100
93 self.commit()
101 self.commit()
94 self.assert_succesful_commit(nodes)
102 self.assert_succesful_commit(nodes)
95
103
96 def test_add_non_ascii_files(self):
104 def test_add_non_ascii_files(self):
97 nodes = [
105 nodes = [
98 FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko_utf8_str', content='Δ‡Δ‡Δ‡Δ‡'),
106 FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko_utf8_str', content='Δ‡Δ‡Δ‡Δ‡'),
99 FileNode(u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_unicode', content=u'Δ‡Δ‡Δ‡Δ‡'),
107 FileNode(u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_unicode', content=u'Δ‡Δ‡Δ‡Δ‡'),
100 ]
108 ]
101
109
102 for node in nodes:
110 for node in nodes:
103 self.imc.add(node)
111 self.imc.add(node)
104
112
105 self.commit()
113 self.commit()
106 self.assert_succesful_commit(nodes)
114 self.assert_succesful_commit(nodes)
107
115
108 def commit(self, branch=None):
116 def commit(self, branch=None):
109 self.old_commit_count = len(self.repo.commit_ids)
117 self.old_commit_count = len(self.repo.commit_ids)
110 self.commit_message = u'Test commit with unicode: ΕΌΓ³Ε‚wik'
118 self.commit_message = u'Test commit with unicode: ΕΌΓ³Ε‚wik'
111 self.commit_author = unicode(self.__class__)
119 self.commit_author = unicode(self.__class__)
112 self.commit = self.imc.commit(
120 self.commit = self.imc.commit(
113 message=self.commit_message, author=self.commit_author,
121 message=self.commit_message, author=self.commit_author,
114 branch=branch)
122 branch=branch)
115
123
116 def test_add_actually_adds_all_nodes_at_second_commit_too(self):
124 def test_add_actually_adds_all_nodes_at_second_commit_too(self):
117 to_add = [
125 to_add = [
118 FileNode('foo/bar/image.png', content='\0'),
126 FileNode('foo/bar/image.png', content='\0'),
119 FileNode('foo/README.txt', content='readme!'),
127 FileNode('foo/README.txt', content='readme!'),
120 ]
128 ]
121 self.imc.add(*to_add)
129 self.imc.add(*to_add)
122 commit = self.imc.commit(u'Initial', u'joe.doe@example.com')
130 commit = self.imc.commit(u'Initial', u'joe.doe@example.com')
123 assert isinstance(commit.get_node('foo'), DirNode)
131 assert isinstance(commit.get_node('foo'), DirNode)
124 assert isinstance(commit.get_node('foo/bar'), DirNode)
132 assert isinstance(commit.get_node('foo/bar'), DirNode)
125 self.assert_nodes_in_commit(commit, to_add)
133 self.assert_nodes_in_commit(commit, to_add)
126
134
127 # commit some more files again
135 # commit some more files again
128 to_add = [
136 to_add = [
129 FileNode('foo/bar/foobaz/bar', content='foo'),
137 FileNode('foo/bar/foobaz/bar', content='foo'),
130 FileNode('foo/bar/another/bar', content='foo'),
138 FileNode('foo/bar/another/bar', content='foo'),
131 FileNode('foo/baz.txt', content='foo'),
139 FileNode('foo/baz.txt', content='foo'),
132 FileNode('foobar/foobaz/file', content='foo'),
140 FileNode('foobar/foobaz/file', content='foo'),
133 FileNode('foobar/barbaz', content='foo'),
141 FileNode('foobar/barbaz', content='foo'),
134 ]
142 ]
135 self.imc.add(*to_add)
143 self.imc.add(*to_add)
136 commit = self.imc.commit(u'Another', u'joe.doe@example.com')
144 commit = self.imc.commit(u'Another', u'joe.doe@example.com')
137 self.assert_nodes_in_commit(commit, to_add)
145 self.assert_nodes_in_commit(commit, to_add)
138
146
139 def test_add_raise_already_added(self):
147 def test_add_raise_already_added(self):
140 node = FileNode('foobar', content='baz')
148 node = FileNode('foobar', content='baz')
141 self.imc.add(node)
149 self.imc.add(node)
142 with pytest.raises(NodeAlreadyAddedError):
150 with pytest.raises(NodeAlreadyAddedError):
143 self.imc.add(node)
151 self.imc.add(node)
144
152
145 def test_check_integrity_raise_already_exist(self):
153 def test_check_integrity_raise_already_exist(self):
146 node = FileNode('foobar', content='baz')
154 node = FileNode('foobar', content='baz')
147 self.imc.add(node)
155 self.imc.add(node)
148 self.imc.commit(message=u'Added foobar', author=unicode(self))
156 self.imc.commit(message=u'Added foobar', author=unicode(self))
149 self.imc.add(node)
157 self.imc.add(node)
150 with pytest.raises(NodeAlreadyExistsError):
158 with pytest.raises(NodeAlreadyExistsError):
151 self.imc.commit(message='new message', author=str(self))
159 self.imc.commit(message='new message', author=str(self))
152
160
153 def test_change(self):
161 def test_change(self):
154 self.imc.add(FileNode('foo/bar/baz', content='foo'))
162 self.imc.add(FileNode('foo/bar/baz', content='foo'))
155 self.imc.add(FileNode('foo/fbar', content='foobar'))
163 self.imc.add(FileNode('foo/fbar', content='foobar'))
156 tip = self.imc.commit(u'Initial', u'joe.doe@example.com')
164 tip = self.imc.commit(u'Initial', u'joe.doe@example.com')
157
165
158 # Change node's content
166 # Change node's content
159 node = FileNode('foo/bar/baz', content='My **changed** content')
167 node = FileNode('foo/bar/baz', content='My **changed** content')
160 self.imc.change(node)
168 self.imc.change(node)
161 self.imc.commit(u'Changed %s' % node.path, u'joe.doe@example.com')
169 self.imc.commit(u'Changed %s' % node.path, u'joe.doe@example.com')
162
170
163 newtip = self.repo.get_commit()
171 newtip = self.repo.get_commit()
164 assert tip != newtip
172 assert tip != newtip
165 assert tip.id != newtip.id
173 assert tip.id != newtip.id
166 self.assert_nodes_in_commit(newtip, (node,))
174 self.assert_nodes_in_commit(newtip, (node,))
167
175
168 def test_change_non_ascii(self):
176 def test_change_non_ascii(self):
169 to_add = [
177 to_add = [
170 FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko', content='Δ‡Δ‡Δ‡Δ‡'),
178 FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko', content='Δ‡Δ‡Δ‡Δ‡'),
171 FileNode(u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni', content=u'Δ‡Δ‡Δ‡Δ‡'),
179 FileNode(u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni', content=u'Δ‡Δ‡Δ‡Δ‡'),
172 ]
180 ]
173 for node in to_add:
181 for node in to_add:
174 self.imc.add(node)
182 self.imc.add(node)
175
183
176 tip = self.imc.commit(u'Initial', u'joe.doe@example.com')
184 tip = self.imc.commit(u'Initial', u'joe.doe@example.com')
177
185
178 # Change node's content
186 # Change node's content
179 node = FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko', content='My **changed** content')
187 node = FileNode('ΕΌΓ³Ε‚wik/zwierzΔ…tko', content='My **changed** content')
180 self.imc.change(node)
188 self.imc.change(node)
181 self.imc.commit(u'Changed %s' % safe_unicode(node.path),
189 self.imc.commit(u'Changed %s' % safe_unicode(node.path),
182 u'joe.doe@example.com')
190 u'joe.doe@example.com')
183
191
184 node_uni = FileNode(
192 node_uni = FileNode(
185 u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni', content=u'My **changed** content')
193 u'ΕΌΓ³Ε‚wik/zwierzΔ…tko_uni', content=u'My **changed** content')
186 self.imc.change(node_uni)
194 self.imc.change(node_uni)
187 self.imc.commit(u'Changed %s' % safe_unicode(node_uni.path),
195 self.imc.commit(u'Changed %s' % safe_unicode(node_uni.path),
188 u'joe.doe@example.com')
196 u'joe.doe@example.com')
189
197
190 newtip = self.repo.get_commit()
198 newtip = self.repo.get_commit()
191 assert tip != newtip
199 assert tip != newtip
192 assert tip.id != newtip.id
200 assert tip.id != newtip.id
193
201
194 self.assert_nodes_in_commit(newtip, (node, node_uni))
202 self.assert_nodes_in_commit(newtip, (node, node_uni))
195
203
196 def test_change_raise_empty_repository(self):
204 def test_change_raise_empty_repository(self):
197 node = FileNode('foobar')
205 node = FileNode('foobar')
198 with pytest.raises(EmptyRepositoryError):
206 with pytest.raises(EmptyRepositoryError):
199 self.imc.change(node)
207 self.imc.change(node)
200
208
201 def test_check_integrity_change_raise_node_does_not_exist(self):
209 def test_check_integrity_change_raise_node_does_not_exist(self):
202 node = FileNode('foobar', content='baz')
210 node = FileNode('foobar', content='baz')
203 self.imc.add(node)
211 self.imc.add(node)
204 self.imc.commit(message=u'Added foobar', author=unicode(self))
212 self.imc.commit(message=u'Added foobar', author=unicode(self))
205 node = FileNode('not-foobar', content='')
213 node = FileNode('not-foobar', content='')
206 self.imc.change(node)
214 self.imc.change(node)
207 with pytest.raises(NodeDoesNotExistError):
215 with pytest.raises(NodeDoesNotExistError):
208 self.imc.commit(
216 self.imc.commit(
209 message='Changed not existing node',
217 message='Changed not existing node',
210 author=str(self))
218 author=str(self))
211
219
212 def test_change_raise_node_already_changed(self):
220 def test_change_raise_node_already_changed(self):
213 node = FileNode('foobar', content='baz')
221 node = FileNode('foobar', content='baz')
214 self.imc.add(node)
222 self.imc.add(node)
215 self.imc.commit(message=u'Added foobar', author=unicode(self))
223 self.imc.commit(message=u'Added foobar', author=unicode(self))
216 node = FileNode('foobar', content='more baz')
224 node = FileNode('foobar', content='more baz')
217 self.imc.change(node)
225 self.imc.change(node)
218 with pytest.raises(NodeAlreadyChangedError):
226 with pytest.raises(NodeAlreadyChangedError):
219 self.imc.change(node)
227 self.imc.change(node)
220
228
221 def test_check_integrity_change_raise_node_not_changed(self, nodes):
229 def test_check_integrity_change_raise_node_not_changed(self, nodes):
222 self.test_add(nodes) # Performs first commit
230 self.test_add(nodes) # Performs first commit
223
231
224 node = FileNode(nodes[0].path, content=nodes[0].content)
232 node = FileNode(nodes[0].path, content=nodes[0].content)
225 self.imc.change(node)
233 self.imc.change(node)
226 with pytest.raises(NodeNotChangedError):
234 with pytest.raises(NodeNotChangedError):
227 self.imc.commit(
235 self.imc.commit(
228 message=u'Trying to mark node as changed without touching it',
236 message=u'Trying to mark node as changed without touching it',
229 author=unicode(self))
237 author=unicode(self))
230
238
231 def test_change_raise_node_already_removed(self):
239 def test_change_raise_node_already_removed(self):
232 node = FileNode('foobar', content='baz')
240 node = FileNode('foobar', content='baz')
233 self.imc.add(node)
241 self.imc.add(node)
234 self.imc.commit(message=u'Added foobar', author=unicode(self))
242 self.imc.commit(message=u'Added foobar', author=unicode(self))
235 self.imc.remove(FileNode('foobar'))
243 self.imc.remove(FileNode('foobar'))
236 with pytest.raises(NodeAlreadyRemovedError):
244 with pytest.raises(NodeAlreadyRemovedError):
237 self.imc.change(node)
245 self.imc.change(node)
238
246
239 def test_remove(self, nodes):
247 def test_remove(self, nodes):
240 self.test_add(nodes) # Performs first commit
248 self.test_add(nodes) # Performs first commit
241
249
242 tip = self.repo.get_commit()
250 tip = self.repo.get_commit()
243 node = nodes[0]
251 node = nodes[0]
244 assert node.content == tip.get_node(node.path).content
252 assert node.content == tip.get_node(node.path).content
245 self.imc.remove(node)
253 self.imc.remove(node)
246 self.imc.commit(
254 self.imc.commit(
247 message=u'Removed %s' % node.path, author=unicode(self))
255 message=u'Removed %s' % node.path, author=unicode(self))
248
256
249 newtip = self.repo.get_commit()
257 newtip = self.repo.get_commit()
250 assert tip != newtip
258 assert tip != newtip
251 assert tip.id != newtip.id
259 assert tip.id != newtip.id
252 with pytest.raises(NodeDoesNotExistError):
260 with pytest.raises(NodeDoesNotExistError):
253 newtip.get_node(node.path)
261 newtip.get_node(node.path)
254
262
255 def test_remove_last_file_from_directory(self):
263 def test_remove_last_file_from_directory(self):
256 node = FileNode('omg/qwe/foo/bar', content='foobar')
264 node = FileNode('omg/qwe/foo/bar', content='foobar')
257 self.imc.add(node)
265 self.imc.add(node)
258 self.imc.commit(u'added', u'joe doe')
266 self.imc.commit(u'added', u'joe doe')
259
267
260 self.imc.remove(node)
268 self.imc.remove(node)
261 tip = self.imc.commit(u'removed', u'joe doe')
269 tip = self.imc.commit(u'removed', u'joe doe')
262 with pytest.raises(NodeDoesNotExistError):
270 with pytest.raises(NodeDoesNotExistError):
263 tip.get_node('omg/qwe/foo/bar')
271 tip.get_node('omg/qwe/foo/bar')
264
272
265 def test_remove_raise_node_does_not_exist(self, nodes):
273 def test_remove_raise_node_does_not_exist(self, nodes):
266 self.imc.remove(nodes[0])
274 self.imc.remove(nodes[0])
267 with pytest.raises(NodeDoesNotExistError):
275 with pytest.raises(NodeDoesNotExistError):
268 self.imc.commit(
276 self.imc.commit(
269 message='Trying to remove node at empty repository',
277 message='Trying to remove node at empty repository',
270 author=str(self))
278 author=str(self))
271
279
272 def test_check_integrity_remove_raise_node_does_not_exist(self, nodes):
280 def test_check_integrity_remove_raise_node_does_not_exist(self, nodes):
273 self.test_add(nodes) # Performs first commit
281 self.test_add(nodes) # Performs first commit
274
282
275 node = FileNode('no-such-file')
283 node = FileNode('no-such-file')
276 self.imc.remove(node)
284 self.imc.remove(node)
277 with pytest.raises(NodeDoesNotExistError):
285 with pytest.raises(NodeDoesNotExistError):
278 self.imc.commit(
286 self.imc.commit(
279 message=u'Trying to remove not existing node',
287 message=u'Trying to remove not existing node',
280 author=unicode(self))
288 author=unicode(self))
281
289
282 def test_remove_raise_node_already_removed(self, nodes):
290 def test_remove_raise_node_already_removed(self, nodes):
283 self.test_add(nodes) # Performs first commit
291 self.test_add(nodes) # Performs first commit
284
292
285 node = FileNode(nodes[0].path)
293 node = FileNode(nodes[0].path)
286 self.imc.remove(node)
294 self.imc.remove(node)
287 with pytest.raises(NodeAlreadyRemovedError):
295 with pytest.raises(NodeAlreadyRemovedError):
288 self.imc.remove(node)
296 self.imc.remove(node)
289
297
290 def test_remove_raise_node_already_changed(self, nodes):
298 def test_remove_raise_node_already_changed(self, nodes):
291 self.test_add(nodes) # Performs first commit
299 self.test_add(nodes) # Performs first commit
292
300
293 node = FileNode(nodes[0].path, content='Bending time')
301 node = FileNode(nodes[0].path, content='Bending time')
294 self.imc.change(node)
302 self.imc.change(node)
295 with pytest.raises(NodeAlreadyChangedError):
303 with pytest.raises(NodeAlreadyChangedError):
296 self.imc.remove(node)
304 self.imc.remove(node)
297
305
298 def test_reset(self):
306 def test_reset(self):
299 self.imc.add(FileNode('foo', content='bar'))
307 self.imc.add(FileNode('foo', content='bar'))
300 # self.imc.change(FileNode('baz', content='new'))
308 # self.imc.change(FileNode('baz', content='new'))
301 # self.imc.remove(FileNode('qwe'))
309 # self.imc.remove(FileNode('qwe'))
302 self.imc.reset()
310 self.imc.reset()
303 assert not any((self.imc.added, self.imc.changed, self.imc.removed))
311 assert not any((self.imc.added, self.imc.changed, self.imc.removed))
304
312
305 def test_multiple_commits(self):
313 def test_multiple_commits(self):
306 N = 3 # number of commits to perform
314 N = 3 # number of commits to perform
307 last = None
315 last = None
308 for x in xrange(N):
316 for x in xrange(N):
309 fname = 'file%s' % str(x).rjust(5, '0')
317 fname = 'file%s' % str(x).rjust(5, '0')
310 content = 'foobar\n' * x
318 content = 'foobar\n' * x
311 node = FileNode(fname, content=content)
319 node = FileNode(fname, content=content)
312 self.imc.add(node)
320 self.imc.add(node)
313 commit = self.imc.commit(u"Commit no. %s" % (x + 1), author=u'vcs')
321 commit = self.imc.commit(u"Commit no. %s" % (x + 1), author=u'vcs')
314 assert last != commit
322 assert last != commit
315 last = commit
323 last = commit
316
324
317 # Check commit number for same repo
325 # Check commit number for same repo
318 assert len(self.repo.commit_ids) == N
326 assert len(self.repo.commit_ids) == N
319
327
320 # Check commit number for recreated repo
328 # Check commit number for recreated repo
321 repo = self.Backend(self.repo_path)
329 repo = self.Backend(self.repo_path)
322 assert len(repo.commit_ids) == N
330 assert len(repo.commit_ids) == N
323
331
324 def test_date_attr(self, local_dt_to_utc):
332 def test_date_attr(self, local_dt_to_utc):
325 node = FileNode('foobar.txt', content='Foobared!')
333 node = FileNode('foobar.txt', content='Foobared!')
326 self.imc.add(node)
334 self.imc.add(node)
327 date = datetime.datetime(1985, 1, 30, 1, 45)
335 date = datetime.datetime(1985, 1, 30, 1, 45)
328 commit = self.imc.commit(
336 commit = self.imc.commit(
329 u"Committed at time when I was born ;-)",
337 u"Committed at time when I was born ;-)",
330 author=u'lb', date=date)
338 author=u'lb', date=date)
331
339
332 assert commit.date == local_dt_to_utc(date)
340 assert commit.date == local_dt_to_utc(date)
333
341
334 def assert_succesful_commit(self, added_nodes):
342 def assert_succesful_commit(self, added_nodes):
335 newtip = self.repo.get_commit()
343 newtip = self.repo.get_commit()
336 assert self.commit == newtip
344 assert self.commit == newtip
337 assert self.old_commit_count + 1 == len(self.repo.commit_ids)
345 assert self.old_commit_count + 1 == len(self.repo.commit_ids)
338 assert newtip.message == self.commit_message
346 assert newtip.message == self.commit_message
339 assert newtip.author == self.commit_author
347 assert newtip.author == self.commit_author
340 assert not any((self.imc.added, self.imc.changed, self.imc.removed))
348 assert not any((self.imc.added, self.imc.changed, self.imc.removed))
341 self.assert_nodes_in_commit(newtip, added_nodes)
349 self.assert_nodes_in_commit(newtip, added_nodes)
342
350
343 def assert_nodes_in_commit(self, commit, nodes):
351 def assert_nodes_in_commit(self, commit, nodes):
344 for node in nodes:
352 for node in nodes:
345 assert commit.get_node(node.path).content == node.content
353 assert commit.get_node(node.path).content == node.content