##// END OF EJS Templates
pull-requests: expose unresolved files in merge response.
marcink -
r4080:df62e32a default
parent child Browse files
Show More
@@ -1,1899 +1,1899 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24 import os
24 import os
25 import re
25 import re
26 import time
26 import time
27 import shutil
27 import shutil
28 import datetime
28 import datetime
29 import fnmatch
29 import fnmatch
30 import itertools
30 import itertools
31 import logging
31 import logging
32 import collections
32 import collections
33 import warnings
33 import warnings
34
34
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36
36
37 from pyramid import compat
37 from pyramid import compat
38
38
39 import rhodecode
39 import rhodecode
40 from rhodecode.translation import lazy_ugettext
40 from rhodecode.translation import lazy_ugettext
41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
42 from rhodecode.lib.vcs import connection
42 from rhodecode.lib.vcs import connection
43 from rhodecode.lib.vcs.utils import author_name, author_email
43 from rhodecode.lib.vcs.utils import author_name, author_email
44 from rhodecode.lib.vcs.conf import settings
44 from rhodecode.lib.vcs.conf import settings
45 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
50 RepositoryError)
50 RepositoryError)
51
51
52
52
53 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
54
54
55
55
56 FILEMODE_DEFAULT = 0o100644
56 FILEMODE_DEFAULT = 0o100644
57 FILEMODE_EXECUTABLE = 0o100755
57 FILEMODE_EXECUTABLE = 0o100755
58 EMPTY_COMMIT_ID = '0' * 40
58 EMPTY_COMMIT_ID = '0' * 40
59
59
60 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
60 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
61
61
62
62
class MergeFailureReason(object):
    """
    Enumeration with all the reasons why the server side merge could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The merge was not successful, there are conflicts.
    MERGE_FAILED = 2

    # The merge succeeded but we could not push it to the target repository.
    PUSH_FAILED = 3

    # The specified target is not a head in the target repository.
    TARGET_IS_NOT_HEAD = 4

    # The source repository contains more branches than the target. Pushing
    # the merge will create additional branches in the target.
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # The target reference has multiple heads. That does not allow to correctly
    # identify the target location. This could only happen for mercurial
    # branches.
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # The target repository is locked
    TARGET_IS_LOCKED = 7

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # A involved commit could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    # The target repo reference is missing.
    MISSING_TARGET_REF = 9

    # The source repo reference is missing.
    MISSING_SOURCE_REF = 10

    # The merge was not successful, there are conflicts related to sub
    # repositories.
    SUBREPO_MERGE_FAILED = 11
114
114
115
115
class UpdateFailureReason(object):
    """
    Enumeration with all the reasons why the pull request update could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The pull request is up to date.
    NO_CHANGE = 2

    # The pull request has a reference type that is not supported for update.
    WRONG_REF_TYPE = 3

    # Update failed because the target reference is missing.
    MISSING_TARGET_REF = 4

    # Update failed because the source reference is missing.
    MISSING_SOURCE_REF = 5
144
144
145
145
class MergeResponse(object):
    """
    Value object describing the outcome of a server side merge attempt.

    Carries whether the merge is ``possible``, whether it was ``executed``,
    the resulting ``merge_ref``, a ``failure_reason`` code from
    :class:`MergeFailureReason` and optional ``metadata`` used to interpolate
    the human readable status message (e.g. ``unresolved_files`` for
    MERGE_FAILED).
    """

    # uses .format(**metadata) for variables
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            u'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            u'This pull request cannot be merged because of an unhandled exception. '
            u'{exception}'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            u'This pull request could not be merged because push to '
            u'target:`{target}@{merge_commit}` failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'`{target_ref.name}` is not a head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            u'This pull request cannot be merged because the source contains '
            u'more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            u'This pull request cannot be merged because the target `{target_ref.name}` '
            u'has multiple heads: `{heads}`.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            u'This pull request cannot be merged because the target repository is '
            u'locked by {locked_by}.'),

        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'reference `{target_ref.name}` is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            u'This pull request cannot be merged because the source '
            u'reference `{source_ref.name}` is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of conflicts related '
            u'to sub repositories.'),

        # Deprecations
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            u'This pull request cannot be merged because the target or the '
            u'source reference is missing.'),

    }

    def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
        self.possible = possible
        self.executed = executed
        self.merge_ref = merge_ref
        self.failure_reason = failure_reason
        self.metadata = metadata or {}

    def __repr__(self):
        return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)

    def __eq__(self, other):
        same_instance = isinstance(other, self.__class__)
        return same_instance \
            and self.possible == other.possible \
            and self.executed == other.executed \
            and self.failure_reason == other.failure_reason

    def __ne__(self, other):
        # Python 2 does not derive `!=` from __eq__; without this, inequality
        # falls back to identity comparison. Mirrors BaseRepository, which
        # defines both operators.
        return not self.__eq__(other)

    @property
    def label(self):
        """Symbolic name of the failure reason, e.g. ``'MERGE_FAILED'``."""
        label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
                          not k.startswith('_'))
        return label_dict.get(self.failure_reason)

    @property
    def merge_status_message(self):
        """
        Return a human friendly error message for the given merge status code.
        """
        msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
        try:
            return msg.format(**self.metadata)
        except Exception:
            # missing metadata keys must not break rendering; return the
            # un-interpolated template instead
            log.exception('Failed to format %s message', self)
            return msg

    def asdict(self):
        """Serialize the response to a plain dict (e.g. for JSON APIs)."""
        data = {}
        for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
                  'merge_status_message']:
            data[k] = getattr(self, k)
        return data
231
231
232
232
class BaseRepository(object):
    """
    Base Repository for final backends

    .. attribute:: DEFAULT_BRANCH_NAME

       name of default branch (i.e. "trunk" for svn, "master" for git etc.

    .. attribute:: commit_ids

       list of all available commit ids, in ascending order

    .. attribute:: path

       absolute path to the repository

    .. attribute:: bookmarks

       Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
       there are no bookmarks or the backend implementation does not support
       bookmarks.

    .. attribute:: tags

       Mapping from name to :term:`Commit ID` of the tag.

    """

    # overridden per backend (e.g. "master" for git, "default" for hg)
    DEFAULT_BRANCH_NAME = None
    DEFAULT_CONTACT = u"Unknown"
    DEFAULT_DESCRIPTION = u"unknown"
    # all-zero hash used as the "no commit" sentinel
    EMPTY_COMMIT_ID = '0' * 40

    # absolute filesystem path; set by the backend's __init__
    path = None

    # cached emptiness flag; None means "not determined yet" (see is_empty())
    _is_empty = None
    # commit-id lookup cache -- presumably populated by backend subclasses;
    # NOTE(review): verify exact key/value semantics against a concrete backend
    _commit_ids = {}
270
270
    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if repository could
        not be find at the given ``repo_path`` or directory at ``repo_path``
        exists and ``create`` is set to True.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, would try to create repository.
        :param src_url=None: if set, should be proper url from which repository
          would be cloned; requires ``create`` parameter to be set to True -
          raises RepositoryError if src_url is set and create evaluates to
          False
        """
        # abstract: each backend (git/hg/svn) provides its own constructor
        raise NotImplementedError
286
286
287 def __repr__(self):
287 def __repr__(self):
288 return '<%s at %s>' % (self.__class__.__name__, self.path)
288 return '<%s at %s>' % (self.__class__.__name__, self.path)
289
289
    def __len__(self):
        # number of commits in the repository
        return self.count()
292
292
293 def __eq__(self, other):
293 def __eq__(self, other):
294 same_instance = isinstance(other, self.__class__)
294 same_instance = isinstance(other, self.__class__)
295 return same_instance and other.path == self.path
295 return same_instance and other.path == self.path
296
296
    def __ne__(self, other):
        # Python 2 does not derive `!=` from __eq__, so invert it explicitly.
        return not self.__eq__(other)
299
299
300 def get_create_shadow_cache_pr_path(self, db_repo):
300 def get_create_shadow_cache_pr_path(self, db_repo):
301 path = db_repo.cached_diffs_dir
301 path = db_repo.cached_diffs_dir
302 if not os.path.exists(path):
302 if not os.path.exists(path):
303 os.makedirs(path, 0o755)
303 os.makedirs(path, 0o755)
304 return path
304 return path
305
305
306 @classmethod
306 @classmethod
307 def get_default_config(cls, default=None):
307 def get_default_config(cls, default=None):
308 config = Config()
308 config = Config()
309 if default and isinstance(default, list):
309 if default and isinstance(default, list):
310 for section, key, val in default:
310 for section, key, val in default:
311 config.set(section, key, val)
311 config.set(section, key, val)
312 return config
312 return config
313
313
    @LazyProperty
    def _remote(self):
        # abstract: backend-specific remote/vcsserver proxy object
        raise NotImplementedError
317
317
    def _heads(self, branch=None):
        # default: no head detection; backends override where meaningful
        return []
320
320
    @LazyProperty
    def EMPTY_COMMIT(self):
        # sentinel commit object representing "no commit" (all-zero id)
        return EmptyCommit(self.EMPTY_COMMIT_ID)
324
324
325 @LazyProperty
325 @LazyProperty
326 def alias(self):
326 def alias(self):
327 for k, v in settings.BACKENDS.items():
327 for k, v in settings.BACKENDS.items():
328 if v.split('.')[-1] == str(self.__class__.__name__):
328 if v.split('.')[-1] == str(self.__class__.__name__):
329 return k
329 return k
330
330
    @LazyProperty
    def name(self):
        # repository name derived from the last path component, as unicode
        return safe_unicode(os.path.basename(self.path))
334
334
    @LazyProperty
    def description(self):
        # abstract: backend-specific repository description
        raise NotImplementedError
338
338
339 def refs(self):
339 def refs(self):
340 """
340 """
341 returns a `dict` with branches, bookmarks, tags, and closed_branches
341 returns a `dict` with branches, bookmarks, tags, and closed_branches
342 for this repository
342 for this repository
343 """
343 """
344 return dict(
344 return dict(
345 branches=self.branches,
345 branches=self.branches,
346 branches_closed=self.branches_closed,
346 branches_closed=self.branches_closed,
347 tags=self.tags,
347 tags=self.tags,
348 bookmarks=self.bookmarks
348 bookmarks=self.bookmarks
349 )
349 )
350
350
    @LazyProperty
    def branches(self):
        """
        A `dict` which maps branch names to commit ids.
        """
        raise NotImplementedError
357
357
    @LazyProperty
    def branches_closed(self):
        """
        A `dict` which maps closed branch names to commit ids.
        """
        raise NotImplementedError
364
364
    @LazyProperty
    def bookmarks(self):
        """
        A `dict` which maps bookmark names to commit ids.
        """
        raise NotImplementedError
371
371
    @LazyProperty
    def tags(self):
        """
        A `dict` which maps tags names to commit ids.
        """
        raise NotImplementedError
378
378
379 @LazyProperty
379 @LazyProperty
380 def size(self):
380 def size(self):
381 """
381 """
382 Returns combined size in bytes for all repository files
382 Returns combined size in bytes for all repository files
383 """
383 """
384 tip = self.get_commit()
384 tip = self.get_commit()
385 return tip.size
385 return tip.size
386
386
387 def size_at_commit(self, commit_id):
387 def size_at_commit(self, commit_id):
388 commit = self.get_commit(commit_id)
388 commit = self.get_commit(commit_id)
389 return commit.size
389 return commit.size
390
390
391 def _check_for_empty(self):
391 def _check_for_empty(self):
392 no_commits = len(self._commit_ids) == 0
392 no_commits = len(self._commit_ids) == 0
393 if no_commits:
393 if no_commits:
394 # check on remote to be sure
394 # check on remote to be sure
395 return self._remote.is_empty()
395 return self._remote.is_empty()
396 else:
396 else:
397 return False
397 return False
398
398
399 def is_empty(self):
399 def is_empty(self):
400 if rhodecode.is_test:
400 if rhodecode.is_test:
401 return self._check_for_empty()
401 return self._check_for_empty()
402
402
403 if self._is_empty is None:
403 if self._is_empty is None:
404 # cache empty for production, but not tests
404 # cache empty for production, but not tests
405 self._is_empty = self._check_for_empty()
405 self._is_empty = self._check_for_empty()
406
406
407 return self._is_empty
407 return self._is_empty
408
408
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link.
        """
        # abstract: verification is protocol/backend specific
        raise NotImplementedError
416
416
    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend
        """
        raise NotImplementedError
423
423
424 # ==========================================================================
424 # ==========================================================================
425 # COMMITS
425 # COMMITS
426 # ==========================================================================
426 # ==========================================================================
427
427
    @CachedProperty
    def commit_ids(self):
        # abstract: list of all commit ids in ascending order (see class doc)
        raise NotImplementedError
431
431
    def append_commit_id(self, commit_id):
        # Register a newly created commit id and refresh cached state.
        if commit_id not in self.commit_ids:
            self._rebuild_cache(self.commit_ids + [commit_id])

            # clear cache
            self._invalidate_prop_cache('commit_ids')
            self._is_empty = False
439
439
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, most recent commit is returned.

        :param pre_load: Optional. List of commit attributes to load.

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        raise NotImplementedError
450
450
451 def __iter__(self):
451 def __iter__(self):
452 for commit_id in self.commit_ids:
452 for commit_id in self.commit_ids:
453 yield self.get_commit(commit_id=commit_id)
453 yield self.get_commit(commit_id=commit_id)
454
454
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns iterator of `BaseCommit` objects from start to end
        not inclusive. This should behave just like a list, ie. end is not
        inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date:
        :param end_date:
        :param branch_name:
        :param show_hidden:
        :param pre_load:
        :param translate_tags:
        """
        raise NotImplementedError
473
473
    def __getitem__(self, key):
        """
        Allows index based access to the commit objects of this repository.
        """
        # commonly needed attributes, fetched eagerly to avoid per-field calls
        pre_load = ["author", "branch", "date", "message", "parents"]
        if isinstance(key, slice):
            # slices yield a lazy generator of commits, not a list
            return self._get_range(key, pre_load)
        return self.get_commit(commit_idx=key, pre_load=pre_load)
482
482
483 def _get_range(self, slice_obj, pre_load):
483 def _get_range(self, slice_obj, pre_load):
484 for commit_id in self.commit_ids.__getitem__(slice_obj):
484 for commit_id in self.commit_ids.__getitem__(slice_obj):
485 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
485 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
486
486
    def count(self):
        # total number of commits
        return len(self.commit_ids)
489
489
    def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        raise NotImplementedError
503
503
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        raise NotImplementedError
516
516
def get_diff(
        self, commit1, commit2, path=None, ignore_whitespace=False,
        context=3, path1=None):
    """
    Returns (git like) *diff*, as plain text. Shows changes introduced by
    `commit2` since `commit1`.

    Abstract; concrete backends must override this.

    :param commit1: Entry point from which diff is shown. Can be
        ``self.EMPTY_COMMIT`` - in this case, patch showing all
        the changes since empty state of the repository until `commit2`
    :param commit2: Until which commit changes should be shown.
    :param path: Can be set to a path of a file to create a diff of that
        file. If `path1` is also set, this value is only associated to
        `commit2`.
    :param ignore_whitespace: If set to ``True``, would not show whitespace
        changes. Defaults to ``False``.
    :param context: How many lines before/after changed lines should be
        shown. Defaults to ``3``.
    :param path1: Can be set to a path to associate with `commit1`. This
        parameter works only for backends which support diff generation for
        different paths. Other backends will raise a `ValueError` if `path1`
        is set and has a different value than `path`.
    """
    # NOTE: a stray ':param file_path:' entry was removed from this
    # docstring -- the signature has no such parameter; `path` is the
    # argument that filters the diff by path.
    raise NotImplementedError
542
542
def strip(self, commit_id, branch=None):
    """
    Strip (remove) the given ``commit_id`` from this repository.

    Abstract; concrete backends must override this.
    """
    raise NotImplementedError
548
548
def get_common_ancestor(self, commit_id1, commit_id2, repo2):
    """
    Return a latest common ancestor commit if one exists for this repo
    `commit_id1` vs `commit_id2` from `repo2`.

    Abstract; concrete backends must override this.

    :param commit_id1: Commit id from this repository to use as a
        target for the comparison.
    :param commit_id2: Source commit id to use for comparison.
    :param repo2: Source repository to use for comparison.
    """
    # docstring fix: "Commit it" -> "Commit id"
    raise NotImplementedError
560
560
def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
    """
    Compare this repository's revision `commit_id1` with `commit_id2`.

    Returns a tuple(commits, ancestor) that would be merged from
    `commit_id2`. Doing a normal compare (``merge=False``), ``None``
    will be returned as ancestor.

    Abstract; concrete backends must override this.

    :param commit_id1: Commit id from this repository to use as a
        target for the comparison.
    :param commit_id2: Source commit id to use for comparison.
    :param repo2: Source repository to use for comparison.
    :param merge: If set to ``True`` will do a merge compare which also
        returns the common ancestor.
    :param pre_load: Optional. List of commit attributes to load.
    """
    # docstring fix: "Commit it" -> "Commit id"
    raise NotImplementedError
578
578
def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
          user_name='', user_email='', message='', dry_run=False,
          use_rebase=False, close_branch=False):
    """
    Merge the revisions specified in `source_ref` from `source_repo`
    onto the `target_ref` of this repository.

    `source_ref` and `target_ref` are named tuples with the following
    fields `type`, `name` and `commit_id`.

    Returns a MergeResponse named tuple with the following fields
    'possible', 'executed', 'source_commit', 'target_commit',
    'merge_commit'.

    :param repo_id: `repo_id` target repo id.
    :param workspace_id: `workspace_id` unique identifier.
    :param target_ref: `target_ref` points to the commit on top of which
        the `source_ref` should be merged.
    :param source_repo: The repository that contains the commits to be
        merged.
    :param source_ref: `source_ref` points to the topmost commit from
        the `source_repo` which should be merged.
    :param user_name: Merge commit `user_name`.
    :param user_email: Merge commit `user_email`.
    :param message: Merge commit `message`.
    :param dry_run: If `True` the merge will not take place.
    :param use_rebase: If `True` commits from the source will be rebased
        on top of the target instead of being merged.
    :param close_branch: If `True` branch will be closed before merging it

    :raises ValueError: if `user_name`, `user_email` or `message` is
        empty while `dry_run` is ``False``.
    """
    if dry_run:
        # a dry-run merge never creates a real commit, so placeholder
        # identity/message values are acceptable
        message = message or settings.MERGE_DRY_RUN_MESSAGE
        user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
        user_name = user_name or settings.MERGE_DRY_RUN_USER
    else:
        if not user_name:
            raise ValueError('user_name cannot be empty')
        if not user_email:
            raise ValueError('user_email cannot be empty')
        if not message:
            raise ValueError('message cannot be empty')

    try:
        return self._merge_repo(
            repo_id, workspace_id, target_ref, source_repo,
            source_ref, message, user_name, user_email, dry_run=dry_run,
            use_rebase=use_rebase, close_branch=close_branch)
    except RepositoryError as exc:
        log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
        # backend merge errors are never propagated to callers; they are
        # reported as a failed MergeResponse instead
        return MergeResponse(
            False, False, None, MergeFailureReason.UNKNOWN,
            metadata={'exception': str(exc)})
631
631
def _merge_repo(self, repo_id, workspace_id, target_ref,
                source_repo, source_ref, merge_message,
                merger_name, merger_email, dry_run=False,
                use_rebase=False, close_branch=False):
    """
    Internal implementation of merge.

    Called by :meth:`merge` after input validation; backends implement
    the actual VCS-specific merge here.
    """
    raise NotImplementedError
638
638
def _maybe_prepare_merge_workspace(
        self, repo_id, workspace_id, target_ref, source_ref):
    """
    Create the merge workspace if it does not already exist.

    :param workspace_id: `workspace_id` unique identifier.
    """
    raise NotImplementedError
647
647
@classmethod
def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
    """
    Build the pre-`repo_id` style shadow repository path.

    Kept only for backward compatibility with workspaces created by
    older versions.
    """
    base_dir = os.path.dirname(repo_path)
    repo_name = os.path.basename(repo_path)
    return os.path.join(
        base_dir, '.__shadow_%s_%s' % (repo_name, workspace_id))
657
657
@classmethod
def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
    """
    Resolve the on-disk path of the shadow repository for a workspace.

    The name of the shadow repository must start with '.', so it is
    skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
    """
    legacy_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
    # prefer an already-existing legacy-named workspace for backward compat
    if os.path.exists(legacy_path):
        return legacy_path
    return os.path.join(
        os.path.dirname(repo_path),
        '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
669
669
def cleanup_merge_workspace(self, repo_id, workspace_id):
    """
    Remove merge workspace.

    This function MUST not fail in case there is no workspace associated to
    the given `workspace_id`.

    :param repo_id: `repo_id` target repo id.
    :param workspace_id: `workspace_id` unique identifier.
    """
    shadow_repository_path = self._get_shadow_repository_path(
        self.path, repo_id, workspace_id)
    # timestamped rename target, so a concurrent re-creation of the same
    # workspace cannot collide with the directory being deleted
    shadow_repository_path_del = '{}.{}.delete'.format(
        shadow_repository_path, time.time())

    # move the shadow repo, so it never conflicts with the one used.
    # we use this method because shutil.rmtree had some edge case problems
    # removing symlinked repositories
    if not os.path.isdir(shadow_repository_path):
        # nothing to clean up -- must not fail per the contract above
        return

    shutil.move(shadow_repository_path, shadow_repository_path_del)
    try:
        shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
    except Exception:
        log.exception('Failed to gracefully remove shadow repo under %s',
                      shadow_repository_path_del)
        # best-effort fallback: any leftover dir is hidden from repo
        # scanning by its leading '.'
        shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
697
697
698 # ========== #
698 # ========== #
699 # COMMIT API #
699 # COMMIT API #
700 # ========== #
700 # ========== #
701
701
@LazyProperty
def in_memory_commit(self):
    """
    :class:`InMemoryCommit` object for this repository.

    Abstract; concrete backends must override this.
    """
    raise NotImplementedError
708
708
709 # ======================== #
709 # ======================== #
710 # UTILITIES FOR SUBCLASSES #
710 # UTILITIES FOR SUBCLASSES #
711 # ======================== #
711 # ======================== #
712
712
def _validate_diff_commits(self, commit1, commit2):
    """
    Validates that the given commits are related to this repository.

    Intended as a utility for sub classes to have a consistent validation
    of input parameters in methods like :meth:`get_diff`.
    """
    for commit in (commit1, commit2):
        self._validate_commit(commit)
    both_empty = (
        isinstance(commit1, EmptyCommit) and isinstance(commit2, EmptyCommit))
    if both_empty:
        raise ValueError("Cannot compare two empty commits")
725
725
def _validate_commit(self, commit):
    """Raise unless `commit` is a BaseCommit belonging to this repository."""
    if not isinstance(commit, BaseCommit):
        raise TypeError(
            "%s is not of type BaseCommit" % repr(commit))
    # EmptyCommit instances are repository-less and always accepted
    foreign = (
        commit.repository != self and not isinstance(commit, EmptyCommit))
    if foreign:
        raise ValueError(
            "Commit %s must be a valid commit from this repository %s, "
            "related to this repository instead %s." %
            (commit, self, commit.repository))
735
735
def _validate_commit_id(self, commit_id):
    """Raise TypeError unless `commit_id` is a string."""
    if isinstance(commit_id, compat.string_types):
        return
    raise TypeError("commit_id must be a string value got {} instead".format(type(commit_id)))
739
739
def _validate_commit_idx(self, commit_idx):
    """Raise TypeError unless `commit_idx` is an integer (Py2 int/long)."""
    if isinstance(commit_idx, (int, long)):
        return
    raise TypeError("commit_idx must be a numeric value")
743
743
def _validate_branch_name(self, branch_name):
    """Raise BranchDoesNotExistError if `branch_name` is set but unknown."""
    if not branch_name:
        # falsy branch names (None/'') are treated as "no filter"
        return
    if branch_name not in self.branches_all:
        msg = ("Branch %s not found in %s" % (branch_name, self))
        raise BranchDoesNotExistError(msg)
748
748
749 #
749 #
750 # Supporting deprecated API parts
750 # Supporting deprecated API parts
751 # TODO: johbo: consider to move this into a mixin
751 # TODO: johbo: consider to move this into a mixin
752 #
752 #
753
753
@property
def EMPTY_CHANGESET(self):
    """Deprecated alias for :attr:`EMPTY_COMMIT_ID`."""
    warnings.warn(
        "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
    return self.EMPTY_COMMIT_ID
759
759
@property
def revisions(self):
    """Deprecated alias for :attr:`commit_ids` (getter)."""
    warnings.warn("Use commits attribute instead", DeprecationWarning)
    return self.commit_ids

@revisions.setter
def revisions(self, value):
    """Deprecated alias for :attr:`commit_ids` (setter)."""
    warnings.warn("Use commits attribute instead", DeprecationWarning)
    self.commit_ids = value
769
769
def get_changeset(self, revision=None, pre_load=None):
    """
    Deprecated: use :meth:`get_commit` instead.

    `revision` may be either a commit id (string) or a commit index.
    """
    warnings.warn("Use get_commit instead", DeprecationWarning)
    commit_id, commit_idx = None, None
    if isinstance(revision, compat.string_types):
        commit_id = revision
    else:
        commit_idx = revision
    return self.get_commit(
        commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
780
780
def get_changesets(
        self, start=None, end=None, start_date=None, end_date=None,
        branch_name=None, pre_load=None):
    """Deprecated: use :meth:`get_commits` instead."""
    warnings.warn("Use get_commits instead", DeprecationWarning)
    # the old API accepted ids and indices interchangeably
    return self.get_commits(
        start_id=self._revision_to_commit(start),
        end_id=self._revision_to_commit(end),
        start_date=start_date, end_date=end_date,
        branch_name=branch_name, pre_load=pre_load)
790
790
def _revision_to_commit(self, revision):
    """
    Translates a revision to a commit_id

    Helps to support the old changeset based API which allows to use
    commit ids and commit indices interchangeable.
    """
    if revision is None:
        return None
    if isinstance(revision, compat.string_types):
        # already a commit id
        return revision
    # numeric index -> look up the id
    return self.commit_ids[revision]
806
806
@property
def in_memory_changeset(self):
    """Deprecated alias for :attr:`in_memory_commit`."""
    warnings.warn("Use in_memory_commit instead", DeprecationWarning)
    return self.in_memory_commit
811
811
def get_path_permissions(self, username):
    """
    Returns a path permission checker or None if not supported

    :param username: session user name
    :return: an instance of BasePathPermissionChecker or None
    """
    # base backend has no path-level permission support
    return None
820
820
def install_hooks(self, force=False):
    """Install repository hooks via the remote backend.

    :param force: reinstall hooks even if already present
    """
    return self._remote.install_hooks(force)
823
823
def get_hooks_info(self):
    """Return hook installation info from the remote backend."""
    return self._remote.get_hooks_info()
826
826
827
827
class BaseCommit(object):
    """
    Each backend should implement its commit representation.

    **Attributes**

    ``repository``
        repository object within which commit exists

    ``id``
        The commit id, may be ``raw_id`` or i.e. for mercurial's tip
        just ``tip``.

    ``raw_id``
        raw commit representation (i.e. full 40 length sha for git
        backend)

    ``short_id``
        shortened (if apply) version of ``raw_id``; it would be simple
        shortcut for ``raw_id[:12]`` for git/mercurial backends or same
        as ``raw_id`` for subversion

    ``idx``
        commit index

    ``files``
        list of ``FileNode`` (``Node`` with NodeKind.FILE) objects

    ``dirs``
        list of ``DirNode`` (``Node`` with NodeKind.DIR) objects

    ``nodes``
        combined list of ``Node`` objects

    ``author``
        author of the commit, as unicode

    ``message``
        message of the commit, as unicode

    ``parents``
        list of parent commits

    """

    branch = None
    """
    Depending on the backend this should be set to the branch name of the
    commit. Backends not supporting branches on commits should leave this
    value as ``None``.
    """

    _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
    """
    This template is used to generate a default prefix for repository archives
    if no prefix has been specified.
    """
885
885
def __str__(self):
    """Human-readable representation: class name, index and short id."""
    cls_name = self.__class__.__name__
    return '<%s at %s:%s>' % (cls_name, self.idx, self.short_id)
889
889
def __repr__(self):
    # debug representation intentionally matches the user-facing one
    return self.__str__()
892
892
def __unicode__(self):
    # Python 2 text representation, e.g. u'12:deadbeef1234'
    return u'%s:%s' % (self.idx, self.short_id)
895
895
def __eq__(self, other):
    """Commits are equal when of the same class with the same ``raw_id``."""
    # NOTE(review): no matching __hash__ is defined here; under Python 3
    # this would make commits unhashable -- confirm if ever ported.
    if not isinstance(other, self.__class__):
        return False
    return self.raw_id == other.raw_id
899
899
def __json__(self):
    """Serializable dict representation of this commit."""
    parent_data = []
    try:
        for parent_commit in self.parents:
            parent_data.append({'raw_id': parent_commit.raw_id})
    except NotImplementedError:
        # empty commit doesn't have parents implemented
        pass

    return {
        'short_id': self.short_id,
        'raw_id': self.raw_id,
        'revision': self.idx,
        'message': self.message,
        'date': self.date,
        'author': self.author,
        'parents': parent_data,
        'branch': self.branch,
    }
919
919
def __getstate__(self):
    """Drop unpicklable remote/repository references for pickling."""
    state = dict(self.__dict__)
    state.pop('_remote', None)
    state.pop('repository', None)
    return state
925
925
def _get_refs(self):
    """Map of ref type -> ref names this commit belongs to."""
    branches = [self.branch] if self.branch else []
    return {
        'branches': branches,
        # not all backends define bookmarks
        'bookmarks': getattr(self, 'bookmarks', []),
        'tags': self.tags,
    }
932
932
@LazyProperty
def last(self):
    """
    ``True`` if this is last commit in repository, ``False``
    otherwise; trying to access this attribute while there is no
    commits would raise `EmptyRepositoryError`
    """
    if self.repository is None:
        raise CommitError("Cannot check if it's most recent commit")
    newest_id = self.repository.commit_ids[-1]
    return self.raw_id == newest_id
943
943
@LazyProperty
def parents(self):
    """
    List of parent commits.

    Abstract; concrete backends must override this.
    """
    raise NotImplementedError
950
950
@LazyProperty
def first_parent(self):
    """
    Returns the first parent commit, or an :class:`EmptyCommit`
    when this commit has no parents.
    """
    # docstring fix: previously (incorrectly) claimed to return a list
    return self.parents[0] if self.parents else EmptyCommit()
957
957
@property
def merge(self):
    """
    Returns boolean if commit is a merge.
    """
    # a merge commit has more than one parent
    return len(self.parents) > 1
964
964
@LazyProperty
def children(self):
    """
    List of child commits.

    Abstract; concrete backends must override this.
    """
    raise NotImplementedError
971
971
@LazyProperty
def id(self):
    """
    String identifying this commit.

    Abstract; concrete backends must override this.
    """
    raise NotImplementedError
978
978
@LazyProperty
def raw_id(self):
    """
    Raw string identifying this commit.

    Abstract; concrete backends must override this.
    """
    raise NotImplementedError
985
985
@LazyProperty
def short_id(self):
    """
    Shortened version of the ``raw_id`` attribute, as string,
    identifying this commit; useful for presentation to users.

    Abstract; concrete backends must override this.
    """
    raise NotImplementedError
993
993
@LazyProperty
def idx(self):
    """
    Integer identifying this commit.

    Abstract; concrete backends must override this.
    """
    raise NotImplementedError
1000
1000
@LazyProperty
def committer(self):
    """
    Committer for this commit.

    Abstract; concrete backends must override this.
    """
    raise NotImplementedError
1007
1007
@LazyProperty
def committer_name(self):
    """Committer name, extracted from the full ``committer`` string."""
    return author_name(self.committer)
1015
1015
@LazyProperty
def committer_email(self):
    """Committer email, extracted from the full ``committer`` string."""
    return author_email(self.committer)
1023
1023
1024 @LazyProperty
1024 @LazyProperty
1025 def author(self):
1025 def author(self):
1026 """
1026 """
1027 Returns author for this commit
1027 Returns author for this commit
1028 """
1028 """
1029
1029
1030 raise NotImplementedError
1030 raise NotImplementedError
1031
1031
1032 @LazyProperty
1032 @LazyProperty
1033 def author_name(self):
1033 def author_name(self):
1034 """
1034 """
1035 Returns author name for this commit
1035 Returns author name for this commit
1036 """
1036 """
1037
1037
1038 return author_name(self.author)
1038 return author_name(self.author)
1039
1039
1040 @LazyProperty
1040 @LazyProperty
1041 def author_email(self):
1041 def author_email(self):
1042 """
1042 """
1043 Returns author email address for this commit
1043 Returns author email address for this commit
1044 """
1044 """
1045
1045
1046 return author_email(self.author)
1046 return author_email(self.author)
1047
1047
    def get_file_mode(self, path):
        """
        Returns stat mode of the file at `path`.
        """
        raise NotImplementedError

    def is_link(self, path):
        """
        Returns ``True`` if given `path` is a symlink
        """
        raise NotImplementedError

    def is_node_binary(self, path):
        """
        Returns ``True`` if given `path` is a binary file
        """
        raise NotImplementedError

    def get_file_content(self, path):
        """
        Returns content of the file at the given `path`.
        """
        raise NotImplementedError

    def get_file_content_streamed(self, path):
        """
        Returns a streaming response from vcsserver with file content.
        """
        raise NotImplementedError

    def get_file_size(self, path):
        """
        Returns size of the file at the given `path`.
        """
        raise NotImplementedError
1083
1083
1084 def get_path_commit(self, path, pre_load=None):
1084 def get_path_commit(self, path, pre_load=None):
1085 """
1085 """
1086 Returns last commit of the file at the given `path`.
1086 Returns last commit of the file at the given `path`.
1087
1087
1088 :param pre_load: Optional. List of commit attributes to load.
1088 :param pre_load: Optional. List of commit attributes to load.
1089 """
1089 """
1090 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1090 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1091 if not commits:
1091 if not commits:
1092 raise RepositoryError(
1092 raise RepositoryError(
1093 'Failed to fetch history for path {}. '
1093 'Failed to fetch history for path {}. '
1094 'Please check if such path exists in your repository'.format(
1094 'Please check if such path exists in your repository'.format(
1095 path))
1095 path))
1096 return commits[0]
1096 return commits[0]
1097
1097
    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of :class:`BaseCommit`
        objects for which file at given `path` has been modified.

        :param limit: Optional. Allows to limit the size of the returned
            history. This is intended as a hint to the underlying backend, so
            that it can apply optimizations depending on the limit.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, sha, commit lazy loader and line

        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def get_nodes(self, path):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of commit at the given ``path``.

        :raises ``CommitError``: if node at the given ``path`` is not
            instance of ``DirNode``
        """
        raise NotImplementedError

    def get_node(self, path):
        """
        Returns ``Node`` object from the given ``path``.

        :raises ``NodeDoesNotExistError``: if there is no node at the given
            ``path``
        """
        raise NotImplementedError

    def get_largefile_node(self, path):
        """
        Returns the path to largefile from Mercurial/Git-lfs storage,
        or None if it's not a largefile node.
        """
        # Base implementation: no largefile support unless a backend overrides.
        return None
1144
1144
1145 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1145 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1146 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1146 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1147 """
1147 """
1148 Creates an archive containing the contents of the repository.
1148 Creates an archive containing the contents of the repository.
1149
1149
1150 :param archive_dest_path: path to the file which to create the archive.
1150 :param archive_dest_path: path to the file which to create the archive.
1151 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1151 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1152 :param prefix: name of root directory in archive.
1152 :param prefix: name of root directory in archive.
1153 Default is repository name and commit's short_id joined with dash:
1153 Default is repository name and commit's short_id joined with dash:
1154 ``"{repo_name}-{short_id}"``.
1154 ``"{repo_name}-{short_id}"``.
1155 :param write_metadata: write a metadata file into archive.
1155 :param write_metadata: write a metadata file into archive.
1156 :param mtime: custom modification time for archive creation, defaults
1156 :param mtime: custom modification time for archive creation, defaults
1157 to time.time() if not given.
1157 to time.time() if not given.
1158 :param archive_at_path: pack files at this path (default '/')
1158 :param archive_at_path: pack files at this path (default '/')
1159
1159
1160 :raise VCSError: If prefix has a problem.
1160 :raise VCSError: If prefix has a problem.
1161 """
1161 """
1162 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1162 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1163 if kind not in allowed_kinds:
1163 if kind not in allowed_kinds:
1164 raise ImproperArchiveTypeError(
1164 raise ImproperArchiveTypeError(
1165 'Archive kind (%s) not supported use one of %s' %
1165 'Archive kind (%s) not supported use one of %s' %
1166 (kind, allowed_kinds))
1166 (kind, allowed_kinds))
1167
1167
1168 prefix = self._validate_archive_prefix(prefix)
1168 prefix = self._validate_archive_prefix(prefix)
1169
1169
1170 mtime = mtime is not None or time.mktime(self.date.timetuple())
1170 mtime = mtime is not None or time.mktime(self.date.timetuple())
1171
1171
1172 file_info = []
1172 file_info = []
1173 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1173 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1174 for _r, _d, files in cur_rev.walk(archive_at_path):
1174 for _r, _d, files in cur_rev.walk(archive_at_path):
1175 for f in files:
1175 for f in files:
1176 f_path = os.path.join(prefix, f.path)
1176 f_path = os.path.join(prefix, f.path)
1177 file_info.append(
1177 file_info.append(
1178 (f_path, f.mode, f.is_link(), f.raw_bytes))
1178 (f_path, f.mode, f.is_link(), f.raw_bytes))
1179
1179
1180 if write_metadata:
1180 if write_metadata:
1181 metadata = [
1181 metadata = [
1182 ('repo_name', self.repository.name),
1182 ('repo_name', self.repository.name),
1183 ('commit_id', self.raw_id),
1183 ('commit_id', self.raw_id),
1184 ('mtime', mtime),
1184 ('mtime', mtime),
1185 ('branch', self.branch),
1185 ('branch', self.branch),
1186 ('tags', ','.join(self.tags)),
1186 ('tags', ','.join(self.tags)),
1187 ]
1187 ]
1188 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1188 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1189 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1189 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1190
1190
1191 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1191 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1192
1192
1193 def _validate_archive_prefix(self, prefix):
1193 def _validate_archive_prefix(self, prefix):
1194 if prefix is None:
1194 if prefix is None:
1195 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1195 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1196 repo_name=safe_str(self.repository.name),
1196 repo_name=safe_str(self.repository.name),
1197 short_id=self.short_id)
1197 short_id=self.short_id)
1198 elif not isinstance(prefix, str):
1198 elif not isinstance(prefix, str):
1199 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1199 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1200 elif prefix.startswith('/'):
1200 elif prefix.startswith('/'):
1201 raise VCSError("Prefix cannot start with leading slash")
1201 raise VCSError("Prefix cannot start with leading slash")
1202 elif prefix.strip() == '':
1202 elif prefix.strip() == '':
1203 raise VCSError("Prefix cannot be empty")
1203 raise VCSError("Prefix cannot be empty")
1204 return prefix
1204 return prefix
1205
1205
    @LazyProperty
    def root(self):
        """
        Returns ``RootNode`` object for this commit.
        """
        # The empty path resolves to the repository root node.
        return self.get_node('')
1212
1212
    def next(self, branch=None):
        """
        Returns next commit from current, if branch is given it will return
        next commit belonging to this branch

        :param branch: show commits within the given named branch
        """
        # Scan forward from the commit right after this one up to the tip.
        indexes = xrange(self.idx + 1, self.repository.count())
        return self._find_next(indexes, branch)

    def prev(self, branch=None):
        """
        Returns previous commit from current, if branch is given it will
        return previous commit belonging to this branch

        :param branch: show commit within the given named branch
        """
        # Scan backwards from the commit right before this one down to idx 0.
        indexes = xrange(self.idx - 1, -1, -1)
        return self._find_next(indexes, branch)
1232
1232
1233 def _find_next(self, indexes, branch=None):
1233 def _find_next(self, indexes, branch=None):
1234 if branch and self.branch != branch:
1234 if branch and self.branch != branch:
1235 raise VCSError('Branch option used on commit not belonging '
1235 raise VCSError('Branch option used on commit not belonging '
1236 'to that branch')
1236 'to that branch')
1237
1237
1238 for next_idx in indexes:
1238 for next_idx in indexes:
1239 commit = self.repository.get_commit(commit_idx=next_idx)
1239 commit = self.repository.get_commit(commit_idx=next_idx)
1240 if branch and branch != commit.branch:
1240 if branch and branch != commit.branch:
1241 continue
1241 continue
1242 return commit
1242 return commit
1243 raise CommitDoesNotExistError
1243 raise CommitDoesNotExistError
1244
1244
1245 def diff(self, ignore_whitespace=True, context=3):
1245 def diff(self, ignore_whitespace=True, context=3):
1246 """
1246 """
1247 Returns a `Diff` object representing the change made by this commit.
1247 Returns a `Diff` object representing the change made by this commit.
1248 """
1248 """
1249 parent = self.first_parent
1249 parent = self.first_parent
1250 diff = self.repository.get_diff(
1250 diff = self.repository.get_diff(
1251 parent, self,
1251 parent, self,
1252 ignore_whitespace=ignore_whitespace,
1252 ignore_whitespace=ignore_whitespace,
1253 context=context)
1253 context=context)
1254 return diff
1254 return diff
1255
1255
    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        """
        raise NotImplementedError

    @LazyProperty
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        """
        raise NotImplementedError

    @LazyProperty
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        """
        raise NotImplementedError

    @LazyProperty
    def size(self):
        """
        Returns total number of bytes from contents of all filenodes.
        """
        # NOTE: walks every file node, so this is O(number of files).
        return sum((node.size for node in self.get_filenodes_generator()))
1283
1283
1284 def walk(self, topurl=''):
1284 def walk(self, topurl=''):
1285 """
1285 """
1286 Similar to os.walk method. Insted of filesystem it walks through
1286 Similar to os.walk method. Insted of filesystem it walks through
1287 commit starting at given ``topurl``. Returns generator of tuples
1287 commit starting at given ``topurl``. Returns generator of tuples
1288 (topnode, dirnodes, filenodes).
1288 (topnode, dirnodes, filenodes).
1289 """
1289 """
1290 topnode = self.get_node(topurl)
1290 topnode = self.get_node(topurl)
1291 if not topnode.is_dir():
1291 if not topnode.is_dir():
1292 return
1292 return
1293 yield (topnode, topnode.dirs, topnode.files)
1293 yield (topnode, topnode.dirs, topnode.files)
1294 for dirnode in topnode.dirs:
1294 for dirnode in topnode.dirs:
1295 for tup in self.walk(dirnode.path):
1295 for tup in self.walk(dirnode.path):
1296 yield tup
1296 yield tup
1297
1297
1298 def get_filenodes_generator(self):
1298 def get_filenodes_generator(self):
1299 """
1299 """
1300 Returns generator that yields *all* file nodes.
1300 Returns generator that yields *all* file nodes.
1301 """
1301 """
1302 for topnode, dirs, files in self.walk():
1302 for topnode, dirs, files in self.walk():
1303 for node in files:
1303 for node in files:
1304 yield node
1304 yield node
1305
1305
1306 #
1306 #
1307 # Utilities for sub classes to support consistent behavior
1307 # Utilities for sub classes to support consistent behavior
1308 #
1308 #
1309
1309
    def no_node_at_path(self, path):
        """
        Builds (does not raise) a ``NodeDoesNotExistError`` describing a
        missing ``path`` at this commit; callers raise the returned value.
        """
        return NodeDoesNotExistError(
            u"There is no file nor directory at the given path: "
            u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1314
1314
    def _fix_path(self, path):
        """
        Paths are stored without trailing slash so we need to get rid off it if
        needed. Only trailing slashes are stripped; leading ones are kept.
        """
        return path.rstrip('/')
1321
1321
1322 #
1322 #
1323 # Deprecated API based on changesets
1323 # Deprecated API based on changesets
1324 #
1324 #
1325
1325
    @property
    def revision(self):
        # Deprecated alias of `idx`, kept for the old changeset-based API.
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        # Deprecated setter counterpart of the `revision` alias above.
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value

    def get_file_changeset(self, path):
        # Deprecated alias of `get_path_commit`, kept for the old API.
        warnings.warn("Use get_path_commit instead", DeprecationWarning)
        return self.get_path_commit(path)
1339
1339
1340
1340
class BaseChangesetClass(type):
    """
    Metaclass making ``isinstance(x, BaseChangeset)`` succeed for any
    ``BaseCommit`` instance, so the deprecated changeset API keeps working.
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1345
1345
1346
1346
class BaseChangeset(BaseCommit):
    """
    Deprecated alias of :class:`BaseCommit`; instantiating it emits a
    DeprecationWarning and otherwise behaves like ``BaseCommit``.
    """

    # Python 2 style metaclass hook; see BaseChangesetClass above.
    __metaclass__ = BaseChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1355
1355
1356
1356
class BaseInMemoryCommit(object):
    """
    Represents differences between repository's state (most recent head) and
    changes made *in place*.

    **Attributes**

    ``repository``
        repository object for this in-memory-commit

    ``added``
        list of ``FileNode`` objects marked as *added*

    ``changed``
        list of ``FileNode`` objects marked as *changed*

    ``removed``
        list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
        *removed*

    ``parents``
        list of :class:`BaseCommit` instances representing parents of
        in-memory commit. Should always be 2-element sequence.

    """
1382
1382
    def __init__(self, repository):
        """
        :param repository: repository object the pending changes belong to
        """
        self.repository = repository
        # Pending node lists; consumed by commit() and cleared by reset().
        self.added = []
        self.changed = []
        self.removed = []
        # Parent commits; filled lazily by check_integrity() if left empty.
        self.parents = []
1389
1389
1390 def add(self, *filenodes):
1390 def add(self, *filenodes):
1391 """
1391 """
1392 Marks given ``FileNode`` objects as *to be committed*.
1392 Marks given ``FileNode`` objects as *to be committed*.
1393
1393
1394 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1394 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1395 latest commit
1395 latest commit
1396 :raises ``NodeAlreadyAddedError``: if node with same path is already
1396 :raises ``NodeAlreadyAddedError``: if node with same path is already
1397 marked as *added*
1397 marked as *added*
1398 """
1398 """
1399 # Check if not already marked as *added* first
1399 # Check if not already marked as *added* first
1400 for node in filenodes:
1400 for node in filenodes:
1401 if node.path in (n.path for n in self.added):
1401 if node.path in (n.path for n in self.added):
1402 raise NodeAlreadyAddedError(
1402 raise NodeAlreadyAddedError(
1403 "Such FileNode %s is already marked for addition"
1403 "Such FileNode %s is already marked for addition"
1404 % node.path)
1404 % node.path)
1405 for node in filenodes:
1405 for node in filenodes:
1406 self.added.append(node)
1406 self.added.append(node)
1407
1407
1408 def change(self, *filenodes):
1408 def change(self, *filenodes):
1409 """
1409 """
1410 Marks given ``FileNode`` objects to be *changed* in next commit.
1410 Marks given ``FileNode`` objects to be *changed* in next commit.
1411
1411
1412 :raises ``EmptyRepositoryError``: if there are no commits yet
1412 :raises ``EmptyRepositoryError``: if there are no commits yet
1413 :raises ``NodeAlreadyExistsError``: if node with same path is already
1413 :raises ``NodeAlreadyExistsError``: if node with same path is already
1414 marked to be *changed*
1414 marked to be *changed*
1415 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1415 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1416 marked to be *removed*
1416 marked to be *removed*
1417 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1417 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1418 commit
1418 commit
1419 :raises ``NodeNotChangedError``: if node hasn't really be changed
1419 :raises ``NodeNotChangedError``: if node hasn't really be changed
1420 """
1420 """
1421 for node in filenodes:
1421 for node in filenodes:
1422 if node.path in (n.path for n in self.removed):
1422 if node.path in (n.path for n in self.removed):
1423 raise NodeAlreadyRemovedError(
1423 raise NodeAlreadyRemovedError(
1424 "Node at %s is already marked as removed" % node.path)
1424 "Node at %s is already marked as removed" % node.path)
1425 try:
1425 try:
1426 self.repository.get_commit()
1426 self.repository.get_commit()
1427 except EmptyRepositoryError:
1427 except EmptyRepositoryError:
1428 raise EmptyRepositoryError(
1428 raise EmptyRepositoryError(
1429 "Nothing to change - try to *add* new nodes rather than "
1429 "Nothing to change - try to *add* new nodes rather than "
1430 "changing them")
1430 "changing them")
1431 for node in filenodes:
1431 for node in filenodes:
1432 if node.path in (n.path for n in self.changed):
1432 if node.path in (n.path for n in self.changed):
1433 raise NodeAlreadyChangedError(
1433 raise NodeAlreadyChangedError(
1434 "Node at '%s' is already marked as changed" % node.path)
1434 "Node at '%s' is already marked as changed" % node.path)
1435 self.changed.append(node)
1435 self.changed.append(node)
1436
1436
1437 def remove(self, *filenodes):
1437 def remove(self, *filenodes):
1438 """
1438 """
1439 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1439 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1440 *removed* in next commit.
1440 *removed* in next commit.
1441
1441
1442 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1442 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1443 be *removed*
1443 be *removed*
1444 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1444 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1445 be *changed*
1445 be *changed*
1446 """
1446 """
1447 for node in filenodes:
1447 for node in filenodes:
1448 if node.path in (n.path for n in self.removed):
1448 if node.path in (n.path for n in self.removed):
1449 raise NodeAlreadyRemovedError(
1449 raise NodeAlreadyRemovedError(
1450 "Node is already marked to for removal at %s" % node.path)
1450 "Node is already marked to for removal at %s" % node.path)
1451 if node.path in (n.path for n in self.changed):
1451 if node.path in (n.path for n in self.changed):
1452 raise NodeAlreadyChangedError(
1452 raise NodeAlreadyChangedError(
1453 "Node is already marked to be changed at %s" % node.path)
1453 "Node is already marked to be changed at %s" % node.path)
1454 # We only mark node as *removed* - real removal is done by
1454 # We only mark node as *removed* - real removal is done by
1455 # commit method
1455 # commit method
1456 self.removed.append(node)
1456 self.removed.append(node)
1457
1457
1458 def reset(self):
1458 def reset(self):
1459 """
1459 """
1460 Resets this instance to initial state (cleans ``added``, ``changed``
1460 Resets this instance to initial state (cleans ``added``, ``changed``
1461 and ``removed`` lists).
1461 and ``removed`` lists).
1462 """
1462 """
1463 self.added = []
1463 self.added = []
1464 self.changed = []
1464 self.changed = []
1465 self.removed = []
1465 self.removed = []
1466 self.parents = []
1466 self.parents = []
1467
1467
1468 def get_ipaths(self):
1468 def get_ipaths(self):
1469 """
1469 """
1470 Returns generator of paths from nodes marked as added, changed or
1470 Returns generator of paths from nodes marked as added, changed or
1471 removed.
1471 removed.
1472 """
1472 """
1473 for node in itertools.chain(self.added, self.changed, self.removed):
1473 for node in itertools.chain(self.added, self.changed, self.removed):
1474 yield node.path
1474 yield node.path
1475
1475
1476 def get_paths(self):
1476 def get_paths(self):
1477 """
1477 """
1478 Returns list of paths from nodes marked as added, changed or removed.
1478 Returns list of paths from nodes marked as added, changed or removed.
1479 """
1479 """
1480 return list(self.get_ipaths())
1480 return list(self.get_ipaths())
1481
1481
    def check_integrity(self, parents=None):
        """
        Checks in-memory commit's integrity. Also, sets parents if not
        already set.

        :param parents: optional parent commits; used only when
            ``self.parents`` is still empty.
        :raises CommitError: if any error occurs (i.e.
            ``NodeDoesNotExistError``).
        """
        # Fill parents lazily: default to [tip, None], or [None, None] for
        # an empty repository. The list is normalized to two elements.
        if not self.parents:
            parents = parents or []
            if len(parents) == 0:
                try:
                    parents = [self.repository.get_commit(), None]
                except EmptyRepositoryError:
                    parents = [None, None]
            elif len(parents) == 1:
                parents += [None]
            self.parents = parents

        # Local parents, only if not None
        parents = [p for p in self.parents if p]

        # Check nodes marked as added: they must not exist in any parent.
        for p in parents:
            for node in self.added:
                try:
                    p.get_node(node.path)
                except NodeDoesNotExistError:
                    pass
                else:
                    raise NodeAlreadyExistsError(
                        "Node `%s` already exists at %s" % (node.path, p))

        # Check nodes marked as changed: each must exist in some parent and
        # its content must actually differ.
        missing = set(self.changed)
        not_changed = set(self.changed)
        if self.changed and not parents:
            raise NodeDoesNotExistError(str(self.changed[0].path))
        for p in parents:
            for node in self.changed:
                try:
                    old = p.get_node(node.path)
                    # NOTE(review): set.remove raises KeyError if the node was
                    # already removed via an earlier parent (possible with two
                    # parents) - set.discard would be safer; confirm intent.
                    missing.remove(node)
                    # if content actually changed, remove node from not_changed
                    if old.content != node.content:
                        not_changed.remove(node)
                except NodeDoesNotExistError:
                    pass
        if self.changed and missing:
            # NOTE(review): `node` here is the last loop variable, not
            # necessarily a missing node - message may name the wrong path.
            raise NodeDoesNotExistError(
                "Node `%s` marked as modified but missing in parents: %s"
                % (node.path, parents))

        if self.changed and not_changed:
            raise NodeNotChangedError(
                "Node `%s` wasn't actually changed (parents: %s)"
                % (not_changed.pop().path, parents))

        # Check nodes marked as removed: each must exist in some parent.
        if self.removed and not parents:
            raise NodeDoesNotExistError(
                "Cannot remove node at %s as there "
                "were no parents specified" % self.removed[0].path)
        really_removed = set()
        for p in parents:
            for node in self.removed:
                try:
                    p.get_node(node.path)
                    really_removed.add(node)
                except CommitError:
                    pass
        not_removed = set(self.removed) - really_removed
        if not_removed:
            # TODO: johbo: This code branch does not seem to be covered
            raise NodeDoesNotExistError(
                "Cannot remove node at %s from "
                "following parents: %s" % (not_removed, parents))
1559
1559
1560 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1560 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1561 """
1561 """
1562 Performs in-memory commit (doesn't check workdir in any way) and
1562 Performs in-memory commit (doesn't check workdir in any way) and
1563 returns newly created :class:`BaseCommit`. Updates repository's
1563 returns newly created :class:`BaseCommit`. Updates repository's
1564 attribute `commits`.
1564 attribute `commits`.
1565
1565
1566 .. note::
1566 .. note::
1567
1567
1568 While overriding this method each backend's should call
1568 While overriding this method each backend's should call
1569 ``self.check_integrity(parents)`` in the first place.
1569 ``self.check_integrity(parents)`` in the first place.
1570
1570
1571 :param message: message of the commit
1571 :param message: message of the commit
1572 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1572 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1573 :param parents: single parent or sequence of parents from which commit
1573 :param parents: single parent or sequence of parents from which commit
1574 would be derived
1574 would be derived
1575 :param date: ``datetime.datetime`` instance. Defaults to
1575 :param date: ``datetime.datetime`` instance. Defaults to
1576 ``datetime.datetime.now()``.
1576 ``datetime.datetime.now()``.
1577 :param branch: branch name, as string. If none given, default backend's
1577 :param branch: branch name, as string. If none given, default backend's
1578 branch would be used.
1578 branch would be used.
1579
1579
1580 :raises ``CommitError``: if any error occurs while committing
1580 :raises ``CommitError``: if any error occurs while committing
1581 """
1581 """
1582 raise NotImplementedError
1582 raise NotImplementedError
1583
1583
1584
1584
class BaseInMemoryChangesetClass(type):
    """
    Metaclass shim so that ``isinstance(x, BaseInMemoryChangeset)`` accepts
    any :class:`BaseInMemoryCommit` instance (backwards compatibility).
    """

    def __instancecheck__(self, instance):
        # Delegate the check to the non-deprecated class.
        return isinstance(instance, BaseInMemoryCommit)
1589
1589
1590
1590
class BaseInMemoryChangeset(BaseInMemoryCommit):
    """
    Deprecated alias kept for backwards compatibility; use
    :class:`BaseInMemoryCommit` instead.
    """

    __metaclass__ = BaseInMemoryChangesetClass

    def __new__(cls, *args, **kwargs):
        # Warn on every construction attempt before deferring to the parent.
        warnings.warn(
            "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
        return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1599
1599
1600
1600
class EmptyCommit(BaseCommit):
    """
    A placeholder commit used where no real commit exists (e.g. an empty
    repository). A custom commit hash may be supplied at creation time.
    """

    def __init__(
            self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        self._empty_commit_id = commit_id
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self.idx = idx
        self.message = message
        self.author = author
        # Any falsy date falls back to the epoch.
        self.date = date if date else datetime.datetime.fromtimestamp(0)
        self.repository = repo
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Raw string id of this commit (the hash supplied at creation),
        useful for web representation.
        """
        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        # Resolve the backend's default branch name when an alias is known;
        # implicitly returns None otherwise.
        if self.alias:
            from rhodecode.lib.vcs.backends import get_backend
            return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        """Abbreviated (12 character) form of :attr:`raw_id`."""
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        """Alias of :attr:`raw_id`."""
        return self.raw_id

    def get_path_commit(self, path):
        # The empty commit is its own last-change commit for every path.
        return self

    def get_file_content(self, path):
        # No files exist: every path reads as empty unicode content.
        return u''

    def get_file_content_streamed(self, path):
        # Stream variant yielding the single (empty) content chunk.
        yield self.get_file_content()

    def get_file_size(self, path):
        # No files exist, so every path has size zero.
        return 0
1654
1654
1655
1655
class EmptyChangesetClass(type):
    """
    Metaclass shim so that ``isinstance(x, EmptyChangeset)`` accepts any
    :class:`EmptyCommit` instance (backwards compatibility).
    """

    def __instancecheck__(self, instance):
        # Delegate the check to the non-deprecated class.
        return isinstance(instance, EmptyCommit)
1660
1660
1661
1661
class EmptyChangeset(EmptyCommit):
    """
    Deprecated alias of :class:`EmptyCommit`, kept for backwards
    compatibility. Emits :class:`DeprecationWarning` on use.
    """

    __metaclass__ = EmptyChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # BUG FIX: super() was previously called with EmptyCommit instead of
        # EmptyChangeset, skipping EmptyCommit in the MRO. That was harmless
        # only because EmptyCommit defines no __new__; fixed for correctness
        # and for consistency with BaseInMemoryChangeset.__new__.
        return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)

    def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        super(EmptyChangeset, self).__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        """Deprecated accessor for :attr:`idx`."""
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        """Deprecated setter forwarding to :attr:`idx`."""
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1690
1690
1691
1691
class EmptyRepository(BaseRepository):
    """Null-object repository: has no content and yields empty diffs."""

    def __init__(self, repo_path=None, config=None, create=False, **kwargs):
        # Nothing to initialize - there is no backing repository on disk.
        pass

    def get_diff(self, *args, **kwargs):
        """Return an empty :class:`GitDiff`, ignoring all arguments."""
        from rhodecode.lib.vcs.backends.git.diff import GitDiff
        return GitDiff('')
1699
1699
1700
1700
class CollectionGenerator(object):
    """
    Lazily produces commit objects for a sequence of commit ids belonging
    to ``repo``. Supports ``len()``, iteration and (python2) slicing.
    """

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
        self.repo = repo
        self.commit_ids = commit_ids
        # TODO: (oliver) this isn't currently hooked up
        self.collection_size = None
        self.pre_load = pre_load
        self.translate_tag = translate_tag

    def __len__(self):
        # An explicitly injected size wins over the id list's own length.
        if self.collection_size is not None:
            return self.collection_size
        return len(self.commit_ids)

    def __iter__(self):
        # TODO: johbo: Mercurial passes in commit indices or commit ids
        return (self._commit_factory(commit_id)
                for commit_id in self.commit_ids)

    def _commit_factory(self, commit_id):
        """
        Hook allowing backends to customize how commits are constructed.
        """
        return self.repo.get_commit(
            commit_id=commit_id, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __getslice__(self, i, j):
        """
        Return a new generator over the sliced id range (python2 slicing).
        """
        sliced_ids = self.commit_ids[i:j]
        return self.__class__(
            self.repo, sliced_ids, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __repr__(self):
        return '<CollectionGenerator[len:%s]>' % (len(self))
1740
1740
1741
1741
class Config(object):
    """
    In-memory configuration for a repository, keyed by (section, option).

    The API is inspired by :class:`ConfigParser.ConfigParser` from the
    standard library. It implements only the needed subset.
    """

    def __init__(self):
        self._values = {}

    def copy(self):
        """Return an independent copy (per-section dicts are re-created)."""
        clone = Config()
        for section_name, section_values in self._values.items():
            clone._values[section_name] = section_values.copy()
        return clone

    def __repr__(self):
        return '<Config(%s sections) at %s>' % (
            len(self._values), hex(id(self)))

    def items(self, section):
        """Iterate (option, value) pairs of ``section`` (empty if missing)."""
        return self._values.get(section, {}).iteritems()

    def get(self, section, option):
        """Return the stored value for section/option, or ``None``."""
        section_values = self._values.get(section, {})
        return section_values.get(option)

    def set(self, section, option, value):
        """Store ``value`` under section/option, creating the section."""
        self._values.setdefault(section, {})[option] = value

    def clear_section(self, section):
        """Drop every option of ``section`` (the section key stays)."""
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        return [
            (safe_str(section), safe_str(option), safe_str(value))
            for section in self._values
            for option, value in self._values[section].items()]
1787
1787
1788
1788
class Diff(object):
    """
    Represents a diff result from a repository backend.

    Subclasses have to provide a backend specific value for
    :attr:`_header_re` and :attr:`_meta_re`.
    """
    _meta_re = None
    _header_re = None

    def __init__(self, raw_diff):
        self.raw = raw_diff

    def chunks(self):
        """
        Split the diff into per-file chunks, one for every
        ``diff --git a/file b/file`` section.

        To make diffs consistent we must prepend with \n, and make sure
        we can detect the last chunk, as it has a special rule.
        Returns a generator of :class:`DiffChunk`.
        """
        diff_parts = ('\n' + self.raw).split('\ndiff --git')
        # diff_parts[0] is whatever precedes the first chunk marker and is
        # not part of any file diff, so it is discarded. (A dead
        # `self._meta_re.match(header)` whose result was never used has
        # been removed from this method.)
        chunks = diff_parts[1:]
        total_chunks = len(chunks)

        return (
            DiffChunk(chunk, self, cur_chunk == total_chunks)
            for cur_chunk, chunk in enumerate(chunks, start=1))
1821
1821
1822
1822
class DiffChunk(object):
    """One per-file chunk of a :class:`Diff`, split on ``diff --git``."""

    def __init__(self, chunk, diff, last_chunk):
        self._diff = diff

        # Splitting on '\ndiff --git' removed the trailing newline of every
        # chunk except the last one; restore it so the header parses the
        # same as in the original diff text.
        if not last_chunk:
            chunk = chunk + '\n'

        header_match = self._diff._header_re.match(chunk)
        self.header = header_match.groupdict()
        self.diff = chunk[header_match.end():]
        self.raw = chunk
1837
1837
1838
1838
class BasePathPermissionChecker(object):
    """
    Base class for path-level permission checks built from include/exclude
    glob patterns.
    """

    @staticmethod
    def create_from_patterns(includes, excludes):
        """
        Factory returning the cheapest checker for the given patterns:
        full access, no access, or pattern-based matching.
        """
        if includes and '*' in includes and not excludes:
            return AllPathPermissionChecker()
        elif excludes and '*' in excludes:
            return NonePathPermissionChecker()
        else:
            return PatternPathPermissionChecker(includes, excludes)

    @property
    def has_full_access(self):
        # BUG FIX: was `raise NotImplemented()`. NotImplemented is a constant,
        # not callable, so this raised a confusing TypeError instead of
        # signalling an abstract member.
        raise NotImplementedError()

    def has_access(self, path):
        # BUG FIX: same as above - raise the proper exception type.
        raise NotImplementedError()
1856
1856
1857
1857
class AllPathPermissionChecker(BasePathPermissionChecker):
    """Checker granting access to every path unconditionally."""

    @property
    def has_full_access(self):
        # Always full access.
        return True

    def has_access(self, path):
        # Every path is accessible.
        return True
1866
1866
1867
1867
class NonePathPermissionChecker(BasePathPermissionChecker):
    """Checker denying access to every path unconditionally."""

    @property
    def has_full_access(self):
        # Never full access.
        return False

    def has_access(self, path):
        # No path is accessible.
        return False
1876
1876
1877
1877
class PatternPathPermissionChecker(BasePathPermissionChecker):
    """Checker matching paths against fnmatch-style include/exclude globs."""

    def __init__(self, includes, excludes):
        self.includes = includes
        self.excludes = excludes
        # Pre-compile the globs to regexes; a falsy pattern list compiles
        # to an empty list.
        self.includes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (includes or [])]
        self.excludes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (excludes or [])]

    @property
    def has_full_access(self):
        # Full access only with a '*' include and no excludes at all.
        return '*' in self.includes and not self.excludes

    def has_access(self, path):
        # Excludes win over includes; anything not explicitly included
        # is denied.
        for pattern in self.excludes_re:
            if pattern.match(path):
                return False
        return any(pattern.match(path) for pattern in self.includes_re)
@@ -1,1004 +1,1017 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT repository module
22 GIT repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 utcdate_fromtimestamp, makedate, date_astimestamp)
33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.vcs import connection, path as vcspath
36 from rhodecode.lib.vcs import connection, path as vcspath
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference)
39 MergeFailureReason, Reference)
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 CommitDoesNotExistError, EmptyRepositoryError,
44 CommitDoesNotExistError, EmptyRepositoryError,
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError)
45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
46
46
47
47
# Matches a full (40 hex chars) or abbreviated (12 hex chars) git SHA.
# BUG FIX: the previous pattern `^[[0-9a-fA-F]{12}|[0-9a-fA-F]{40}]$` used
# square brackets where a group was intended: the first `[` joined a
# character class (so literal `[` matched) and the alternation was not
# anchored on both sides (e.g. 13+ hex chars matched via the prefix branch).
SHA_PATTERN = re.compile(r'^(?:[0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 class GitRepository(BaseRepository):
53 class GitRepository(BaseRepository):
54 """
54 """
55 Git repository backend.
55 Git repository backend.
56 """
56 """
57 DEFAULT_BRANCH_NAME = 'master'
57 DEFAULT_BRANCH_NAME = 'master'
58
58
59 contact = BaseRepository.DEFAULT_CONTACT
59 contact = BaseRepository.DEFAULT_CONTACT
60
60
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 do_workspace_checkout=False, with_wire=None, bare=False):
62 do_workspace_checkout=False, with_wire=None, bare=False):
63
63
64 self.path = safe_str(os.path.abspath(repo_path))
64 self.path = safe_str(os.path.abspath(repo_path))
65 self.config = config if config else self.get_default_config()
65 self.config = config if config else self.get_default_config()
66 self.with_wire = with_wire or {"cache": False} # default should not use cache
66 self.with_wire = with_wire or {"cache": False} # default should not use cache
67
67
68 self._init_repo(create, src_url, do_workspace_checkout, bare)
68 self._init_repo(create, src_url, do_workspace_checkout, bare)
69
69
70 # caches
70 # caches
71 self._commit_ids = {}
71 self._commit_ids = {}
72
72
    @LazyProperty
    def _remote(self):
        """Lazily-created handle to the remote (vcsserver) Git backend."""
        # NOTE(review): repo_id is just the repository path here - confirm
        # whether a distinct stable identifier was intended.
        repo_id = self.path
        return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77
77
    @LazyProperty
    def bare(self):
        """True if this is a bare repository; delegated to the remote."""
        return self._remote.bare()
81
81
    @LazyProperty
    def head(self):
        """Current HEAD of the repository; delegated to the remote."""
        return self._remote.head()
85
85
    @CachedProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order. Being lazy
        attribute allows external tools to inject commit ids from cache.
        """
        commit_ids = self._get_all_commit_ids()
        # keep the id -> index lookup table in sync with the full list
        self._rebuild_cache(commit_ids)
        return commit_ids
95
95
96 def _rebuild_cache(self, commit_ids):
96 def _rebuild_cache(self, commit_ids):
97 self._commit_ids = dict((commit_id, index)
97 self._commit_ids = dict((commit_id, index)
98 for index, commit_id in enumerate(commit_ids))
98 for index, commit_id in enumerate(commit_ids))
99
99
100 def run_git_command(self, cmd, **opts):
100 def run_git_command(self, cmd, **opts):
101 """
101 """
102 Runs given ``cmd`` as git command and returns tuple
102 Runs given ``cmd`` as git command and returns tuple
103 (stdout, stderr).
103 (stdout, stderr).
104
104
105 :param cmd: git command to be executed
105 :param cmd: git command to be executed
106 :param opts: env options to pass into Subprocess command
106 :param opts: env options to pass into Subprocess command
107 """
107 """
108 if not isinstance(cmd, list):
108 if not isinstance(cmd, list):
109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
109 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
110
110
111 skip_stderr_log = opts.pop('skip_stderr_log', False)
111 skip_stderr_log = opts.pop('skip_stderr_log', False)
112 out, err = self._remote.run_git_command(cmd, **opts)
112 out, err = self._remote.run_git_command(cmd, **opts)
113 if err and not skip_stderr_log:
113 if err and not skip_stderr_log:
114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
114 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
115 return out, err
115 return out, err
116
116
117 @staticmethod
117 @staticmethod
118 def check_url(url, config):
118 def check_url(url, config):
119 """
119 """
120 Function will check given url and try to verify if it's a valid
120 Function will check given url and try to verify if it's a valid
121 link. Sometimes it may happened that git will issue basic
121 link. Sometimes it may happened that git will issue basic
122 auth request that can cause whole API to hang when used from python
122 auth request that can cause whole API to hang when used from python
123 or other external calls.
123 or other external calls.
124
124
125 On failures it'll raise urllib2.HTTPError, exception is also thrown
125 On failures it'll raise urllib2.HTTPError, exception is also thrown
126 when the return code is non 200
126 when the return code is non 200
127 """
127 """
128 # check first if it's not an url
128 # check first if it's not an url
129 if os.path.isdir(url) or url.startswith('file:'):
129 if os.path.isdir(url) or url.startswith('file:'):
130 return True
130 return True
131
131
132 if '+' in url.split('://', 1)[0]:
132 if '+' in url.split('://', 1)[0]:
133 url = url.split('+', 1)[1]
133 url = url.split('+', 1)[1]
134
134
135 # Request the _remote to verify the url
135 # Request the _remote to verify the url
136 return connection.Git.check_url(url, config.serialize())
136 return connection.Git.check_url(url, config.serialize())
137
137
138 @staticmethod
138 @staticmethod
139 def is_valid_repository(path):
139 def is_valid_repository(path):
140 if os.path.isdir(os.path.join(path, '.git')):
140 if os.path.isdir(os.path.join(path, '.git')):
141 return True
141 return True
142 # check case of bare repository
142 # check case of bare repository
143 try:
143 try:
144 GitRepository(path)
144 GitRepository(path)
145 return True
145 return True
146 except VCSError:
146 except VCSError:
147 pass
147 pass
148 return False
148 return False
149
149
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
                   bare=False):
        """
        Create a new repository (optionally cloning from ``src_url``) or
        validate that an existing one lives at ``self.path``.

        :param create: create a fresh repository; fails if the path exists.
        :param src_url: optional source URL to fetch/pull from on creation.
        :param do_workspace_checkout: check out a working copy after pulling
            (non-bare repositories only).
        :param bare: initialize as a bare repository.
        :raises RepositoryError: on invalid combinations, bad paths, or
            underlying OS errors.
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        # A bare repository has no working directory to check out into.
        if bare and do_workspace_checkout:
            raise RepositoryError("Cannot update a bare repository")
        try:

            if src_url:
                # check URL before any actions
                GitRepository.check_url(src_url, self.config)

            if create:
                os.makedirs(self.path, mode=0o755)

                if bare:
                    self._remote.init_bare()
                else:
                    self._remote.init()

                if src_url and bare:
                    # bare repository only allows a fetch and checkout is not allowed
                    self.fetch(src_url, commit_ids=None)
                elif src_url:
                    self.pull(src_url, commit_ids=None,
                              update_after=do_workspace_checkout)

            else:
                # Not creating: the path must already contain a valid repo.
                if not self._remote.assert_correct_path():
                    raise RepositoryError(
                        'Path "%s" does not contain a Git repository' %
                        (self.path,))

        # TODO: johbo: check if we have to translate the OSError here
        except OSError as err:
            raise RepositoryError(err)
189
189
def _get_all_commit_ids(self):
    """Return every commit id known to the remote backend."""
    return self._remote.get_all_commit_ids()
def _get_commit_ids(self, filters=None):
    """
    Return commit ids via ``git rev-list``, oldest first.

    :param filters: optional dict with ``since``, ``until`` and/or
        ``branch_name`` keys used to narrow the revision walk
    """
    # we must check if this repo is not empty, since later command
    # fails if it is. And it's cheaper to ask than throw the subprocess
    # errors
    head = self._remote.head(show_exc=False)

    if not head:
        return []

    rev_filter = ['--branches', '--tags']
    extra_filter = []

    if filters:
        if filters.get('since'):
            extra_filter.append('--since=%s' % (filters['since']))
        if filters.get('until'):
            extra_filter.append('--until=%s' % (filters['until']))
        if filters.get('branch_name'):
            # a concrete branch replaces the all-branches/all-tags walk
            rev_filter = []
            extra_filter.append(filters['branch_name'])
    rev_filter.extend(extra_filter)

    cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
    try:
        output, __ = self.run_git_command(cmd)
    except RepositoryError:
        # Can be raised for empty repositories
        return []
    return output.splitlines()
def _lookup_commit(self, commit_id_or_idx, translate_tag=True):
    """
    Translate a commit reference (index, short id, tag, symbolic name)
    into a full commit sha.

    :raises CommitDoesNotExistError: if the reference cannot be resolved
    """
    def is_null(value):
        # true when `value` consists only of '0' characters (null sha)
        return len(value) == commit_id_or_idx.count('0')

    if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
        return self.commit_ids[-1]
    commit_missing_err = "Commit {} does not exist for `{}`".format(
        *map(safe_str, [commit_id_or_idx, self.name]))

    is_bstr = isinstance(commit_id_or_idx, (str, unicode))
    if ((is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12)
            or isinstance(commit_id_or_idx, int) or is_null(commit_id_or_idx)):
        # numeric index (or null sha) -> positional lookup
        try:
            commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
        except Exception:
            raise CommitDoesNotExistError(commit_missing_err)

    elif is_bstr:
        # Need to call remote to translate id for tagging scenario
        try:
            remote_data = self._remote.get_object(commit_id_or_idx)
            commit_id_or_idx = remote_data["commit_id"]
        except (CommitDoesNotExistError,):
            raise CommitDoesNotExistError(commit_missing_err)

    # Ensure we return full id
    if not SHA_PATTERN.match(str(commit_id_or_idx)):
        raise CommitDoesNotExistError(
            "Given commit id %s not recognized" % commit_id_or_idx)
    return commit_id_or_idx
def get_hook_location(self):
    """
    returns absolute path to location where hooks are stored
    """
    loc = os.path.join(self.path, 'hooks')
    if not self.bare:
        # non-bare repos keep hooks under the .git control directory
        loc = os.path.join(self.path, '.git', 'hooks')
    return loc
@LazyProperty
def last_change(self):
    """
    Returns last change made on this repository as
    `datetime.datetime` object.
    """
    try:
        return self.get_commit().date
    except RepositoryError:
        # empty repository: fall back to filesystem mtime
        tzoffset = makedate()[1]
        return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
def _get_fs_mtime(self):
    """Return the repository's last-modification time from the filesystem."""
    idx_loc = '' if self.bare else '.git'
    # fallback to filesystem
    in_path = os.path.join(self.path, idx_loc, "index")
    he_path = os.path.join(self.path, idx_loc, "HEAD")
    if os.path.exists(in_path):
        return os.stat(in_path).st_mtime
    else:
        # no index file (e.g. bare repo without checkouts): use HEAD
        return os.stat(he_path).st_mtime
@LazyProperty
def description(self):
    """Repository description from git, or the default when unset."""
    description = self._remote.get_description()
    return safe_unicode(description or self.DEFAULT_DESCRIPTION)
def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
    """
    Return an ``OrderedDict`` of ``{ref_name: sha}`` for refs matching
    `prefix`, sorted by name.

    :param prefix: only refs starting with this string are included
    :param reverse: sort names in descending order
    :param strip_prefix: drop `prefix` from the returned names
    """
    if self.is_empty():
        return OrderedDict()

    result = []
    for ref, sha in self._refs.iteritems():
        if ref.startswith(prefix):
            ref_name = ref
            if strip_prefix:
                ref_name = ref[len(prefix):]
            result.append((safe_unicode(ref_name), sha))

    def get_name(entry):
        return entry[0]

    return OrderedDict(sorted(result, key=get_name, reverse=reverse))
def _get_branches(self):
    """Return ``{branch_name: sha}`` for all local branch heads."""
    return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
@CachedProperty
def branches(self):
    """Cached mapping of branch names to commit shas."""
    return self._get_branches()
@CachedProperty
def branches_closed(self):
    """Git has no closed-branch concept (Mercurial API parity)."""
    return {}
@CachedProperty
def bookmarks(self):
    """Git has no bookmarks (Mercurial API parity)."""
    return {}
@CachedProperty
def branches_all(self):
    """Union of open and closed branches (closed is always empty for git)."""
    all_branches = {}
    all_branches.update(self.branches)
    all_branches.update(self.branches_closed)
    return all_branches
@CachedProperty
def tags(self):
    """Cached mapping of tag names to commit shas."""
    return self._get_tags()
def _get_tags(self):
    """Return ``{tag_name: sha}``, sorted by name descending."""
    return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
def tag(self, name, user, commit_id=None, message=None, date=None,
        **kwargs):
    # TODO: fix this method to apply annotated tags correct with message
    """
    Creates and returns a tag for the given ``commit_id``.

    :param name: name for new tag
    :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
    :param commit_id: commit id for which new tag would be created
    :param message: message of the tag's commit
    :param date: date of tag's commit

    :raises TagAlreadyExistError: if tag with same name already exists
    """
    if name in self.tags:
        raise TagAlreadyExistError("Tag %s already exists" % name)
    commit = self.get_commit(commit_id=commit_id)
    message = message or "Added tag %s for commit %s" % (name, commit.raw_id)

    self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)

    # new ref was written; drop cached ref/tag views
    self._invalidate_prop_cache('tags')
    self._invalidate_prop_cache('_refs')

    return commit
def remove_tag(self, name, user, message=None, date=None):
    """
    Removes tag with the given ``name``.

    :param name: name of the tag to be removed
    :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
    :param message: message of the tag's removal commit
    :param date: date of tag's removal commit

    :raises TagDoesNotExistError: if tag with given name does not exists
    """
    if name not in self.tags:
        raise TagDoesNotExistError("Tag %s does not exist" % name)

    self._remote.tag_remove(name)
    # ref was deleted; drop cached ref/tag views
    self._invalidate_prop_cache('tags')
    self._invalidate_prop_cache('_refs')
def _get_refs(self):
    """Fetch the full ref mapping from the remote backend."""
    return self._remote.get_refs()
@CachedProperty
def _refs(self):
    """Cached ``{ref: sha}`` mapping of all repository refs."""
    return self._get_refs()
@property
def _ref_tree(self):
    """
    Build a nested dict of refs split on '/', e.g.
    ``{'refs': {'heads': {'master': sha}}}``.
    """
    node = tree = {}
    for ref, sha in self._refs.iteritems():
        path = ref.split('/')
        for bit in path[:-1]:
            node = node.setdefault(bit, {})
        node[path[-1]] = sha
        # restart descent from the root for the next ref
        node = tree
    return tree
def get_remote_ref(self, ref_name):
    """
    Return the sha of ``refs/remotes/origin/<ref_name>``, or ``None``
    when the ref is missing or lookup fails.
    """
    ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
    try:
        return self._refs[ref_key]
    except Exception:
        return
def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=True):
    """
    Returns `GitCommit` object representing commit from git repository
    at the given `commit_id` or head (most recent commit) if None given.

    :param commit_id: commit sha or symbolic reference
    :param commit_idx: positional index into ``commit_ids``
    :param pre_load: attributes to eagerly load on the commit
    :param translate_tag: resolve tag names/short ids via the remote
    :raises EmptyRepositoryError: when the repository has no commits
    """
    if self.is_empty():
        raise EmptyRepositoryError("There are no commits yet")

    if commit_id is not None:
        self._validate_commit_id(commit_id)
        try:
            # we have cached idx, use it without contacting the remote
            idx = self._commit_ids[commit_id]
            return GitCommit(self, commit_id, idx, pre_load=pre_load)
        except KeyError:
            pass

    elif commit_idx is not None:
        self._validate_commit_idx(commit_idx)
        try:
            _commit_id = self.commit_ids[commit_idx]
            if commit_idx < 0:
                # normalize negative index to its positive position
                commit_idx = self.commit_ids.index(_commit_id)
            return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
        except IndexError:
            commit_id = commit_idx
    else:
        commit_id = "tip"

    if translate_tag:
        commit_id = self._lookup_commit(commit_id)

    try:
        idx = self._commit_ids[commit_id]
    except KeyError:
        idx = -1

    return GitCommit(self, commit_id, idx, pre_load=pre_load)
def get_commits(
        self, start_id=None, end_id=None, start_date=None, end_date=None,
        branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
    """
    Returns generator of `GitCommit` objects from start to end (both
    are inclusive), in ascending date order.

    :param start_id: None, str(commit_id)
    :param end_id: None, str(commit_id)
    :param start_date: if specified, commits with commit date less than
        ``start_date`` would be filtered out from returned set
    :param end_date: if specified, commits with commit date greater than
        ``end_date`` would be filtered out from returned set
    :param branch_name: if specified, commits not reachable from given
        branch would be filtered out from returned set
    :param show_hidden: Show hidden commits such as obsolete or hidden from
        Mercurial evolve
    :raise BranchDoesNotExistError: If given `branch_name` does not
        exist.
    :raise CommitDoesNotExistError: If commits for given `start` or
        `end` could not be found.
    """
    if self.is_empty():
        raise EmptyRepositoryError("There are no commits yet")

    self._validate_branch_name(branch_name)

    if start_id is not None:
        self._validate_commit_id(start_id)
    if end_id is not None:
        self._validate_commit_id(end_id)

    start_raw_id = self._lookup_commit(start_id)
    start_pos = self._commit_ids[start_raw_id] if start_id else None
    end_raw_id = self._lookup_commit(end_id)
    end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None

    if None not in [start_id, end_id] and start_pos > end_pos:
        raise RepositoryError(
            "Start commit '%s' cannot be after end commit '%s'" %
            (start_id, end_id))

    if end_pos is not None:
        # make end inclusive for slicing below
        end_pos += 1

    filter_ = []
    if branch_name:
        filter_.append({'branch_name': branch_name})
    if start_date and not end_date:
        filter_.append({'since': start_date})
    if end_date and not start_date:
        filter_.append({'until': end_date})
    if start_date and end_date:
        filter_.append({'since': start_date})
        filter_.append({'until': end_date})

    if filter_:
        revfilters = {
            'branch_name': branch_name,
            'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
            'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
            'start': start_pos,
            'end': end_pos,
        }
        commit_ids = self._get_commit_ids(filters=revfilters)
    else:
        commit_ids = self.commit_ids

    if start_pos or end_pos:
        commit_ids = commit_ids[start_pos: end_pos]

    return CollectionGenerator(self, commit_ids, pre_load=pre_load,
                               translate_tag=translate_tags)
def get_diff(
        self, commit1, commit2, path='', ignore_whitespace=False,
        context=3, path1=None):
    """
    Returns (git like) *diff*, as plain text. Shows changes introduced by
    ``commit2`` since ``commit1``.

    :param commit1: Entry point from which diff is shown. Can be
        ``self.EMPTY_COMMIT`` - in this case, patch showing all
        the changes since empty state of the repository until ``commit2``
    :param commit2: Until which commits changes should be shown.
    :param ignore_whitespace: If set to ``True``, would not show whitespace
        changes. Defaults to ``False``.
    :param context: How many lines before/after changed lines should be
        shown. Defaults to ``3``.
    """
    self._validate_diff_commits(commit1, commit2)
    if path1 is not None and path1 != path:
        raise ValueError("Diff of two different paths not supported.")

    if path:
        file_filter = path
    else:
        file_filter = None

    diff = self._remote.diff(
        commit1.raw_id, commit2.raw_id, file_filter=file_filter,
        opt_ignorews=ignore_whitespace,
        context=context)
    return GitDiff(diff)
def strip(self, commit_id, branch_name):
    """
    Remove `commit_id` from `branch_name` by resetting the branch head
    to the commit's first parent. Returns the new number of commits.

    :raises Exception: when `commit_id` is a merge commit
    """
    commit = self.get_commit(commit_id=commit_id)
    if commit.merge:
        raise Exception('Cannot reset to merge commit')

    # parent is going to be the new head now
    commit = commit.parents[0]
    self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)

    # clear cached properties
    self._invalidate_prop_cache('commit_ids')
    self._invalidate_prop_cache('_refs')
    self._invalidate_prop_cache('branches')

    return len(self.commit_ids)
def get_common_ancestor(self, commit_id1, commit_id2, repo2):
    """
    Return the common ancestor commit id of `commit_id1` (in this repo)
    and `commit_id2` (in `repo2`), or ``None`` when no ancestor exists.

    Uses ``git merge-base`` for the same-repository case, and the
    missing-revisions set for the cross-repository case.
    """
    if commit_id1 == commit_id2:
        return commit_id1

    if self != repo2:
        commits = self._remote.get_missing_revs(
            commit_id1, commit_id2, repo2.path)
        if commits:
            commit = repo2.get_commit(commits[-1])
            if commit.parents:
                ancestor_id = commit.parents[0].raw_id
            else:
                ancestor_id = None
        else:
            # no commits from other repo, ancestor_id is the commit_id2
            ancestor_id = commit_id2
    else:
        output, __ = self.run_git_command(
            ['merge-base', commit_id1, commit_id2])
        ancestor_id = re.findall(r'[0-9a-fA-F]{40}', output)[0]

    return ancestor_id
def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
    """
    Return the list of commits reachable from `commit_id2` but not from
    `commit_id1`, oldest first, across this repo and `repo2`.
    """
    repo1 = self
    ancestor_id = None

    if commit_id1 == commit_id2:
        commits = []
    elif repo1 != repo2:
        missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
                                                    repo2.path)
        commits = [
            repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
            for commit_id in reversed(missing_ids)]
    else:
        output, __ = repo1.run_git_command(
            ['log', '--reverse', '--pretty=format: %H', '-s',
             '%s..%s' % (commit_id1, commit_id2)])
        commits = [
            repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
            for commit_id in re.findall(r'[0-9a-fA-F]{40}', output)]

    return commits
@LazyProperty
def in_memory_commit(self):
    """
    Returns ``GitInMemoryCommit`` object for this repository.
    """
    return GitInMemoryCommit(self)
def pull(self, url, commit_ids=None, update_after=False):
    """
    Pull changes from external location. Pull is different in GIT
    that fetch since it's doing a checkout

    :param commit_ids: Optional. Can be set to a list of commit ids
        which shall be pulled from the other repository.
    """
    refs = None
    if commit_ids is not None:
        # restrict the pull to refs that point at the requested commits
        remote_refs = self._remote.get_remote_refs(url)
        refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
    self._remote.pull(url, refs=refs, update_after=update_after)
    self._remote.invalidate_vcs_cache()
def fetch(self, url, commit_ids=None):
    """
    Fetch all git objects from external location.
    """
    self._remote.sync_fetch(url, refs=commit_ids)
    self._remote.invalidate_vcs_cache()
def push(self, url):
    """Push all refs to the external location at `url`."""
    refs = None
    self._remote.sync_push(url, refs=refs)
def set_refs(self, ref_name, commit_id):
    """Point `ref_name` at `commit_id` and drop the cached ref map."""
    self._remote.set_refs(ref_name, commit_id)
    self._invalidate_prop_cache('_refs')
def remove_ref(self, ref_name):
    """Delete `ref_name` and drop the cached ref map."""
    self._remote.remove_ref(ref_name)
    self._invalidate_prop_cache('_refs')
def _update_server_info(self):
    """
    runs gits update-server-info command in this repo instance
    """
    self._remote.update_server_info()
def _current_branch(self):
    """
    Return the name of the current branch.

    It only works for non bare repositories (i.e. repositories with a
    working copy)

    :raises RepositoryError: for bare repositories
    """
    if self.bare:
        raise RepositoryError('Bare git repos do not have active branches')

    if self.is_empty():
        return None

    stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
    return stdout.strip()
687 def _checkout(self, branch_name, create=False, force=False):
687 def _checkout(self, branch_name, create=False, force=False):
688 """
688 """
689 Checkout a branch in the working directory.
689 Checkout a branch in the working directory.
690
690
691 It tries to create the branch if create is True, failing if the branch
691 It tries to create the branch if create is True, failing if the branch
692 already exists.
692 already exists.
693
693
694 It only works for non bare repositories (i.e. repositories with a
694 It only works for non bare repositories (i.e. repositories with a
695 working copy)
695 working copy)
696 """
696 """
697 if self.bare:
697 if self.bare:
698 raise RepositoryError('Cannot checkout branches in a bare git repo')
698 raise RepositoryError('Cannot checkout branches in a bare git repo')
699
699
700 cmd = ['checkout']
700 cmd = ['checkout']
701 if force:
701 if force:
702 cmd.append('-f')
702 cmd.append('-f')
703 if create:
703 if create:
704 cmd.append('-b')
704 cmd.append('-b')
705 cmd.append(branch_name)
705 cmd.append(branch_name)
706 self.run_git_command(cmd, fail_on_stderr=False)
706 self.run_git_command(cmd, fail_on_stderr=False)
707
707
708 def _create_branch(self, branch_name, commit_id):
708 def _create_branch(self, branch_name, commit_id):
709 """
709 """
710 creates a branch in a GIT repo
710 creates a branch in a GIT repo
711 """
711 """
712 self._remote.create_branch(branch_name, commit_id)
712 self._remote.create_branch(branch_name, commit_id)
713
713
714 def _identify(self):
714 def _identify(self):
715 """
715 """
716 Return the current state of the working directory.
716 Return the current state of the working directory.
717 """
717 """
718 if self.bare:
718 if self.bare:
719 raise RepositoryError('Bare git repos do not have active branches')
719 raise RepositoryError('Bare git repos do not have active branches')
720
720
721 if self.is_empty():
721 if self.is_empty():
722 return None
722 return None
723
723
724 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
724 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
725 return stdout.strip()
725 return stdout.strip()
726
726
727 def _local_clone(self, clone_path, branch_name, source_branch=None):
727 def _local_clone(self, clone_path, branch_name, source_branch=None):
728 """
728 """
729 Create a local clone of the current repo.
729 Create a local clone of the current repo.
730 """
730 """
731 # N.B.(skreft): the --branch option is required as otherwise the shallow
731 # N.B.(skreft): the --branch option is required as otherwise the shallow
732 # clone will only fetch the active branch.
732 # clone will only fetch the active branch.
733 cmd = ['clone', '--branch', branch_name,
733 cmd = ['clone', '--branch', branch_name,
734 self.path, os.path.abspath(clone_path)]
734 self.path, os.path.abspath(clone_path)]
735
735
736 self.run_git_command(cmd, fail_on_stderr=False)
736 self.run_git_command(cmd, fail_on_stderr=False)
737
737
738 # if we get the different source branch, make sure we also fetch it for
738 # if we get the different source branch, make sure we also fetch it for
739 # merge conditions
739 # merge conditions
740 if source_branch and source_branch != branch_name:
740 if source_branch and source_branch != branch_name:
741 # check if the ref exists.
741 # check if the ref exists.
742 shadow_repo = GitRepository(os.path.abspath(clone_path))
742 shadow_repo = GitRepository(os.path.abspath(clone_path))
743 if shadow_repo.get_remote_ref(source_branch):
743 if shadow_repo.get_remote_ref(source_branch):
744 cmd = ['fetch', self.path, source_branch]
744 cmd = ['fetch', self.path, source_branch]
745 self.run_git_command(cmd, fail_on_stderr=False)
745 self.run_git_command(cmd, fail_on_stderr=False)
746
746
747 def _local_fetch(self, repository_path, branch_name, use_origin=False):
747 def _local_fetch(self, repository_path, branch_name, use_origin=False):
748 """
748 """
749 Fetch a branch from a local repository.
749 Fetch a branch from a local repository.
750 """
750 """
751 repository_path = os.path.abspath(repository_path)
751 repository_path = os.path.abspath(repository_path)
752 if repository_path == self.path:
752 if repository_path == self.path:
753 raise ValueError('Cannot fetch from the same repository')
753 raise ValueError('Cannot fetch from the same repository')
754
754
755 if use_origin:
755 if use_origin:
756 branch_name = '+{branch}:refs/heads/{branch}'.format(
756 branch_name = '+{branch}:refs/heads/{branch}'.format(
757 branch=branch_name)
757 branch=branch_name)
758
758
759 cmd = ['fetch', '--no-tags', '--update-head-ok',
759 cmd = ['fetch', '--no-tags', '--update-head-ok',
760 repository_path, branch_name]
760 repository_path, branch_name]
761 self.run_git_command(cmd, fail_on_stderr=False)
761 self.run_git_command(cmd, fail_on_stderr=False)
762
762
763 def _local_reset(self, branch_name):
763 def _local_reset(self, branch_name):
764 branch_name = '{}'.format(branch_name)
764 branch_name = '{}'.format(branch_name)
765 cmd = ['reset', '--hard', branch_name, '--']
765 cmd = ['reset', '--hard', branch_name, '--']
766 self.run_git_command(cmd, fail_on_stderr=False)
766 self.run_git_command(cmd, fail_on_stderr=False)
767
767
768 def _last_fetch_heads(self):
768 def _last_fetch_heads(self):
769 """
769 """
770 Return the last fetched heads that need merging.
770 Return the last fetched heads that need merging.
771
771
772 The algorithm is defined at
772 The algorithm is defined at
773 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
773 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
774 """
774 """
775 if not self.bare:
775 if not self.bare:
776 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
776 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
777 else:
777 else:
778 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
778 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
779
779
780 heads = []
780 heads = []
781 with open(fetch_heads_path) as f:
781 with open(fetch_heads_path) as f:
782 for line in f:
782 for line in f:
783 if ' not-for-merge ' in line:
783 if ' not-for-merge ' in line:
784 continue
784 continue
785 line = re.sub('\t.*', '', line, flags=re.DOTALL)
785 line = re.sub('\t.*', '', line, flags=re.DOTALL)
786 heads.append(line)
786 heads.append(line)
787
787
788 return heads
788 return heads
789
789
790 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
790 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
791 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
791 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
792
792
793 def _local_pull(self, repository_path, branch_name, ff_only=True):
793 def _local_pull(self, repository_path, branch_name, ff_only=True):
794 """
794 """
795 Pull a branch from a local repository.
795 Pull a branch from a local repository.
796 """
796 """
797 if self.bare:
797 if self.bare:
798 raise RepositoryError('Cannot pull into a bare git repository')
798 raise RepositoryError('Cannot pull into a bare git repository')
799 # N.B.(skreft): The --ff-only option is to make sure this is a
799 # N.B.(skreft): The --ff-only option is to make sure this is a
800 # fast-forward (i.e., we are only pulling new changes and there are no
800 # fast-forward (i.e., we are only pulling new changes and there are no
801 # conflicts with our current branch)
801 # conflicts with our current branch)
802 # Additionally, that option needs to go before --no-tags, otherwise git
802 # Additionally, that option needs to go before --no-tags, otherwise git
803 # pull complains about it being an unknown flag.
803 # pull complains about it being an unknown flag.
804 cmd = ['pull']
804 cmd = ['pull']
805 if ff_only:
805 if ff_only:
806 cmd.append('--ff-only')
806 cmd.append('--ff-only')
807 cmd.extend(['--no-tags', repository_path, branch_name])
807 cmd.extend(['--no-tags', repository_path, branch_name])
808 self.run_git_command(cmd, fail_on_stderr=False)
808 self.run_git_command(cmd, fail_on_stderr=False)
809
809
810 def _local_merge(self, merge_message, user_name, user_email, heads):
810 def _local_merge(self, merge_message, user_name, user_email, heads):
811 """
811 """
812 Merge the given head into the checked out branch.
812 Merge the given head into the checked out branch.
813
813
814 It will force a merge commit.
814 It will force a merge commit.
815
815
816 Currently it raises an error if the repo is empty, as it is not possible
816 Currently it raises an error if the repo is empty, as it is not possible
817 to create a merge commit in an empty repo.
817 to create a merge commit in an empty repo.
818
818
819 :param merge_message: The message to use for the merge commit.
819 :param merge_message: The message to use for the merge commit.
820 :param heads: the heads to merge.
820 :param heads: the heads to merge.
821 """
821 """
822 if self.bare:
822 if self.bare:
823 raise RepositoryError('Cannot merge into a bare git repository')
823 raise RepositoryError('Cannot merge into a bare git repository')
824
824
825 if not heads:
825 if not heads:
826 return
826 return
827
827
828 if self.is_empty():
828 if self.is_empty():
829 # TODO(skreft): do somehting more robust in this case.
829 # TODO(skreft): do something more robust in this case.
830 raise RepositoryError(
830 raise RepositoryError(
831 'Do not know how to merge into empty repositories yet')
831 'Do not know how to merge into empty repositories yet')
832 unresolved = None
832
833
833 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
834 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
834 # commit message. We also specify the user who is doing the merge.
835 # commit message. We also specify the user who is doing the merge.
835 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
836 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
836 '-c', 'user.email=%s' % safe_str(user_email),
837 '-c', 'user.email=%s' % safe_str(user_email),
837 'merge', '--no-ff', '-m', safe_str(merge_message)]
838 'merge', '--no-ff', '-m', safe_str(merge_message)]
838 cmd.extend(heads)
839 cmd.extend(heads)
839 try:
840 try:
840 output = self.run_git_command(cmd, fail_on_stderr=False)
841 output = self.run_git_command(cmd, fail_on_stderr=False)
841 except RepositoryError:
842 except RepositoryError:
843 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
844 fail_on_stderr=False)[0].splitlines()
845 # NOTE(marcink): we add U notation for consistent with HG backend output
846 unresolved = ['U {}'.format(f) for f in files]
847
842 # Cleanup any merge leftovers
848 # Cleanup any merge leftovers
843 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
849 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
844 raise
850
851 if unresolved:
852 raise UnresolvedFilesInRepo(unresolved)
853 else:
854 raise
845
855
846 def _local_push(
856 def _local_push(
847 self, source_branch, repository_path, target_branch,
857 self, source_branch, repository_path, target_branch,
848 enable_hooks=False, rc_scm_data=None):
858 enable_hooks=False, rc_scm_data=None):
849 """
859 """
850 Push the source_branch to the given repository and target_branch.
860 Push the source_branch to the given repository and target_branch.
851
861
852 Currently it if the target_branch is not master and the target repo is
862 Currently it if the target_branch is not master and the target repo is
853 empty, the push will work, but then GitRepository won't be able to find
863 empty, the push will work, but then GitRepository won't be able to find
854 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
864 the pushed branch or the commits. As the HEAD will be corrupted (i.e.,
855 pointing to master, which does not exist).
865 pointing to master, which does not exist).
856
866
857 It does not run the hooks in the target repo.
867 It does not run the hooks in the target repo.
858 """
868 """
859 # TODO(skreft): deal with the case in which the target repo is empty,
869 # TODO(skreft): deal with the case in which the target repo is empty,
860 # and the target_branch is not master.
870 # and the target_branch is not master.
861 target_repo = GitRepository(repository_path)
871 target_repo = GitRepository(repository_path)
862 if (not target_repo.bare and
872 if (not target_repo.bare and
863 target_repo._current_branch() == target_branch):
873 target_repo._current_branch() == target_branch):
864 # Git prevents pushing to the checked out branch, so simulate it by
874 # Git prevents pushing to the checked out branch, so simulate it by
865 # pulling into the target repository.
875 # pulling into the target repository.
866 target_repo._local_pull(self.path, source_branch)
876 target_repo._local_pull(self.path, source_branch)
867 else:
877 else:
868 cmd = ['push', os.path.abspath(repository_path),
878 cmd = ['push', os.path.abspath(repository_path),
869 '%s:%s' % (source_branch, target_branch)]
879 '%s:%s' % (source_branch, target_branch)]
870 gitenv = {}
880 gitenv = {}
871 if rc_scm_data:
881 if rc_scm_data:
872 gitenv.update({'RC_SCM_DATA': rc_scm_data})
882 gitenv.update({'RC_SCM_DATA': rc_scm_data})
873
883
874 if not enable_hooks:
884 if not enable_hooks:
875 gitenv['RC_SKIP_HOOKS'] = '1'
885 gitenv['RC_SKIP_HOOKS'] = '1'
876 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
886 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
877
887
878 def _get_new_pr_branch(self, source_branch, target_branch):
888 def _get_new_pr_branch(self, source_branch, target_branch):
879 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
889 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
880 pr_branches = []
890 pr_branches = []
881 for branch in self.branches:
891 for branch in self.branches:
882 if branch.startswith(prefix):
892 if branch.startswith(prefix):
883 pr_branches.append(int(branch[len(prefix):]))
893 pr_branches.append(int(branch[len(prefix):]))
884
894
885 if not pr_branches:
895 if not pr_branches:
886 branch_id = 0
896 branch_id = 0
887 else:
897 else:
888 branch_id = max(pr_branches) + 1
898 branch_id = max(pr_branches) + 1
889
899
890 return '%s%d' % (prefix, branch_id)
900 return '%s%d' % (prefix, branch_id)
891
901
892 def _maybe_prepare_merge_workspace(
902 def _maybe_prepare_merge_workspace(
893 self, repo_id, workspace_id, target_ref, source_ref):
903 self, repo_id, workspace_id, target_ref, source_ref):
894 shadow_repository_path = self._get_shadow_repository_path(
904 shadow_repository_path = self._get_shadow_repository_path(
895 self.path, repo_id, workspace_id)
905 self.path, repo_id, workspace_id)
896 if not os.path.exists(shadow_repository_path):
906 if not os.path.exists(shadow_repository_path):
897 self._local_clone(
907 self._local_clone(
898 shadow_repository_path, target_ref.name, source_ref.name)
908 shadow_repository_path, target_ref.name, source_ref.name)
899 log.debug('Prepared %s shadow repository in %s',
909 log.debug('Prepared %s shadow repository in %s',
900 self.alias, shadow_repository_path)
910 self.alias, shadow_repository_path)
901
911
902 return shadow_repository_path
912 return shadow_repository_path
903
913
904 def _merge_repo(self, repo_id, workspace_id, target_ref,
914 def _merge_repo(self, repo_id, workspace_id, target_ref,
905 source_repo, source_ref, merge_message,
915 source_repo, source_ref, merge_message,
906 merger_name, merger_email, dry_run=False,
916 merger_name, merger_email, dry_run=False,
907 use_rebase=False, close_branch=False):
917 use_rebase=False, close_branch=False):
908
918
909 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
919 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
910 'rebase' if use_rebase else 'merge', dry_run)
920 'rebase' if use_rebase else 'merge', dry_run)
911 if target_ref.commit_id != self.branches[target_ref.name]:
921 if target_ref.commit_id != self.branches[target_ref.name]:
912 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
922 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
913 target_ref.commit_id, self.branches[target_ref.name])
923 target_ref.commit_id, self.branches[target_ref.name])
914 return MergeResponse(
924 return MergeResponse(
915 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
925 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
916 metadata={'target_ref': target_ref})
926 metadata={'target_ref': target_ref})
917
927
918 shadow_repository_path = self._maybe_prepare_merge_workspace(
928 shadow_repository_path = self._maybe_prepare_merge_workspace(
919 repo_id, workspace_id, target_ref, source_ref)
929 repo_id, workspace_id, target_ref, source_ref)
920 shadow_repo = self.get_shadow_instance(shadow_repository_path)
930 shadow_repo = self.get_shadow_instance(shadow_repository_path)
921
931
922 # checkout source, if it's different. Otherwise we could not
932 # checkout source, if it's different. Otherwise we could not
923 # fetch proper commits for merge testing
933 # fetch proper commits for merge testing
924 if source_ref.name != target_ref.name:
934 if source_ref.name != target_ref.name:
925 if shadow_repo.get_remote_ref(source_ref.name):
935 if shadow_repo.get_remote_ref(source_ref.name):
926 shadow_repo._checkout(source_ref.name, force=True)
936 shadow_repo._checkout(source_ref.name, force=True)
927
937
928 # checkout target, and fetch changes
938 # checkout target, and fetch changes
929 shadow_repo._checkout(target_ref.name, force=True)
939 shadow_repo._checkout(target_ref.name, force=True)
930
940
931 # fetch/reset pull the target, in case it is changed
941 # fetch/reset pull the target, in case it is changed
932 # this handles even force changes
942 # this handles even force changes
933 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
943 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
934 shadow_repo._local_reset(target_ref.name)
944 shadow_repo._local_reset(target_ref.name)
935
945
936 # Need to reload repo to invalidate the cache, or otherwise we cannot
946 # Need to reload repo to invalidate the cache, or otherwise we cannot
937 # retrieve the last target commit.
947 # retrieve the last target commit.
938 shadow_repo = self.get_shadow_instance(shadow_repository_path)
948 shadow_repo = self.get_shadow_instance(shadow_repository_path)
939 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
949 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
940 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
950 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
941 target_ref, target_ref.commit_id,
951 target_ref, target_ref.commit_id,
942 shadow_repo.branches[target_ref.name])
952 shadow_repo.branches[target_ref.name])
943 return MergeResponse(
953 return MergeResponse(
944 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
954 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
945 metadata={'target_ref': target_ref})
955 metadata={'target_ref': target_ref})
946
956
947 # calculate new branch
957 # calculate new branch
948 pr_branch = shadow_repo._get_new_pr_branch(
958 pr_branch = shadow_repo._get_new_pr_branch(
949 source_ref.name, target_ref.name)
959 source_ref.name, target_ref.name)
950 log.debug('using pull-request merge branch: `%s`', pr_branch)
960 log.debug('using pull-request merge branch: `%s`', pr_branch)
951 # checkout to temp branch, and fetch changes
961 # checkout to temp branch, and fetch changes
952 shadow_repo._checkout(pr_branch, create=True)
962 shadow_repo._checkout(pr_branch, create=True)
953 try:
963 try:
954 shadow_repo._local_fetch(source_repo.path, source_ref.name)
964 shadow_repo._local_fetch(source_repo.path, source_ref.name)
955 except RepositoryError:
965 except RepositoryError:
956 log.exception('Failure when doing local fetch on '
966 log.exception('Failure when doing local fetch on '
957 'shadow repo: %s', shadow_repo)
967 'shadow repo: %s', shadow_repo)
958 return MergeResponse(
968 return MergeResponse(
959 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
969 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
960 metadata={'source_ref': source_ref})
970 metadata={'source_ref': source_ref})
961
971
962 merge_ref = None
972 merge_ref = None
963 merge_failure_reason = MergeFailureReason.NONE
973 merge_failure_reason = MergeFailureReason.NONE
964 metadata = {}
974 metadata = {}
965 try:
975 try:
966 shadow_repo._local_merge(merge_message, merger_name, merger_email,
976 shadow_repo._local_merge(merge_message, merger_name, merger_email,
967 [source_ref.commit_id])
977 [source_ref.commit_id])
968 merge_possible = True
978 merge_possible = True
969
979
970 # Need to invalidate the cache, or otherwise we
980 # Need to invalidate the cache, or otherwise we
971 # cannot retrieve the merge commit.
981 # cannot retrieve the merge commit.
972 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
982 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
973 merge_commit_id = shadow_repo.branches[pr_branch]
983 merge_commit_id = shadow_repo.branches[pr_branch]
974
984
975 # Set a reference pointing to the merge commit. This reference may
985 # Set a reference pointing to the merge commit. This reference may
976 # be used to easily identify the last successful merge commit in
986 # be used to easily identify the last successful merge commit in
977 # the shadow repository.
987 # the shadow repository.
978 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
988 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
979 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
989 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
980 except RepositoryError:
990 except RepositoryError as e:
981 log.exception('Failure when doing local merge on git shadow repo')
991 log.exception('Failure when doing local merge on git shadow repo')
992 if isinstance(e, UnresolvedFilesInRepo):
993 metadata['unresolved_files'] = 'file: ' + (', file: '.join(e.args[0]))
994
982 merge_possible = False
995 merge_possible = False
983 merge_failure_reason = MergeFailureReason.MERGE_FAILED
996 merge_failure_reason = MergeFailureReason.MERGE_FAILED
984
997
985 if merge_possible and not dry_run:
998 if merge_possible and not dry_run:
986 try:
999 try:
987 shadow_repo._local_push(
1000 shadow_repo._local_push(
988 pr_branch, self.path, target_ref.name, enable_hooks=True,
1001 pr_branch, self.path, target_ref.name, enable_hooks=True,
989 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1002 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
990 merge_succeeded = True
1003 merge_succeeded = True
991 except RepositoryError:
1004 except RepositoryError:
992 log.exception(
1005 log.exception(
993 'Failure when doing local push from the shadow '
1006 'Failure when doing local push from the shadow '
994 'repository to the target repository at %s.', self.path)
1007 'repository to the target repository at %s.', self.path)
995 merge_succeeded = False
1008 merge_succeeded = False
996 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1009 merge_failure_reason = MergeFailureReason.PUSH_FAILED
997 metadata['target'] = 'git shadow repo'
1010 metadata['target'] = 'git shadow repo'
998 metadata['merge_commit'] = pr_branch
1011 metadata['merge_commit'] = pr_branch
999 else:
1012 else:
1000 merge_succeeded = False
1013 merge_succeeded = False
1001
1014
1002 return MergeResponse(
1015 return MergeResponse(
1003 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1016 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1004 metadata=metadata)
1017 metadata=metadata)
@@ -1,952 +1,972 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24 import os
24 import os
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import urllib
27 import urllib
28
28
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 from rhodecode.lib.compat import OrderedDict
31 from rhodecode.lib.compat import OrderedDict
32 from rhodecode.lib.datelib import (
32 from rhodecode.lib.datelib import (
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils import safe_unicode, safe_str
35 from rhodecode.lib.utils2 import CachedProperty
35 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.vcs import connection, exceptions
36 from rhodecode.lib.vcs import connection, exceptions
37 from rhodecode.lib.vcs.backends.base import (
37 from rhodecode.lib.vcs.backends.base import (
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 MergeFailureReason, Reference, BasePathPermissionChecker)
39 MergeFailureReason, Reference, BasePathPermissionChecker)
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 from rhodecode.lib.vcs.exceptions import (
43 from rhodecode.lib.vcs.exceptions import (
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 from rhodecode.lib.vcs.compat import configparser
46 from rhodecode.lib.vcs.compat import configparser
47
47
48 hexlify = binascii.hexlify
48 hexlify = binascii.hexlify
49 nullid = "\0" * 20
49 nullid = "\0" * 20
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class MercurialRepository(BaseRepository):
54 class MercurialRepository(BaseRepository):
55 """
55 """
56 Mercurial repository backend
56 Mercurial repository backend
57 """
57 """
58 DEFAULT_BRANCH_NAME = 'default'
58 DEFAULT_BRANCH_NAME = 'default'
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62 """
62 """
63 Raises RepositoryError if repository could not be find at the given
63 Raises RepositoryError if repository could not be find at the given
64 ``repo_path``.
64 ``repo_path``.
65
65
66 :param repo_path: local path of the repository
66 :param repo_path: local path of the repository
67 :param config: config object containing the repo configuration
67 :param config: config object containing the repo configuration
68 :param create=False: if set to True, would try to create repository if
68 :param create=False: if set to True, would try to create repository if
69 it does not exist rather than raising exception
69 it does not exist rather than raising exception
70 :param src_url=None: would try to clone repository from given location
70 :param src_url=None: would try to clone repository from given location
71 :param do_workspace_checkout=False: sets update of working copy after
71 :param do_workspace_checkout=False: sets update of working copy after
72 making a clone
72 making a clone
73 :param bare: not used, compatible with other VCS
73 :param bare: not used, compatible with other VCS
74 """
74 """
75
75
76 self.path = safe_str(os.path.abspath(repo_path))
76 self.path = safe_str(os.path.abspath(repo_path))
77 # mercurial since 4.4.X requires certain configuration to be present
77 # mercurial since 4.4.X requires certain configuration to be present
78 # because sometimes we init the repos with config we need to meet
78 # because sometimes we init the repos with config we need to meet
79 # special requirements
79 # special requirements
80 self.config = config if config else self.get_default_config(
80 self.config = config if config else self.get_default_config(
81 default=[('extensions', 'largefiles', '1')])
81 default=[('extensions', 'largefiles', '1')])
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83
83
84 self._init_repo(create, src_url, do_workspace_checkout)
84 self._init_repo(create, src_url, do_workspace_checkout)
85
85
86 # caches
86 # caches
87 self._commit_ids = {}
87 self._commit_ids = {}
88
88
89 @LazyProperty
89 @LazyProperty
90 def _remote(self):
90 def _remote(self):
91 repo_id = self.path
91 repo_id = self.path
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93
93
94 @CachedProperty
94 @CachedProperty
95 def commit_ids(self):
95 def commit_ids(self):
96 """
96 """
97 Returns list of commit ids, in ascending order. Being lazy
97 Returns list of commit ids, in ascending order. Being lazy
98 attribute allows external tools to inject shas from cache.
98 attribute allows external tools to inject shas from cache.
99 """
99 """
100 commit_ids = self._get_all_commit_ids()
100 commit_ids = self._get_all_commit_ids()
101 self._rebuild_cache(commit_ids)
101 self._rebuild_cache(commit_ids)
102 return commit_ids
102 return commit_ids
103
103
104 def _rebuild_cache(self, commit_ids):
104 def _rebuild_cache(self, commit_ids):
105 self._commit_ids = dict((commit_id, index)
105 self._commit_ids = dict((commit_id, index)
106 for index, commit_id in enumerate(commit_ids))
106 for index, commit_id in enumerate(commit_ids))
107
107
108 @CachedProperty
108 @CachedProperty
109 def branches(self):
109 def branches(self):
110 return self._get_branches()
110 return self._get_branches()
111
111
112 @CachedProperty
112 @CachedProperty
113 def branches_closed(self):
113 def branches_closed(self):
114 return self._get_branches(active=False, closed=True)
114 return self._get_branches(active=False, closed=True)
115
115
116 @CachedProperty
116 @CachedProperty
117 def branches_all(self):
117 def branches_all(self):
118 all_branches = {}
118 all_branches = {}
119 all_branches.update(self.branches)
119 all_branches.update(self.branches)
120 all_branches.update(self.branches_closed)
120 all_branches.update(self.branches_closed)
121 return all_branches
121 return all_branches
122
122
123 def _get_branches(self, active=True, closed=False):
123 def _get_branches(self, active=True, closed=False):
124 """
124 """
125 Gets branches for this repository
125 Gets branches for this repository
126 Returns only not closed active branches by default
126 Returns only not closed active branches by default
127
127
128 :param active: return also active branches
128 :param active: return also active branches
129 :param closed: return also closed branches
129 :param closed: return also closed branches
130
130
131 """
131 """
132 if self.is_empty():
132 if self.is_empty():
133 return {}
133 return {}
134
134
135 def get_name(ctx):
135 def get_name(ctx):
136 return ctx[0]
136 return ctx[0]
137
137
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 self._remote.branches(active, closed).items()]
139 self._remote.branches(active, closed).items()]
140
140
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142
142
143 @CachedProperty
143 @CachedProperty
144 def tags(self):
144 def tags(self):
145 """
145 """
146 Gets tags for this repository
146 Gets tags for this repository
147 """
147 """
148 return self._get_tags()
148 return self._get_tags()
149
149
150 def _get_tags(self):
150 def _get_tags(self):
151 if self.is_empty():
151 if self.is_empty():
152 return {}
152 return {}
153
153
154 def get_name(ctx):
154 def get_name(ctx):
155 return ctx[0]
155 return ctx[0]
156
156
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 self._remote.tags().items()]
158 self._remote.tags().items()]
159
159
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161
161
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 """
163 """
164 Creates and returns a tag for the given ``commit_id``.
164 Creates and returns a tag for the given ``commit_id``.
165
165
166 :param name: name for new tag
166 :param name: name for new tag
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 :param commit_id: commit id for which new tag would be created
168 :param commit_id: commit id for which new tag would be created
169 :param message: message of the tag's commit
169 :param message: message of the tag's commit
170 :param date: date of tag's commit
170 :param date: date of tag's commit
171
171
172 :raises TagAlreadyExistError: if tag with same name already exists
172 :raises TagAlreadyExistError: if tag with same name already exists
173 """
173 """
174 if name in self.tags:
174 if name in self.tags:
175 raise TagAlreadyExistError("Tag %s already exists" % name)
175 raise TagAlreadyExistError("Tag %s already exists" % name)
176
176
177 commit = self.get_commit(commit_id=commit_id)
177 commit = self.get_commit(commit_id=commit_id)
178 local = kwargs.setdefault('local', False)
178 local = kwargs.setdefault('local', False)
179
179
180 if message is None:
180 if message is None:
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
182
182
183 date, tz = date_to_timestamp_plus_offset(date)
183 date, tz = date_to_timestamp_plus_offset(date)
184
184
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 self._remote.invalidate_vcs_cache()
186 self._remote.invalidate_vcs_cache()
187
187
188 # Reinitialize tags
188 # Reinitialize tags
189 self._invalidate_prop_cache('tags')
189 self._invalidate_prop_cache('tags')
190 tag_id = self.tags[name]
190 tag_id = self.tags[name]
191
191
192 return self.get_commit(commit_id=tag_id)
192 return self.get_commit(commit_id=tag_id)
193
193
194 def remove_tag(self, name, user, message=None, date=None):
194 def remove_tag(self, name, user, message=None, date=None):
195 """
195 """
196 Removes tag with the given `name`.
196 Removes tag with the given `name`.
197
197
198 :param name: name of the tag to be removed
198 :param name: name of the tag to be removed
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 :param message: message of the tag's removal commit
200 :param message: message of the tag's removal commit
201 :param date: date of tag's removal commit
201 :param date: date of tag's removal commit
202
202
203 :raises TagDoesNotExistError: if tag with given name does not exists
203 :raises TagDoesNotExistError: if tag with given name does not exists
204 """
204 """
205 if name not in self.tags:
205 if name not in self.tags:
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207
207
208 if message is None:
208 if message is None:
209 message = "Removed tag %s" % name
209 message = "Removed tag %s" % name
210 local = False
210 local = False
211
211
212 date, tz = date_to_timestamp_plus_offset(date)
212 date, tz = date_to_timestamp_plus_offset(date)
213
213
214 self._remote.tag(name, nullid, message, local, user, date, tz)
214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 self._remote.invalidate_vcs_cache()
215 self._remote.invalidate_vcs_cache()
216 self._invalidate_prop_cache('tags')
216 self._invalidate_prop_cache('tags')
217
217
218 @LazyProperty
218 @LazyProperty
219 def bookmarks(self):
219 def bookmarks(self):
220 """
220 """
221 Gets bookmarks for this repository
221 Gets bookmarks for this repository
222 """
222 """
223 return self._get_bookmarks()
223 return self._get_bookmarks()
224
224
225 def _get_bookmarks(self):
225 def _get_bookmarks(self):
226 if self.is_empty():
226 if self.is_empty():
227 return {}
227 return {}
228
228
229 def get_name(ctx):
229 def get_name(ctx):
230 return ctx[0]
230 return ctx[0]
231
231
232 _bookmarks = [
232 _bookmarks = [
233 (safe_unicode(n), hexlify(h)) for n, h in
233 (safe_unicode(n), hexlify(h)) for n, h in
234 self._remote.bookmarks().items()]
234 self._remote.bookmarks().items()]
235
235
236 return OrderedDict(sorted(_bookmarks, key=get_name))
236 return OrderedDict(sorted(_bookmarks, key=get_name))
237
237
238 def _get_all_commit_ids(self):
238 def _get_all_commit_ids(self):
239 return self._remote.get_all_commit_ids('visible')
239 return self._remote.get_all_commit_ids('visible')
240
240
241 def get_diff(
241 def get_diff(
242 self, commit1, commit2, path='', ignore_whitespace=False,
242 self, commit1, commit2, path='', ignore_whitespace=False,
243 context=3, path1=None):
243 context=3, path1=None):
244 """
244 """
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 `commit2` since `commit1`.
246 `commit2` since `commit1`.
247
247
248 :param commit1: Entry point from which diff is shown. Can be
248 :param commit1: Entry point from which diff is shown. Can be
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 the changes since empty state of the repository until `commit2`
250 the changes since empty state of the repository until `commit2`
251 :param commit2: Until which commit changes should be shown.
251 :param commit2: Until which commit changes should be shown.
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 changes. Defaults to ``False``.
253 changes. Defaults to ``False``.
254 :param context: How many lines before/after changed lines should be
254 :param context: How many lines before/after changed lines should be
255 shown. Defaults to ``3``.
255 shown. Defaults to ``3``.
256 """
256 """
257 self._validate_diff_commits(commit1, commit2)
257 self._validate_diff_commits(commit1, commit2)
258 if path1 is not None and path1 != path:
258 if path1 is not None and path1 != path:
259 raise ValueError("Diff of two different paths not supported.")
259 raise ValueError("Diff of two different paths not supported.")
260
260
261 if path:
261 if path:
262 file_filter = [self.path, path]
262 file_filter = [self.path, path]
263 else:
263 else:
264 file_filter = None
264 file_filter = None
265
265
266 diff = self._remote.diff(
266 diff = self._remote.diff(
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 opt_git=True, opt_ignorews=ignore_whitespace,
268 opt_git=True, opt_ignorews=ignore_whitespace,
269 context=context)
269 context=context)
270 return MercurialDiff(diff)
270 return MercurialDiff(diff)
271
271
272 def strip(self, commit_id, branch=None):
272 def strip(self, commit_id, branch=None):
273 self._remote.strip(commit_id, update=False, backup="none")
273 self._remote.strip(commit_id, update=False, backup="none")
274
274
275 self._remote.invalidate_vcs_cache()
275 self._remote.invalidate_vcs_cache()
276 # clear cache
276 # clear cache
277 self._invalidate_prop_cache('commit_ids')
277 self._invalidate_prop_cache('commit_ids')
278
278
279 return len(self.commit_ids)
279 return len(self.commit_ids)
280
280
281 def verify(self):
281 def verify(self):
282 verify = self._remote.verify()
282 verify = self._remote.verify()
283
283
284 self._remote.invalidate_vcs_cache()
284 self._remote.invalidate_vcs_cache()
285 return verify
285 return verify
286
286
287 def hg_update_cache(self):
287 def hg_update_cache(self):
288 update_cache = self._remote.hg_update_cache()
288 update_cache = self._remote.hg_update_cache()
289
289
290 self._remote.invalidate_vcs_cache()
290 self._remote.invalidate_vcs_cache()
291 return update_cache
291 return update_cache
292
292
293 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
293 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
294 if commit_id1 == commit_id2:
294 if commit_id1 == commit_id2:
295 return commit_id1
295 return commit_id1
296
296
297 ancestors = self._remote.revs_from_revspec(
297 ancestors = self._remote.revs_from_revspec(
298 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
298 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
299 other_path=repo2.path)
299 other_path=repo2.path)
300 return repo2[ancestors[0]].raw_id if ancestors else None
300 return repo2[ancestors[0]].raw_id if ancestors else None
301
301
302 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
302 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
303 if commit_id1 == commit_id2:
303 if commit_id1 == commit_id2:
304 commits = []
304 commits = []
305 else:
305 else:
306 if merge:
306 if merge:
307 indexes = self._remote.revs_from_revspec(
307 indexes = self._remote.revs_from_revspec(
308 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
308 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
309 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
309 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
310 else:
310 else:
311 indexes = self._remote.revs_from_revspec(
311 indexes = self._remote.revs_from_revspec(
312 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
312 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
313 commit_id1, other_path=repo2.path)
313 commit_id1, other_path=repo2.path)
314
314
315 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
315 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
316 for idx in indexes]
316 for idx in indexes]
317
317
318 return commits
318 return commits
319
319
320 @staticmethod
320 @staticmethod
321 def check_url(url, config):
321 def check_url(url, config):
322 """
322 """
323 Function will check given url and try to verify if it's a valid
323 Function will check given url and try to verify if it's a valid
324 link. Sometimes it may happened that mercurial will issue basic
324 link. Sometimes it may happened that mercurial will issue basic
325 auth request that can cause whole API to hang when used from python
325 auth request that can cause whole API to hang when used from python
326 or other external calls.
326 or other external calls.
327
327
328 On failures it'll raise urllib2.HTTPError, exception is also thrown
328 On failures it'll raise urllib2.HTTPError, exception is also thrown
329 when the return code is non 200
329 when the return code is non 200
330 """
330 """
331 # check first if it's not an local url
331 # check first if it's not an local url
332 if os.path.isdir(url) or url.startswith('file:'):
332 if os.path.isdir(url) or url.startswith('file:'):
333 return True
333 return True
334
334
335 # Request the _remote to verify the url
335 # Request the _remote to verify the url
336 return connection.Hg.check_url(url, config.serialize())
336 return connection.Hg.check_url(url, config.serialize())
337
337
338 @staticmethod
338 @staticmethod
339 def is_valid_repository(path):
339 def is_valid_repository(path):
340 return os.path.isdir(os.path.join(path, '.hg'))
340 return os.path.isdir(os.path.join(path, '.hg'))
341
341
342 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
342 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
343 """
343 """
344 Function will check for mercurial repository in given path. If there
344 Function will check for mercurial repository in given path. If there
345 is no repository in that path it will raise an exception unless
345 is no repository in that path it will raise an exception unless
346 `create` parameter is set to True - in that case repository would
346 `create` parameter is set to True - in that case repository would
347 be created.
347 be created.
348
348
349 If `src_url` is given, would try to clone repository from the
349 If `src_url` is given, would try to clone repository from the
350 location at given clone_point. Additionally it'll make update to
350 location at given clone_point. Additionally it'll make update to
351 working copy accordingly to `do_workspace_checkout` flag.
351 working copy accordingly to `do_workspace_checkout` flag.
352 """
352 """
353 if create and os.path.exists(self.path):
353 if create and os.path.exists(self.path):
354 raise RepositoryError(
354 raise RepositoryError(
355 "Cannot create repository at %s, location already exist"
355 "Cannot create repository at %s, location already exist"
356 % self.path)
356 % self.path)
357
357
358 if src_url:
358 if src_url:
359 url = str(self._get_url(src_url))
359 url = str(self._get_url(src_url))
360 MercurialRepository.check_url(url, self.config)
360 MercurialRepository.check_url(url, self.config)
361
361
362 self._remote.clone(url, self.path, do_workspace_checkout)
362 self._remote.clone(url, self.path, do_workspace_checkout)
363
363
364 # Don't try to create if we've already cloned repo
364 # Don't try to create if we've already cloned repo
365 create = False
365 create = False
366
366
367 if create:
367 if create:
368 os.makedirs(self.path, mode=0o755)
368 os.makedirs(self.path, mode=0o755)
369 self._remote.localrepository(create)
369 self._remote.localrepository(create)
370
370
371 @LazyProperty
371 @LazyProperty
372 def in_memory_commit(self):
372 def in_memory_commit(self):
373 return MercurialInMemoryCommit(self)
373 return MercurialInMemoryCommit(self)
374
374
375 @LazyProperty
375 @LazyProperty
376 def description(self):
376 def description(self):
377 description = self._remote.get_config_value(
377 description = self._remote.get_config_value(
378 'web', 'description', untrusted=True)
378 'web', 'description', untrusted=True)
379 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
379 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
380
380
381 @LazyProperty
381 @LazyProperty
382 def contact(self):
382 def contact(self):
383 contact = (
383 contact = (
384 self._remote.get_config_value("web", "contact") or
384 self._remote.get_config_value("web", "contact") or
385 self._remote.get_config_value("ui", "username"))
385 self._remote.get_config_value("ui", "username"))
386 return safe_unicode(contact or self.DEFAULT_CONTACT)
386 return safe_unicode(contact or self.DEFAULT_CONTACT)
387
387
388 @LazyProperty
388 @LazyProperty
389 def last_change(self):
389 def last_change(self):
390 """
390 """
391 Returns last change made on this repository as
391 Returns last change made on this repository as
392 `datetime.datetime` object.
392 `datetime.datetime` object.
393 """
393 """
394 try:
394 try:
395 return self.get_commit().date
395 return self.get_commit().date
396 except RepositoryError:
396 except RepositoryError:
397 tzoffset = makedate()[1]
397 tzoffset = makedate()[1]
398 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
398 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
399
399
400 def _get_fs_mtime(self):
400 def _get_fs_mtime(self):
401 # fallback to filesystem
401 # fallback to filesystem
402 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
402 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
403 st_path = os.path.join(self.path, '.hg', "store")
403 st_path = os.path.join(self.path, '.hg', "store")
404 if os.path.exists(cl_path):
404 if os.path.exists(cl_path):
405 return os.stat(cl_path).st_mtime
405 return os.stat(cl_path).st_mtime
406 else:
406 else:
407 return os.stat(st_path).st_mtime
407 return os.stat(st_path).st_mtime
408
408
409 def _get_url(self, url):
409 def _get_url(self, url):
410 """
410 """
411 Returns normalized url. If schema is not given, would fall
411 Returns normalized url. If schema is not given, would fall
412 to filesystem
412 to filesystem
413 (``file:///``) schema.
413 (``file:///``) schema.
414 """
414 """
415 url = url.encode('utf8')
415 url = url.encode('utf8')
416 if url != 'default' and '://' not in url:
416 if url != 'default' and '://' not in url:
417 url = "file:" + urllib.pathname2url(url)
417 url = "file:" + urllib.pathname2url(url)
418 return url
418 return url
419
419
420 def get_hook_location(self):
420 def get_hook_location(self):
421 """
421 """
422 returns absolute path to location where hooks are stored
422 returns absolute path to location where hooks are stored
423 """
423 """
424 return os.path.join(self.path, '.hg', '.hgrc')
424 return os.path.join(self.path, '.hg', '.hgrc')
425
425
426 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
426 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
427 """
427 """
428 Returns ``MercurialCommit`` object representing repository's
428 Returns ``MercurialCommit`` object representing repository's
429 commit at the given `commit_id` or `commit_idx`.
429 commit at the given `commit_id` or `commit_idx`.
430 """
430 """
431 if self.is_empty():
431 if self.is_empty():
432 raise EmptyRepositoryError("There are no commits yet")
432 raise EmptyRepositoryError("There are no commits yet")
433
433
434 if commit_id is not None:
434 if commit_id is not None:
435 self._validate_commit_id(commit_id)
435 self._validate_commit_id(commit_id)
436 try:
436 try:
437 # we have cached idx, use it without contacting the remote
437 # we have cached idx, use it without contacting the remote
438 idx = self._commit_ids[commit_id]
438 idx = self._commit_ids[commit_id]
439 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
439 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
440 except KeyError:
440 except KeyError:
441 pass
441 pass
442
442
443 elif commit_idx is not None:
443 elif commit_idx is not None:
444 self._validate_commit_idx(commit_idx)
444 self._validate_commit_idx(commit_idx)
445 try:
445 try:
446 _commit_id = self.commit_ids[commit_idx]
446 _commit_id = self.commit_ids[commit_idx]
447 if commit_idx < 0:
447 if commit_idx < 0:
448 commit_idx = self.commit_ids.index(_commit_id)
448 commit_idx = self.commit_ids.index(_commit_id)
449
449
450 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
450 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
451 except IndexError:
451 except IndexError:
452 commit_id = commit_idx
452 commit_id = commit_idx
453 else:
453 else:
454 commit_id = "tip"
454 commit_id = "tip"
455
455
456 if isinstance(commit_id, unicode):
456 if isinstance(commit_id, unicode):
457 commit_id = safe_str(commit_id)
457 commit_id = safe_str(commit_id)
458
458
459 try:
459 try:
460 raw_id, idx = self._remote.lookup(commit_id, both=True)
460 raw_id, idx = self._remote.lookup(commit_id, both=True)
461 except CommitDoesNotExistError:
461 except CommitDoesNotExistError:
462 msg = "Commit {} does not exist for `{}`".format(
462 msg = "Commit {} does not exist for `{}`".format(
463 *map(safe_str, [commit_id, self.name]))
463 *map(safe_str, [commit_id, self.name]))
464 raise CommitDoesNotExistError(msg)
464 raise CommitDoesNotExistError(msg)
465
465
466 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
466 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
467
467
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
          Mercurial evolve
        :param pre_load: attributes to pre-load on each returned commit
        :param translate_tags: accepted for API compatibility; not used by
          the Mercurial backend
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
          ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        branch_ancestors = False
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            end_pos += 1  # slice upper bound is exclusive; make end inclusive

        # build a Mercurial revset expression from the requested filters
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            # revset filtering yields repo-local revision indexes, hence the
            # index-based collection generator is used for these results
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
550
550
551 def pull(self, url, commit_ids=None):
551 def pull(self, url, commit_ids=None):
552 """
552 """
553 Pull changes from external location.
553 Pull changes from external location.
554
554
555 :param commit_ids: Optional. Can be set to a list of commit ids
555 :param commit_ids: Optional. Can be set to a list of commit ids
556 which shall be pulled from the other repository.
556 which shall be pulled from the other repository.
557 """
557 """
558 url = self._get_url(url)
558 url = self._get_url(url)
559 self._remote.pull(url, commit_ids=commit_ids)
559 self._remote.pull(url, commit_ids=commit_ids)
560 self._remote.invalidate_vcs_cache()
560 self._remote.invalidate_vcs_cache()
561
561
562 def fetch(self, url, commit_ids=None):
562 def fetch(self, url, commit_ids=None):
563 """
563 """
564 Backward compatibility with GIT fetch==pull
564 Backward compatibility with GIT fetch==pull
565 """
565 """
566 return self.pull(url, commit_ids=commit_ids)
566 return self.pull(url, commit_ids=commit_ids)
567
567
568 def push(self, url):
568 def push(self, url):
569 url = self._get_url(url)
569 url = self._get_url(url)
570 self._remote.sync_push(url)
570 self._remote.sync_push(url)
571
571
572 def _local_clone(self, clone_path):
572 def _local_clone(self, clone_path):
573 """
573 """
574 Create a local clone of the current repo.
574 Create a local clone of the current repo.
575 """
575 """
576 self._remote.clone(self.path, clone_path, update_after_clone=True,
576 self._remote.clone(self.path, clone_path, update_after_clone=True,
577 hooks=False)
577 hooks=False)
578
578
    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.

        :param revision: commit id, bookmark or branch to check out
        :param clean: forwarded to the vcsserver update call; presumably maps
            to ``hg update --clean`` (discard local changes) — TODO confirm
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)
585
585
586 def _identify(self):
586 def _identify(self):
587 """
587 """
588 Return the current state of the working directory.
588 Return the current state of the working directory.
589 """
589 """
590 return self._remote.identify().strip().rstrip('+')
590 return self._remote.identify().strip().rstrip('+')
591
591
592 def _heads(self, branch=None):
592 def _heads(self, branch=None):
593 """
593 """
594 Return the commit ids of the repository heads.
594 Return the commit ids of the repository heads.
595 """
595 """
596 return self._remote.heads(branch=branch).strip().split(' ')
596 return self._remote.heads(branch=branch).strip().split(' ')
597
597
598 def _ancestor(self, revision1, revision2):
598 def _ancestor(self, revision1, revision2):
599 """
599 """
600 Return the common ancestor of the two revisions.
600 Return the common ancestor of the two revisions.
601 """
601 """
602 return self._remote.ancestor(revision1, revision2)
602 return self._remote.ancestor(revision1, revision2)
603
603
604 def _local_push(
604 def _local_push(
605 self, revision, repository_path, push_branches=False,
605 self, revision, repository_path, push_branches=False,
606 enable_hooks=False):
606 enable_hooks=False):
607 """
607 """
608 Push the given revision to the specified repository.
608 Push the given revision to the specified repository.
609
609
610 :param push_branches: allow to create branches in the target repo.
610 :param push_branches: allow to create branches in the target repo.
611 """
611 """
612 self._remote.push(
612 self._remote.push(
613 [revision], repository_path, hooks=enable_hooks,
613 [revision], repository_path, hooks=enable_hooks,
614 push_branches=push_branches)
614 push_branches=push_branches)
615
615
616 def _local_merge(self, target_ref, merge_message, user_name, user_email,
616 def _local_merge(self, target_ref, merge_message, user_name, user_email,
617 source_ref, use_rebase=False, dry_run=False):
617 source_ref, use_rebase=False, dry_run=False):
618 """
618 """
619 Merge the given source_revision into the checked out revision.
619 Merge the given source_revision into the checked out revision.
620
620
621 Returns the commit id of the merge and a boolean indicating if the
621 Returns the commit id of the merge and a boolean indicating if the
622 commit needs to be pushed.
622 commit needs to be pushed.
623 """
623 """
624 self._update(target_ref.commit_id, clean=True)
624 self._update(target_ref.commit_id, clean=True)
625
625
626 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
626 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
627 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
627 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
628
628
629 if ancestor == source_ref.commit_id:
629 if ancestor == source_ref.commit_id:
630 # Nothing to do, the changes were already integrated
630 # Nothing to do, the changes were already integrated
631 return target_ref.commit_id, False
631 return target_ref.commit_id, False
632
632
633 elif ancestor == target_ref.commit_id and is_the_same_branch:
633 elif ancestor == target_ref.commit_id and is_the_same_branch:
634 # In this case we should force a commit message
634 # In this case we should force a commit message
635 return source_ref.commit_id, True
635 return source_ref.commit_id, True
636
636
637 unresolved = None
637 if use_rebase:
638 if use_rebase:
638 try:
639 try:
639 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
640 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
640 target_ref.commit_id)
641 target_ref.commit_id)
641 self.bookmark(bookmark_name, revision=source_ref.commit_id)
642 self.bookmark(bookmark_name, revision=source_ref.commit_id)
642 self._remote.rebase(
643 self._remote.rebase(
643 source=source_ref.commit_id, dest=target_ref.commit_id)
644 source=source_ref.commit_id, dest=target_ref.commit_id)
644 self._remote.invalidate_vcs_cache()
645 self._remote.invalidate_vcs_cache()
645 self._update(bookmark_name, clean=True)
646 self._update(bookmark_name, clean=True)
646 return self._identify(), True
647 return self._identify(), True
647 except RepositoryError:
648 except RepositoryError as e:
648 # The rebase-abort may raise another exception which 'hides'
649 # The rebase-abort may raise another exception which 'hides'
649 # the original one, therefore we log it here.
650 # the original one, therefore we log it here.
650 log.exception('Error while rebasing shadow repo during merge.')
651 log.exception('Error while rebasing shadow repo during merge.')
652 if 'unresolved conflicts' in e.message:
653 unresolved = self._remote.get_unresolved_files()
654 log.debug('unresolved files: %s', unresolved)
651
655
652 # Cleanup any rebase leftovers
656 # Cleanup any rebase leftovers
653 self._remote.invalidate_vcs_cache()
657 self._remote.invalidate_vcs_cache()
654 self._remote.rebase(abort=True)
658 self._remote.rebase(abort=True)
655 self._remote.invalidate_vcs_cache()
659 self._remote.invalidate_vcs_cache()
656 self._remote.update(clean=True)
660 self._remote.update(clean=True)
657 raise
661 if unresolved:
662 raise UnresolvedFilesInRepo(unresolved)
663 else:
664 raise
658 else:
665 else:
659 try:
666 try:
660 self._remote.merge(source_ref.commit_id)
667 self._remote.merge(source_ref.commit_id)
661 self._remote.invalidate_vcs_cache()
668 self._remote.invalidate_vcs_cache()
662 self._remote.commit(
669 self._remote.commit(
663 message=safe_str(merge_message),
670 message=safe_str(merge_message),
664 username=safe_str('%s <%s>' % (user_name, user_email)))
671 username=safe_str('%s <%s>' % (user_name, user_email)))
665 self._remote.invalidate_vcs_cache()
672 self._remote.invalidate_vcs_cache()
666 return self._identify(), True
673 return self._identify(), True
667 except RepositoryError:
674 except RepositoryError as e:
675 # The merge-abort may raise another exception which 'hides'
676 # the original one, therefore we log it here.
677 log.exception('Error while merging shadow repo during merge.')
678 if 'unresolved merge conflicts' in e.message:
679 unresolved = self._remote.get_unresolved_files()
680 log.debug('unresolved files: %s', unresolved)
681
668 # Cleanup any merge leftovers
682 # Cleanup any merge leftovers
669 self._remote.update(clean=True)
683 self._remote.update(clean=True)
670 raise
684 if unresolved:
685 raise UnresolvedFilesInRepo(unresolved)
686 else:
687 raise
671
688
672 def _local_close(self, target_ref, user_name, user_email,
689 def _local_close(self, target_ref, user_name, user_email,
673 source_ref, close_message=''):
690 source_ref, close_message=''):
674 """
691 """
675 Close the branch of the given source_revision
692 Close the branch of the given source_revision
676
693
677 Returns the commit id of the close and a boolean indicating if the
694 Returns the commit id of the close and a boolean indicating if the
678 commit needs to be pushed.
695 commit needs to be pushed.
679 """
696 """
680 self._update(source_ref.commit_id)
697 self._update(source_ref.commit_id)
681 message = close_message or "Closing branch: `{}`".format(source_ref.name)
698 message = close_message or "Closing branch: `{}`".format(source_ref.name)
682 try:
699 try:
683 self._remote.commit(
700 self._remote.commit(
684 message=safe_str(message),
701 message=safe_str(message),
685 username=safe_str('%s <%s>' % (user_name, user_email)),
702 username=safe_str('%s <%s>' % (user_name, user_email)),
686 close_branch=True)
703 close_branch=True)
687 self._remote.invalidate_vcs_cache()
704 self._remote.invalidate_vcs_cache()
688 return self._identify(), True
705 return self._identify(), True
689 except RepositoryError:
706 except RepositoryError:
690 # Cleanup any commit leftovers
707 # Cleanup any commit leftovers
691 self._remote.update(clean=True)
708 self._remote.update(clean=True)
692 raise
709 raise
693
710
694 def _is_the_same_branch(self, target_ref, source_ref):
711 def _is_the_same_branch(self, target_ref, source_ref):
695 return (
712 return (
696 self._get_branch_name(target_ref) ==
713 self._get_branch_name(target_ref) ==
697 self._get_branch_name(source_ref))
714 self._get_branch_name(source_ref))
698
715
699 def _get_branch_name(self, ref):
716 def _get_branch_name(self, ref):
700 if ref.type == 'branch':
717 if ref.type == 'branch':
701 return ref.name
718 return ref.name
702 return self._remote.ctx_branch(ref.commit_id)
719 return self._remote.ctx_branch(ref.commit_id)
703
720
704 def _maybe_prepare_merge_workspace(
721 def _maybe_prepare_merge_workspace(
705 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
722 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
706 shadow_repository_path = self._get_shadow_repository_path(
723 shadow_repository_path = self._get_shadow_repository_path(
707 self.path, repo_id, workspace_id)
724 self.path, repo_id, workspace_id)
708 if not os.path.exists(shadow_repository_path):
725 if not os.path.exists(shadow_repository_path):
709 self._local_clone(shadow_repository_path)
726 self._local_clone(shadow_repository_path)
710 log.debug(
727 log.debug(
711 'Prepared shadow repository in %s', shadow_repository_path)
728 'Prepared shadow repository in %s', shadow_repository_path)
712
729
713 return shadow_repository_path
730 return shadow_repository_path
714
731
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` from ``source_repo`` into ``target_ref`` using a
        shadow repository as workspace, and return a ``MergeResponse``
        describing the outcome. With ``dry_run`` the merge is attempted in the
        shadow repo but never pushed back to this (target) repository.
        """
        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # the merge target must be a current head, otherwise refuse early
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            # Mercurial allows multiple heads per branch; merging into an
            # ambiguous target is refused
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                heads = '\n,'.join(self._heads(target_ref.name))
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:
            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError as e:
                log.exception('Failure when doing local merge on hg shadow repo')
                # expose the conflicting files to the caller through metadata
                # NOTE(review): assumes e.args[0] is an iterable of file
                # names when UnresolvedFilesInRepo is raised — confirm
                if isinstance(e, UnresolvedFilesInRepo):
                    metadata['unresolved_files'] = 'file: ' + (', file: '.join(e.args[0]))

                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
862
882
863 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
883 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
864 config = self.config.copy()
884 config = self.config.copy()
865 if not enable_hooks:
885 if not enable_hooks:
866 config.clear_section('hooks')
886 config.clear_section('hooks')
867 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
887 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
868
888
869 def _validate_pull_reference(self, reference):
889 def _validate_pull_reference(self, reference):
870 if not (reference.name in self.bookmarks or
890 if not (reference.name in self.bookmarks or
871 reference.name in self.branches or
891 reference.name in self.branches or
872 self.get_commit(reference.commit_id)):
892 self.get_commit(reference.commit_id)):
873 raise CommitDoesNotExistError(
893 raise CommitDoesNotExistError(
874 'Unknown branch, bookmark or commit id')
894 'Unknown branch, bookmark or commit id')
875
895
876 def _local_pull(self, repository_path, reference):
896 def _local_pull(self, repository_path, reference):
877 """
897 """
878 Fetch a branch, bookmark or commit from a local repository.
898 Fetch a branch, bookmark or commit from a local repository.
879 """
899 """
880 repository_path = os.path.abspath(repository_path)
900 repository_path = os.path.abspath(repository_path)
881 if repository_path == self.path:
901 if repository_path == self.path:
882 raise ValueError('Cannot pull from the same repository')
902 raise ValueError('Cannot pull from the same repository')
883
903
884 reference_type_to_option_name = {
904 reference_type_to_option_name = {
885 'book': 'bookmark',
905 'book': 'bookmark',
886 'branch': 'branch',
906 'branch': 'branch',
887 }
907 }
888 option_name = reference_type_to_option_name.get(
908 option_name = reference_type_to_option_name.get(
889 reference.type, 'revision')
909 reference.type, 'revision')
890
910
891 if option_name == 'revision':
911 if option_name == 'revision':
892 ref = reference.commit_id
912 ref = reference.commit_id
893 else:
913 else:
894 ref = reference.name
914 ref = reference.name
895
915
896 options = {option_name: [ref]}
916 options = {option_name: [ref]}
897 self._remote.pull_cmd(repository_path, hooks=False, **options)
917 self._remote.pull_cmd(repository_path, hooks=False, **options)
898 self._remote.invalidate_vcs_cache()
918 self._remote.invalidate_vcs_cache()
899
919
    def bookmark(self, bookmark, revision=None):
        """
        Set a bookmark pointing at the given revision.

        :param bookmark: bookmark name; unicode values are coerced to byte
            strings before being handed to the vcsserver (Python 2 code path).
        :param revision: optional revision the bookmark should point at.
        """
        if isinstance(bookmark, unicode):
            bookmark = safe_str(bookmark)
        self._remote.bookmark(bookmark, revision=revision)
        # bookmarks mutate repo state; invalidate the vcsserver-side caches
        self._remote.invalidate_vcs_cache()
905
925
    def get_path_permissions(self, username):
        """
        Build a path permission checker for ``username`` from the repo's
        ``.hg/hgacl`` file, or return None when no ACL file exists or no
        matching patterns are configured.

        :raises exceptions.RepositoryRequirementError: when the ACL file
            exists but cannot be parsed.
        """
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
            # look up the first matching option; user-specific entries take
            # precedence over 'default', and 'narrowacl' over 'narrowhgacl'
            svalue = None
            for section, option in [
                ('narrowacl', username + suffix),
                ('narrowacl', 'default' + suffix),
                ('narrowhgacl', username + suffix),
                ('narrowhgacl', 'default' + suffix)
            ]:
                try:
                    svalue = hgacl.get(section, option)
                    break  # stop at the first value we find
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                # a literal (non-glob) pattern also covers its subtree
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None
946
966
947
967
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """Collection generator whose items are repo-local revision indexes
    rather than raw commit ids (used for revset-filtered results)."""

    def _commit_factory(self, commit_id):
        # `commit_id` is a numeric revision index here, hence the lookup
        # goes through `commit_idx` instead of `commit_id`.
        return self.repo.get_commit(commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,226 +1,230 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Custom vcs exceptions module.
22 Custom vcs exceptions module.
23 """
23 """
24 import logging
24 import logging
25 import functools
25 import functools
26 import urllib2
26 import urllib2
27 import rhodecode
27 import rhodecode
28 from pyramid import compat
28 from pyramid import compat
29
29
30 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
31
31
32
32
33 class VCSCommunicationError(Exception):
33 class VCSCommunicationError(Exception):
34 pass
34 pass
35
35
36
36
37 class HttpVCSCommunicationError(VCSCommunicationError):
37 class HttpVCSCommunicationError(VCSCommunicationError):
38 pass
38 pass
39
39
40
40
41 class VCSError(Exception):
41 class VCSError(Exception):
42 pass
42 pass
43
43
44
44
45 class RepositoryError(VCSError):
45 class RepositoryError(VCSError):
46 pass
46 pass
47
47
48
48
49 class RepositoryRequirementError(RepositoryError):
49 class RepositoryRequirementError(RepositoryError):
50 pass
50 pass
51
51
52
52
53 class UnresolvedFilesInRepo(RepositoryError):
54 pass
55
56
53 class VCSBackendNotSupportedError(VCSError):
57 class VCSBackendNotSupportedError(VCSError):
54 """
58 """
55 Exception raised when VCSServer does not support requested backend
59 Exception raised when VCSServer does not support requested backend
56 """
60 """
57
61
58
62
59 class EmptyRepositoryError(RepositoryError):
63 class EmptyRepositoryError(RepositoryError):
60 pass
64 pass
61
65
62
66
63 class TagAlreadyExistError(RepositoryError):
67 class TagAlreadyExistError(RepositoryError):
64 pass
68 pass
65
69
66
70
67 class TagDoesNotExistError(RepositoryError):
71 class TagDoesNotExistError(RepositoryError):
68 pass
72 pass
69
73
70
74
71 class BranchAlreadyExistError(RepositoryError):
75 class BranchAlreadyExistError(RepositoryError):
72 pass
76 pass
73
77
74
78
75 class BranchDoesNotExistError(RepositoryError):
79 class BranchDoesNotExistError(RepositoryError):
76 pass
80 pass
77
81
78
82
79 class CommitError(RepositoryError):
83 class CommitError(RepositoryError):
80 """
84 """
81 Exceptions related to an existing commit
85 Exceptions related to an existing commit
82 """
86 """
83
87
84
88
85 class CommitDoesNotExistError(CommitError):
89 class CommitDoesNotExistError(CommitError):
86 pass
90 pass
87
91
88
92
89 class CommittingError(RepositoryError):
93 class CommittingError(RepositoryError):
90 """
94 """
91 Exceptions happening while creating a new commit
95 Exceptions happening while creating a new commit
92 """
96 """
93
97
94
98
95 class NothingChangedError(CommittingError):
99 class NothingChangedError(CommittingError):
96 pass
100 pass
97
101
98
102
99 class NodeError(VCSError):
103 class NodeError(VCSError):
100 pass
104 pass
101
105
102
106
103 class RemovedFileNodeError(NodeError):
107 class RemovedFileNodeError(NodeError):
104 pass
108 pass
105
109
106
110
107 class NodeAlreadyExistsError(CommittingError):
111 class NodeAlreadyExistsError(CommittingError):
108 pass
112 pass
109
113
110
114
111 class NodeAlreadyChangedError(CommittingError):
115 class NodeAlreadyChangedError(CommittingError):
112 pass
116 pass
113
117
114
118
115 class NodeDoesNotExistError(CommittingError):
119 class NodeDoesNotExistError(CommittingError):
116 pass
120 pass
117
121
118
122
119 class NodeNotChangedError(CommittingError):
123 class NodeNotChangedError(CommittingError):
120 pass
124 pass
121
125
122
126
123 class NodeAlreadyAddedError(CommittingError):
127 class NodeAlreadyAddedError(CommittingError):
124 pass
128 pass
125
129
126
130
127 class NodeAlreadyRemovedError(CommittingError):
131 class NodeAlreadyRemovedError(CommittingError):
128 pass
132 pass
129
133
130
134
131 class SubrepoMergeError(RepositoryError):
135 class SubrepoMergeError(RepositoryError):
132 """
136 """
133 This happens if we try to merge a repository which contains subrepos and
137 This happens if we try to merge a repository which contains subrepos and
134 the subrepos cannot be merged. The subrepos are not merged itself but
138 the subrepos cannot be merged. The subrepos are not merged itself but
135 their references in the root repo are merged.
139 their references in the root repo are merged.
136 """
140 """
137
141
138
142
139 class ImproperArchiveTypeError(VCSError):
143 class ImproperArchiveTypeError(VCSError):
140 pass
144 pass
141
145
142
146
143 class CommandError(VCSError):
147 class CommandError(VCSError):
144 pass
148 pass
145
149
146
150
147 class UnhandledException(VCSError):
151 class UnhandledException(VCSError):
148 """
152 """
149 Signals that something unexpected went wrong.
153 Signals that something unexpected went wrong.
150
154
151 This usually means we have a programming error on the side of the VCSServer
155 This usually means we have a programming error on the side of the VCSServer
152 and should inspect the logfile of the VCSServer to find more details.
156 and should inspect the logfile of the VCSServer to find more details.
153 """
157 """
154
158
155
159
156 _EXCEPTION_MAP = {
160 _EXCEPTION_MAP = {
157 'abort': RepositoryError,
161 'abort': RepositoryError,
158 'archive': ImproperArchiveTypeError,
162 'archive': ImproperArchiveTypeError,
159 'error': RepositoryError,
163 'error': RepositoryError,
160 'lookup': CommitDoesNotExistError,
164 'lookup': CommitDoesNotExistError,
161 'repo_locked': RepositoryError,
165 'repo_locked': RepositoryError,
162 'requirement': RepositoryRequirementError,
166 'requirement': RepositoryRequirementError,
163 'unhandled': UnhandledException,
167 'unhandled': UnhandledException,
164 # TODO: johbo: Define our own exception for this and stop abusing
168 # TODO: johbo: Define our own exception for this and stop abusing
165 # urllib's exception class.
169 # urllib's exception class.
166 'url_error': urllib2.URLError,
170 'url_error': urllib2.URLError,
167 'subrepo_merge_error': SubrepoMergeError,
171 'subrepo_merge_error': SubrepoMergeError,
168 }
172 }
169
173
170
174
171 def map_vcs_exceptions(func):
175 def map_vcs_exceptions(func):
172 """
176 """
173 Utility to decorate functions so that plain exceptions are translated.
177 Utility to decorate functions so that plain exceptions are translated.
174
178
175 The translation is based on `exc_map` which maps a `str` indicating
179 The translation is based on `exc_map` which maps a `str` indicating
176 the error type into an exception class representing this error inside
180 the error type into an exception class representing this error inside
177 of the vcs layer.
181 of the vcs layer.
178 """
182 """
179
183
180 @functools.wraps(func)
184 @functools.wraps(func)
181 def wrapper(*args, **kwargs):
185 def wrapper(*args, **kwargs):
182 try:
186 try:
183 return func(*args, **kwargs)
187 return func(*args, **kwargs)
184 except Exception as e:
188 except Exception as e:
185 from rhodecode.lib.utils2 import str2bool
189 from rhodecode.lib.utils2 import str2bool
186 debug = str2bool(rhodecode.CONFIG.get('debug'))
190 debug = str2bool(rhodecode.CONFIG.get('debug'))
187
191
188 # The error middleware adds information if it finds
192 # The error middleware adds information if it finds
189 # __traceback_info__ in a frame object. This way the remote
193 # __traceback_info__ in a frame object. This way the remote
190 # traceback information is made available in error reports.
194 # traceback information is made available in error reports.
191 remote_tb = getattr(e, '_vcs_server_traceback', None)
195 remote_tb = getattr(e, '_vcs_server_traceback', None)
192 org_remote_tb = getattr(e, '_vcs_server_org_exc_tb', '')
196 org_remote_tb = getattr(e, '_vcs_server_org_exc_tb', '')
193 __traceback_info__ = None
197 __traceback_info__ = None
194 if remote_tb:
198 if remote_tb:
195 if isinstance(remote_tb, compat.string_types):
199 if isinstance(remote_tb, compat.string_types):
196 remote_tb = [remote_tb]
200 remote_tb = [remote_tb]
197 __traceback_info__ = (
201 __traceback_info__ = (
198 'Found VCSServer remote traceback information:\n'
202 'Found VCSServer remote traceback information:\n'
199 '{}\n'
203 '{}\n'
200 '+++ BEG SOURCE EXCEPTION +++\n\n'
204 '+++ BEG SOURCE EXCEPTION +++\n\n'
201 '{}\n'
205 '{}\n'
202 '+++ END SOURCE EXCEPTION +++\n'
206 '+++ END SOURCE EXCEPTION +++\n'
203 ''.format('\n'.join(remote_tb), org_remote_tb)
207 ''.format('\n'.join(remote_tb), org_remote_tb)
204 )
208 )
205
209
206 # Avoid that remote_tb also appears in the frame
210 # Avoid that remote_tb also appears in the frame
207 del remote_tb
211 del remote_tb
208
212
209 # Special vcs errors had an attribute "_vcs_kind" which is used
213 # Special vcs errors had an attribute "_vcs_kind" which is used
210 # to translate them to the proper exception class in the vcs
214 # to translate them to the proper exception class in the vcs
211 # client layer.
215 # client layer.
212 kind = getattr(e, '_vcs_kind', None)
216 kind = getattr(e, '_vcs_kind', None)
213
217
214 if kind:
218 if kind:
215 if any(e.args):
219 if any(e.args):
216 args = e.args
220 args = e.args
217 else:
221 else:
218 args = [__traceback_info__ or 'unhandledException']
222 args = [__traceback_info__ or 'unhandledException']
219 if debug or __traceback_info__ and kind not in ['unhandled', 'lookup']:
223 if debug or __traceback_info__ and kind not in ['unhandled', 'lookup']:
220 # for other than unhandled errors also log the traceback
224 # for other than unhandled errors also log the traceback
221 # can be useful for debugging
225 # can be useful for debugging
222 log.error(__traceback_info__)
226 log.error(__traceback_info__)
223 raise _EXCEPTION_MAP[kind](*args)
227 raise _EXCEPTION_MAP[kind](*args)
224 else:
228 else:
225 raise
229 raise
226 return wrapper
230 return wrapper
@@ -1,1759 +1,1760 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2019 RhodeCode GmbH
3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
26
27 import json
27 import json
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import urllib
30 import urllib
31 import collections
31 import collections
32
32
33 from pyramid import compat
33 from pyramid import compat
34 from pyramid.threadlocal import get_current_request
34 from pyramid.threadlocal import get_current_request
35
35
36 from rhodecode import events
36 from rhodecode import events
37 from rhodecode.translation import lazy_ugettext
37 from rhodecode.translation import lazy_ugettext
38 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 from rhodecode.lib import audit_logger
39 from rhodecode.lib import audit_logger
40 from rhodecode.lib.compat import OrderedDict
40 from rhodecode.lib.compat import OrderedDict
41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 from rhodecode.lib.markup_renderer import (
42 from rhodecode.lib.markup_renderer import (
43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 from rhodecode.lib.vcs.backends.base import (
45 from rhodecode.lib.vcs.backends.base import (
46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 from rhodecode.lib.vcs.conf import settings as vcs_settings
47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 from rhodecode.lib.vcs.exceptions import (
48 from rhodecode.lib.vcs.exceptions import (
49 CommitDoesNotExistError, EmptyRepositoryError)
49 CommitDoesNotExistError, EmptyRepositoryError)
50 from rhodecode.model import BaseModel
50 from rhodecode.model import BaseModel
51 from rhodecode.model.changeset_status import ChangesetStatusModel
51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 from rhodecode.model.comment import CommentsModel
52 from rhodecode.model.comment import CommentsModel
53 from rhodecode.model.db import (
53 from rhodecode.model.db import (
54 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
54 or_, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
56 from rhodecode.model.meta import Session
56 from rhodecode.model.meta import Session
57 from rhodecode.model.notification import NotificationModel, \
57 from rhodecode.model.notification import NotificationModel, \
58 EmailNotificationModel
58 EmailNotificationModel
59 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.scm import ScmModel
60 from rhodecode.model.settings import VcsSettingsModel
60 from rhodecode.model.settings import VcsSettingsModel
61
61
62
62
63 log = logging.getLogger(__name__)
63 log = logging.getLogger(__name__)
64
64
65
65
66 # Data structure to hold the response data when updating commits during a pull
66 # Data structure to hold the response data when updating commits during a pull
67 # request update.
67 # request update.
68 UpdateResponse = collections.namedtuple('UpdateResponse', [
68 UpdateResponse = collections.namedtuple('UpdateResponse', [
69 'executed', 'reason', 'new', 'old', 'changes',
69 'executed', 'reason', 'new', 'old', 'changes',
70 'source_changed', 'target_changed'])
70 'source_changed', 'target_changed'])
71
71
72
72
73 class PullRequestModel(BaseModel):
73 class PullRequestModel(BaseModel):
74
74
75 cls = PullRequest
75 cls = PullRequest
76
76
77 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
77 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
78
78
79 UPDATE_STATUS_MESSAGES = {
79 UPDATE_STATUS_MESSAGES = {
80 UpdateFailureReason.NONE: lazy_ugettext(
80 UpdateFailureReason.NONE: lazy_ugettext(
81 'Pull request update successful.'),
81 'Pull request update successful.'),
82 UpdateFailureReason.UNKNOWN: lazy_ugettext(
82 UpdateFailureReason.UNKNOWN: lazy_ugettext(
83 'Pull request update failed because of an unknown error.'),
83 'Pull request update failed because of an unknown error.'),
84 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
84 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
85 'No update needed because the source and target have not changed.'),
85 'No update needed because the source and target have not changed.'),
86 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
86 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
87 'Pull request cannot be updated because the reference type is '
87 'Pull request cannot be updated because the reference type is '
88 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
88 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
89 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
89 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
90 'This pull request cannot be updated because the target '
90 'This pull request cannot be updated because the target '
91 'reference is missing.'),
91 'reference is missing.'),
92 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
92 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
93 'This pull request cannot be updated because the source '
93 'This pull request cannot be updated because the source '
94 'reference is missing.'),
94 'reference is missing.'),
95 }
95 }
96 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
96 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
97 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
97 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
98
98
99 def __get_pull_request(self, pull_request):
99 def __get_pull_request(self, pull_request):
100 return self._get_instance((
100 return self._get_instance((
101 PullRequest, PullRequestVersion), pull_request)
101 PullRequest, PullRequestVersion), pull_request)
102
102
103 def _check_perms(self, perms, pull_request, user, api=False):
103 def _check_perms(self, perms, pull_request, user, api=False):
104 if not api:
104 if not api:
105 return h.HasRepoPermissionAny(*perms)(
105 return h.HasRepoPermissionAny(*perms)(
106 user=user, repo_name=pull_request.target_repo.repo_name)
106 user=user, repo_name=pull_request.target_repo.repo_name)
107 else:
107 else:
108 return h.HasRepoPermissionAnyApi(*perms)(
108 return h.HasRepoPermissionAnyApi(*perms)(
109 user=user, repo_name=pull_request.target_repo.repo_name)
109 user=user, repo_name=pull_request.target_repo.repo_name)
110
110
111 def check_user_read(self, pull_request, user, api=False):
111 def check_user_read(self, pull_request, user, api=False):
112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
112 _perms = ('repository.admin', 'repository.write', 'repository.read',)
113 return self._check_perms(_perms, pull_request, user, api)
113 return self._check_perms(_perms, pull_request, user, api)
114
114
115 def check_user_merge(self, pull_request, user, api=False):
115 def check_user_merge(self, pull_request, user, api=False):
116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
116 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
117 return self._check_perms(_perms, pull_request, user, api)
117 return self._check_perms(_perms, pull_request, user, api)
118
118
119 def check_user_update(self, pull_request, user, api=False):
119 def check_user_update(self, pull_request, user, api=False):
120 owner = user.user_id == pull_request.user_id
120 owner = user.user_id == pull_request.user_id
121 return self.check_user_merge(pull_request, user, api) or owner
121 return self.check_user_merge(pull_request, user, api) or owner
122
122
123 def check_user_delete(self, pull_request, user):
123 def check_user_delete(self, pull_request, user):
124 owner = user.user_id == pull_request.user_id
124 owner = user.user_id == pull_request.user_id
125 _perms = ('repository.admin',)
125 _perms = ('repository.admin',)
126 return self._check_perms(_perms, pull_request, user) or owner
126 return self._check_perms(_perms, pull_request, user) or owner
127
127
128 def check_user_change_status(self, pull_request, user, api=False):
128 def check_user_change_status(self, pull_request, user, api=False):
129 reviewer = user.user_id in [x.user_id for x in
129 reviewer = user.user_id in [x.user_id for x in
130 pull_request.reviewers]
130 pull_request.reviewers]
131 return self.check_user_update(pull_request, user, api) or reviewer
131 return self.check_user_update(pull_request, user, api) or reviewer
132
132
133 def check_user_comment(self, pull_request, user):
133 def check_user_comment(self, pull_request, user):
134 owner = user.user_id == pull_request.user_id
134 owner = user.user_id == pull_request.user_id
135 return self.check_user_read(pull_request, user) or owner
135 return self.check_user_read(pull_request, user) or owner
136
136
137 def get(self, pull_request):
137 def get(self, pull_request):
138 return self.__get_pull_request(pull_request)
138 return self.__get_pull_request(pull_request)
139
139
140 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
140 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
141 statuses=None, opened_by=None, order_by=None,
141 statuses=None, opened_by=None, order_by=None,
142 order_dir='desc', only_created=False):
142 order_dir='desc', only_created=False):
143 repo = None
143 repo = None
144 if repo_name:
144 if repo_name:
145 repo = self._get_repo(repo_name)
145 repo = self._get_repo(repo_name)
146
146
147 q = PullRequest.query()
147 q = PullRequest.query()
148
148
149 if search_q:
149 if search_q:
150 like_expression = u'%{}%'.format(safe_unicode(search_q))
150 like_expression = u'%{}%'.format(safe_unicode(search_q))
151 q = q.filter(or_(
151 q = q.filter(or_(
152 cast(PullRequest.pull_request_id, String).ilike(like_expression),
152 cast(PullRequest.pull_request_id, String).ilike(like_expression),
153 PullRequest.title.ilike(like_expression),
153 PullRequest.title.ilike(like_expression),
154 PullRequest.description.ilike(like_expression),
154 PullRequest.description.ilike(like_expression),
155 ))
155 ))
156
156
157 # source or target
157 # source or target
158 if repo and source:
158 if repo and source:
159 q = q.filter(PullRequest.source_repo == repo)
159 q = q.filter(PullRequest.source_repo == repo)
160 elif repo:
160 elif repo:
161 q = q.filter(PullRequest.target_repo == repo)
161 q = q.filter(PullRequest.target_repo == repo)
162
162
163 # closed,opened
163 # closed,opened
164 if statuses:
164 if statuses:
165 q = q.filter(PullRequest.status.in_(statuses))
165 q = q.filter(PullRequest.status.in_(statuses))
166
166
167 # opened by filter
167 # opened by filter
168 if opened_by:
168 if opened_by:
169 q = q.filter(PullRequest.user_id.in_(opened_by))
169 q = q.filter(PullRequest.user_id.in_(opened_by))
170
170
171 # only get those that are in "created" state
171 # only get those that are in "created" state
172 if only_created:
172 if only_created:
173 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
173 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
174
174
175 if order_by:
175 if order_by:
176 order_map = {
176 order_map = {
177 'name_raw': PullRequest.pull_request_id,
177 'name_raw': PullRequest.pull_request_id,
178 'id': PullRequest.pull_request_id,
178 'id': PullRequest.pull_request_id,
179 'title': PullRequest.title,
179 'title': PullRequest.title,
180 'updated_on_raw': PullRequest.updated_on,
180 'updated_on_raw': PullRequest.updated_on,
181 'target_repo': PullRequest.target_repo_id
181 'target_repo': PullRequest.target_repo_id
182 }
182 }
183 if order_dir == 'asc':
183 if order_dir == 'asc':
184 q = q.order_by(order_map[order_by].asc())
184 q = q.order_by(order_map[order_by].asc())
185 else:
185 else:
186 q = q.order_by(order_map[order_by].desc())
186 q = q.order_by(order_map[order_by].desc())
187
187
188 return q
188 return q
189
189
190 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
190 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
191 opened_by=None):
191 opened_by=None):
192 """
192 """
193 Count the number of pull requests for a specific repository.
193 Count the number of pull requests for a specific repository.
194
194
195 :param repo_name: target or source repo
195 :param repo_name: target or source repo
196 :param search_q: filter by text
196 :param search_q: filter by text
197 :param source: boolean flag to specify if repo_name refers to source
197 :param source: boolean flag to specify if repo_name refers to source
198 :param statuses: list of pull request statuses
198 :param statuses: list of pull request statuses
199 :param opened_by: author user of the pull request
199 :param opened_by: author user of the pull request
200 :returns: int number of pull requests
200 :returns: int number of pull requests
201 """
201 """
202 q = self._prepare_get_all_query(
202 q = self._prepare_get_all_query(
203 repo_name, search_q=search_q, source=source, statuses=statuses,
203 repo_name, search_q=search_q, source=source, statuses=statuses,
204 opened_by=opened_by)
204 opened_by=opened_by)
205
205
206 return q.count()
206 return q.count()
207
207
208 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
208 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
209 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
209 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
210 """
210 """
211 Get all pull requests for a specific repository.
211 Get all pull requests for a specific repository.
212
212
213 :param repo_name: target or source repo
213 :param repo_name: target or source repo
214 :param search_q: filter by text
214 :param search_q: filter by text
215 :param source: boolean flag to specify if repo_name refers to source
215 :param source: boolean flag to specify if repo_name refers to source
216 :param statuses: list of pull request statuses
216 :param statuses: list of pull request statuses
217 :param opened_by: author user of the pull request
217 :param opened_by: author user of the pull request
218 :param offset: pagination offset
218 :param offset: pagination offset
219 :param length: length of returned list
219 :param length: length of returned list
220 :param order_by: order of the returned list
220 :param order_by: order of the returned list
221 :param order_dir: 'asc' or 'desc' ordering direction
221 :param order_dir: 'asc' or 'desc' ordering direction
222 :returns: list of pull requests
222 :returns: list of pull requests
223 """
223 """
224 q = self._prepare_get_all_query(
224 q = self._prepare_get_all_query(
225 repo_name, search_q=search_q, source=source, statuses=statuses,
225 repo_name, search_q=search_q, source=source, statuses=statuses,
226 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
226 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
227
227
228 if length:
228 if length:
229 pull_requests = q.limit(length).offset(offset).all()
229 pull_requests = q.limit(length).offset(offset).all()
230 else:
230 else:
231 pull_requests = q.all()
231 pull_requests = q.all()
232
232
233 return pull_requests
233 return pull_requests
234
234
235 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
235 def count_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
236 opened_by=None):
236 opened_by=None):
237 """
237 """
238 Count the number of pull requests for a specific repository that are
238 Count the number of pull requests for a specific repository that are
239 awaiting review.
239 awaiting review.
240
240
241 :param repo_name: target or source repo
241 :param repo_name: target or source repo
242 :param search_q: filter by text
242 :param search_q: filter by text
243 :param source: boolean flag to specify if repo_name refers to source
243 :param source: boolean flag to specify if repo_name refers to source
244 :param statuses: list of pull request statuses
244 :param statuses: list of pull request statuses
245 :param opened_by: author user of the pull request
245 :param opened_by: author user of the pull request
246 :returns: int number of pull requests
246 :returns: int number of pull requests
247 """
247 """
248 pull_requests = self.get_awaiting_review(
248 pull_requests = self.get_awaiting_review(
249 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
249 repo_name, search_q=search_q, source=source, statuses=statuses, opened_by=opened_by)
250
250
251 return len(pull_requests)
251 return len(pull_requests)
252
252
def get_awaiting_review(self, repo_name, search_q=None, source=False, statuses=None,
                        opened_by=None, offset=0, length=None,
                        order_by=None, order_dir='desc'):
    """
    Get all pull requests for a specific repository that are awaiting
    review.

    :param repo_name: target or source repo
    :param search_q: filter by text
    :param source: boolean flag to specify if repo_name refers to source
    :param statuses: list of pull request statuses
    :param opened_by: author user of the pull request
    :param offset: pagination offset
    :param length: length of returned list
    :param order_by: order of the returned list
    :param order_dir: 'asc' or 'desc' ordering direction
    :returns: list of pull requests
    """
    candidates = self.get_all(
        repo_name, search_q=search_q, source=source, statuses=statuses,
        opened_by=opened_by, order_by=order_by, order_dir=order_dir)

    # "awaiting review" == calculated review status is still one of these
    awaiting_statuses = (ChangesetStatus.STATUS_NOT_REVIEWED,
                         ChangesetStatus.STATUS_UNDER_REVIEW)
    awaiting = [pr for pr in candidates
                if pr.calculated_review_status() in awaiting_statuses]

    if not length:
        return awaiting
    return awaiting[offset:offset + length]
285
285
def count_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
                             opened_by=None, user_id=None):
    """
    Count the number of pull requests for a specific repository that are
    awaiting review from a specific user.

    :param repo_name: target or source repo
    :param search_q: filter by text
    :param source: boolean flag to specify if repo_name refers to source
    :param statuses: list of pull request statuses
    :param opened_by: author user of the pull request
    :param user_id: reviewer user of the pull request
    :returns: int number of pull requests
    """
    # delegate to the full fetch and count the result
    awaiting = self.get_awaiting_my_review(
        repo_name, search_q=search_q, source=source, statuses=statuses,
        opened_by=opened_by, user_id=user_id)
    return len(awaiting)
305
305
def get_awaiting_my_review(self, repo_name, search_q=None, source=False, statuses=None,
                           opened_by=None, user_id=None, offset=0,
                           length=None, order_by=None, order_dir='desc'):
    """
    Get all pull requests for a specific repository that are awaiting
    review from a specific user.

    :param repo_name: target or source repo
    :param search_q: filter by text
    :param source: boolean flag to specify if repo_name refers to source
    :param statuses: list of pull request statuses
    :param opened_by: author user of the pull request
    :param user_id: reviewer user of the pull request
    :param offset: pagination offset
    :param length: length of returned list
    :param order_by: order of the returned list
    :param order_dir: 'asc' or 'desc' ordering direction
    :returns: list of pull requests
    """
    candidates = self.get_all(
        repo_name, search_q=search_q, source=source, statuses=statuses,
        opened_by=opened_by, order_by=order_by, order_dir=order_dir)

    # keep only pull requests where the given user is listed as a reviewer
    not_reviewed = PullRequestModel().get_not_reviewed(user_id)
    my_participation = [pr for pr in candidates if pr in not_reviewed]

    if not length:
        return my_participation
    return my_participation[offset:offset + length]
339
339
def get_not_reviewed(self, user_id):
    """
    Return pull requests on which the given user is registered as reviewer.

    NOTE(review): despite the name, no check for an actual cast vote is
    visible here — every pull request the user reviews is returned; confirm
    against callers.
    """
    reviewer_rows = PullRequestReviewers.query().filter(
        PullRequestReviewers.user_id == user_id).all()
    return [row.pull_request for row in reviewer_rows]
345
345
def _prepare_participating_query(self, user_id=None, statuses=None,
                                 order_by=None, order_dir='desc'):
    """
    Build (but do not execute) the query for pull requests a user
    participates in, i.e. authored or is a reviewer on.

    :param user_id: restrict to pull requests authored by, or reviewed by,
        this user; when falsy, all pull requests are matched
    :param statuses: optional list of pull request statuses to filter on
    :param order_by: one of 'name_raw', 'title', 'updated_on_raw',
        'target_repo', or ``None`` for no explicit ordering
    :param order_dir: 'asc' or 'desc' ordering direction
    :returns: SQLAlchemy query object, ready for ``.count()`` / ``.all()``
    """
    q = PullRequest.query()
    if user_id:
        # participation == author of the PR, OR listed in its reviewers
        reviewers_subquery = Session().query(
            PullRequestReviewers.pull_request_id).filter(
            PullRequestReviewers.user_id == user_id).subquery()
        user_filter = or_(
            PullRequest.user_id == user_id,
            PullRequest.pull_request_id.in_(reviewers_subquery)
        )
        q = PullRequest.query().filter(user_filter)

    # closed,opened
    if statuses:
        q = q.filter(PullRequest.status.in_(statuses))

    if order_by:
        # maps the UI-facing sort keys to the actual ORM columns
        order_map = {
            'name_raw': PullRequest.pull_request_id,
            'title': PullRequest.title,
            'updated_on_raw': PullRequest.updated_on,
            'target_repo': PullRequest.target_repo_id
        }
        if order_dir == 'asc':
            q = q.order_by(order_map[order_by].asc())
        else:
            q = q.order_by(order_map[order_by].desc())

    return q
376
376
def count_im_participating_in(self, user_id=None, statuses=None):
    """
    Count pull requests the given user participates in (author or reviewer).
    """
    query = self._prepare_participating_query(user_id, statuses=statuses)
    return query.count()
380
380
def get_im_participating_in(
        self, user_id=None, statuses=None, offset=0,
        length=None, order_by=None, order_dir='desc'):
    """
    Get all Pull requests that i'm participating in, or i have opened
    """

    query = self._prepare_participating_query(
        user_id, statuses=statuses, order_by=order_by,
        order_dir=order_dir)

    if not length:
        return query.all()
    # database-side pagination when a page length is requested
    return query.limit(length).offset(offset).all()
398
398
def get_versions(self, pull_request):
    """
    Return all versions of the given pull request, ordered by version id
    ascending (oldest version first).

    NOTE: the previous docstring claimed descending order, but the query
    has always ordered by ``pull_request_version_id.asc()`` — the docstring
    was wrong, not the query.

    :param pull_request: pull request whose versions are fetched
    :returns: list of PullRequestVersion objects
    """
    return PullRequestVersion.query()\
        .filter(PullRequestVersion.pull_request == pull_request)\
        .order_by(PullRequestVersion.pull_request_version_id.asc())\
        .all()
407
407
def get_pr_version(self, pull_request_id, version=None):
    """
    Resolve a pull request, optionally at a specific stored version.

    :param pull_request_id: id of the pull request
    :param version: ``None`` for the live pull request, the string
        ``'latest'`` for the live pull request flagged as latest, or a
        pull request version id for a stored snapshot
    :returns: 4-tuple of (original pull request, resolved object — the
        version snapshot when requested, display wrapper, at_version
        marker: ``None``, ``'latest'`` or the version id)
    """
    at_version = None

    if version and version == 'latest':
        # 'latest' resolves to the live pull request itself
        pull_request_ver = PullRequest.get(pull_request_id)
        pull_request_obj = pull_request_ver
        _org_pull_request_obj = pull_request_obj
        at_version = 'latest'
    elif version:
        # a concrete stored version; 404s if it does not exist
        pull_request_ver = PullRequestVersion.get_or_404(version)
        pull_request_obj = pull_request_ver
        _org_pull_request_obj = pull_request_ver.pull_request
        at_version = pull_request_ver.pull_request_version_id
    else:
        # no version requested: original and resolved are the same object
        _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
            pull_request_id)

    pull_request_display_obj = PullRequest.get_pr_display_object(
        pull_request_obj, _org_pull_request_obj)

    return _org_pull_request_obj, pull_request_obj, \
        pull_request_display_obj, at_version
430
430
def create(self, created_by, source_repo, source_ref, target_repo,
           target_ref, revisions, reviewers, title, description=None,
           description_renderer=None,
           reviewer_data=None, translator=None, auth_user=None):
    """
    Create a new pull request, register its reviewers, mark its commits
    as under review, run an initial merge simulation, and fire the
    'create' notifications/hooks.

    :param created_by: user (or user id) creating the pull request
    :param source_repo: repo (or repo name) the changes come from
    :param source_ref: source reference string
    :param target_repo: repo (or repo name) the changes target
    :param target_ref: target reference string
    :param revisions: list of revisions included in the pull request
    :param reviewers: iterable of (user_id, reasons, mandatory, rules)
        tuples describing each reviewer
    :param title: pull request title
    :param description: optional description text
    :param description_renderer: renderer used for the description
    :param reviewer_data: raw reviewer rule data stored on the PR
    :param translator: translation function; defaults to the current
        request's translator
    :param auth_user: acting auth user; defaults to the creator
    :returns: the newly created PullRequest
    """
    translator = translator or get_current_request().translate

    created_by_user = self._get_user(created_by)
    auth_user = auth_user or created_by_user.AuthUser()
    source_repo = self._get_repo(source_repo)
    target_repo = self._get_repo(target_repo)

    pull_request = PullRequest()
    pull_request.source_repo = source_repo
    pull_request.source_ref = source_ref
    pull_request.target_repo = target_repo
    pull_request.target_ref = target_ref
    pull_request.revisions = revisions
    pull_request.title = title
    pull_request.description = description
    pull_request.description_renderer = description_renderer
    pull_request.author = created_by_user
    pull_request.reviewer_data = reviewer_data
    pull_request.pull_request_state = pull_request.STATE_CREATING
    Session().add(pull_request)
    # flush so pull_request gets its id for the reviewer rows below
    Session().flush()

    reviewer_ids = set()
    # members / reviewers
    for reviewer_object in reviewers:
        user_id, reasons, mandatory, rules = reviewer_object
        user = self._get_user(user_id)

        # skip duplicates
        if user.user_id in reviewer_ids:
            continue

        reviewer_ids.add(user.user_id)

        reviewer = PullRequestReviewers()
        reviewer.user = user
        reviewer.pull_request = pull_request
        reviewer.reasons = reasons
        reviewer.mandatory = mandatory

        # NOTE(marcink): pick only first rule for now
        rule_id = list(rules)[0] if rules else None
        rule = RepoReviewRule.get(rule_id) if rule_id else None
        if rule:
            review_group = rule.user_group_vote_rule(user_id)
            # we check if this particular reviewer is member of a voting group
            if review_group:
                # NOTE(marcink):
                # can be that user is member of more but we pick the first same,
                # same as default reviewers algo
                review_group = review_group[0]

                rule_data = {
                    'rule_name':
                        rule.review_rule_name,
                    'rule_user_group_entry_id':
                        review_group.repo_review_rule_users_group_id,
                    'rule_user_group_name':
                        review_group.users_group.users_group_name,
                    'rule_user_group_members':
                        [x.user.username for x in review_group.users_group.members],
                    'rule_user_group_members_id':
                        [x.user.user_id for x in review_group.users_group.members],
                }
                # e.g {'vote_rule': -1, 'mandatory': True}
                rule_data.update(review_group.rule_data())

                reviewer.rule_data = rule_data

        Session().add(reviewer)
        Session().flush()

    # Set approval status to "Under Review" for all commits which are
    # part of this pull request.
    ChangesetStatusModel().set_status(
        repo=target_repo,
        status=ChangesetStatus.STATUS_UNDER_REVIEW,
        user=created_by_user,
        pull_request=pull_request
    )
    # we commit early at this point. This has to do with a fact
    # that before queries do some row-locking. And because of that
    # we need to commit and finish transaction before below validate call
    # that for large repos could be long resulting in long row locks
    Session().commit()

    # prepare workspace, and run initial merge simulation. Set state during that
    # operation
    pull_request = PullRequest.get(pull_request.pull_request_id)

    # set as merging, for merge simulation, and if finished to created so we mark
    # simulation is working fine
    with pull_request.set_state(PullRequest.STATE_MERGING,
                                final_state=PullRequest.STATE_CREATED) as state_obj:
        MergeCheck.validate(
            pull_request, auth_user=auth_user, translator=translator)

    self.notify_reviewers(pull_request, reviewer_ids)
    self.trigger_pull_request_hook(
        pull_request, created_by_user, 'create')

    creation_data = pull_request.get_api_data(with_merge_state=False)
    self._log_audit_action(
        'repo.pull_request.create', {'data': creation_data},
        auth_user, pull_request)

    return pull_request
542
542
def trigger_pull_request_hook(self, pull_request, user, action, data=None):
    """
    Fire the repository hook that corresponds to a pull request *action*.

    Known actions: 'create', 'merge', 'close', 'review_status_change',
    'update' and 'comment'. For 'comment' no repo hook exists, so only the
    comment event is triggered; unknown actions are ignored silently.
    """
    pull_request = self.__get_pull_request(pull_request)
    target_scm = pull_request.target_repo.scm_instance()

    hook_map = {
        'create': hooks_utils.trigger_log_create_pull_request_hook,
        'merge': hooks_utils.trigger_log_merge_pull_request_hook,
        'close': hooks_utils.trigger_log_close_pull_request_hook,
        'review_status_change': hooks_utils.trigger_log_review_pull_request_hook,
        'update': hooks_utils.trigger_log_update_pull_request_hook,
    }

    if action in hook_map:
        trigger_hook = hook_map[action]
    elif action == 'comment':
        # dummy hook ! for comment. We want this function to handle all cases
        def trigger_hook(*args, **kwargs):
            pass
        comment = data['comment']
        events.trigger(events.PullRequestCommentEvent(pull_request, comment))
    else:
        return

    trigger_hook(
        username=user.username,
        repo_name=pull_request.target_repo.repo_name,
        repo_alias=target_scm.alias,
        pull_request=pull_request,
        data=data)
571
571
572 def _get_commit_ids(self, pull_request):
572 def _get_commit_ids(self, pull_request):
573 """
573 """
574 Return the commit ids of the merged pull request.
574 Return the commit ids of the merged pull request.
575
575
576 This method is not dealing correctly yet with the lack of autoupdates
576 This method is not dealing correctly yet with the lack of autoupdates
577 nor with the implicit target updates.
577 nor with the implicit target updates.
578 For example: if a commit in the source repo is already in the target it
578 For example: if a commit in the source repo is already in the target it
579 will be reported anyways.
579 will be reported anyways.
580 """
580 """
581 merge_rev = pull_request.merge_rev
581 merge_rev = pull_request.merge_rev
582 if merge_rev is None:
582 if merge_rev is None:
583 raise ValueError('This pull request was not merged yet')
583 raise ValueError('This pull request was not merged yet')
584
584
585 commit_ids = list(pull_request.revisions)
585 commit_ids = list(pull_request.revisions)
586 if merge_rev not in commit_ids:
586 if merge_rev not in commit_ids:
587 commit_ids.append(merge_rev)
587 commit_ids.append(merge_rev)
588
588
589 return commit_ids
589 return commit_ids
590
590
def merge_repo(self, pull_request, user, extras):
    """
    Merge the given pull request into its target repository.

    On a successful merge the pull request gets a closing comment, is
    closed, and the merge is written to the audit log; on failure the pull
    request is left untouched.

    :param pull_request: pull request to merge
    :param user: user performing the merge
    :param extras: dict of hook extras handed to the vcs layer
    :returns: merge response object from the vcs backend
    """
    log.debug("Merging pull request %s", pull_request.pull_request_id)
    extras['user_agent'] = 'internal-merge'
    merge_state = self._merge_pull_request(pull_request, user, extras)
    if merge_state.executed:
        log.debug("Merge was successful, updating the pull request comments.")
        self._comment_and_close_pr(pull_request, user, merge_state)

        self._log_audit_action(
            'repo.pull_request.merge',
            {'merge_state': merge_state.__dict__},
            user, pull_request)

    else:
        # Logger.warn() is a deprecated alias of warning()
        log.warning("Merge failed, not updating the pull request.")
    return merge_state
607
607
def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
    """
    Perform the actual vcs-level merge of a pull request.

    :param pull_request: pull request to merge
    :param user: user performing the merge (used for author name/email)
    :param extras: hook extras; enriched by the callback daemon and passed
        to the vcs layer via the RC_SCM_DATA config entry
    :param merge_msg: optional custom merge commit message template;
        defaults to ``vcs_settings.MERGE_MESSAGE_TMPL``
    :returns: merge response object from the vcs backend
    """
    target_vcs = pull_request.target_repo.scm_instance()
    source_vcs = pull_request.source_repo.scm_instance()

    # render the merge commit message from the template
    message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
        pr_id=pull_request.pull_request_id,
        pr_title=pull_request.title,
        source_repo=source_vcs.name,
        source_ref_name=pull_request.source_ref_parts.name,
        target_repo=target_vcs.name,
        target_ref_name=pull_request.target_ref_parts.name,
    )

    workspace_id = self._workspace_id(pull_request)
    repo_id = pull_request.target_repo.repo_id
    use_rebase = self._use_rebase_for_merging(pull_request)
    close_branch = self._close_branch_before_merging(pull_request)

    # make sure the target ref points at a current commit id
    target_ref = self._refresh_reference(
        pull_request.target_ref_parts, target_vcs)

    # hooks fired during the merge call back into this process via the daemon
    callback_daemon, extras = prepare_callback_daemon(
        extras, protocol=vcs_settings.HOOKS_PROTOCOL,
        host=vcs_settings.HOOKS_HOST,
        use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

    with callback_daemon:
        # TODO: johbo: Implement a clean way to run a config_override
        # for a single call.
        target_vcs.config.set(
            'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

        user_name = user.short_contact
        merge_state = target_vcs.merge(
            repo_id, workspace_id, target_ref, source_vcs,
            pull_request.source_ref_parts,
            user_name=user_name, user_email=user.email,
            message=message, use_rebase=use_rebase,
            close_branch=close_branch)
    return merge_state
648
648
def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
    """
    After a successful merge: record the merge revision, leave a closing
    comment, invalidate the target repo caches, and fire the 'merge' hook.

    :param pull_request: the merged pull request
    :param user: user who performed the merge
    :param merge_state: merge response; ``merge_ref.commit_id`` becomes the
        pull request's ``merge_rev``
    :param close_msg: optional custom closing comment text
    """
    pull_request.merge_rev = merge_state.merge_ref.commit_id
    pull_request.updated_on = datetime.datetime.now()
    close_msg = close_msg or 'Pull request merged and closed'

    # status-less closing comment; closing_pr=True also closes the PR
    CommentsModel().create(
        text=safe_unicode(close_msg),
        repo=pull_request.target_repo.repo_id,
        user=user.user_id,
        pull_request=pull_request.pull_request_id,
        f_path=None,
        line_no=None,
        closing_pr=True
    )

    Session().add(pull_request)
    Session().flush()
    # TODO: paris: replace invalidation with less radical solution
    ScmModel().mark_for_invalidation(
        pull_request.target_repo.repo_name)
    self.trigger_pull_request_hook(pull_request, user, 'merge')
670
670
def has_valid_update_type(self, pull_request):
    """
    Check whether the pull request's source ref type is one that supports
    the update-commits operation (membership in ``self.REF_TYPES``).
    """
    return pull_request.source_ref_parts.type in self.REF_TYPES
674
674
675 def update_commits(self, pull_request):
675 def update_commits(self, pull_request):
676 """
676 """
677 Get the updated list of commits for the pull request
677 Get the updated list of commits for the pull request
678 and return the new pull request version and the list
678 and return the new pull request version and the list
679 of commits processed by this update action
679 of commits processed by this update action
680 """
680 """
681 pull_request = self.__get_pull_request(pull_request)
681 pull_request = self.__get_pull_request(pull_request)
682 source_ref_type = pull_request.source_ref_parts.type
682 source_ref_type = pull_request.source_ref_parts.type
683 source_ref_name = pull_request.source_ref_parts.name
683 source_ref_name = pull_request.source_ref_parts.name
684 source_ref_id = pull_request.source_ref_parts.commit_id
684 source_ref_id = pull_request.source_ref_parts.commit_id
685
685
686 target_ref_type = pull_request.target_ref_parts.type
686 target_ref_type = pull_request.target_ref_parts.type
687 target_ref_name = pull_request.target_ref_parts.name
687 target_ref_name = pull_request.target_ref_parts.name
688 target_ref_id = pull_request.target_ref_parts.commit_id
688 target_ref_id = pull_request.target_ref_parts.commit_id
689
689
690 if not self.has_valid_update_type(pull_request):
690 if not self.has_valid_update_type(pull_request):
691 log.debug("Skipping update of pull request %s due to ref type: %s",
691 log.debug("Skipping update of pull request %s due to ref type: %s",
692 pull_request, source_ref_type)
692 pull_request, source_ref_type)
693 return UpdateResponse(
693 return UpdateResponse(
694 executed=False,
694 executed=False,
695 reason=UpdateFailureReason.WRONG_REF_TYPE,
695 reason=UpdateFailureReason.WRONG_REF_TYPE,
696 old=pull_request, new=None, changes=None,
696 old=pull_request, new=None, changes=None,
697 source_changed=False, target_changed=False)
697 source_changed=False, target_changed=False)
698
698
699 # source repo
699 # source repo
700 source_repo = pull_request.source_repo.scm_instance()
700 source_repo = pull_request.source_repo.scm_instance()
701
701
702 try:
702 try:
703 source_commit = source_repo.get_commit(commit_id=source_ref_name)
703 source_commit = source_repo.get_commit(commit_id=source_ref_name)
704 except CommitDoesNotExistError:
704 except CommitDoesNotExistError:
705 return UpdateResponse(
705 return UpdateResponse(
706 executed=False,
706 executed=False,
707 reason=UpdateFailureReason.MISSING_SOURCE_REF,
707 reason=UpdateFailureReason.MISSING_SOURCE_REF,
708 old=pull_request, new=None, changes=None,
708 old=pull_request, new=None, changes=None,
709 source_changed=False, target_changed=False)
709 source_changed=False, target_changed=False)
710
710
711 source_changed = source_ref_id != source_commit.raw_id
711 source_changed = source_ref_id != source_commit.raw_id
712
712
713 # target repo
713 # target repo
714 target_repo = pull_request.target_repo.scm_instance()
714 target_repo = pull_request.target_repo.scm_instance()
715
715
716 try:
716 try:
717 target_commit = target_repo.get_commit(commit_id=target_ref_name)
717 target_commit = target_repo.get_commit(commit_id=target_ref_name)
718 except CommitDoesNotExistError:
718 except CommitDoesNotExistError:
719 return UpdateResponse(
719 return UpdateResponse(
720 executed=False,
720 executed=False,
721 reason=UpdateFailureReason.MISSING_TARGET_REF,
721 reason=UpdateFailureReason.MISSING_TARGET_REF,
722 old=pull_request, new=None, changes=None,
722 old=pull_request, new=None, changes=None,
723 source_changed=False, target_changed=False)
723 source_changed=False, target_changed=False)
724 target_changed = target_ref_id != target_commit.raw_id
724 target_changed = target_ref_id != target_commit.raw_id
725
725
726 if not (source_changed or target_changed):
726 if not (source_changed or target_changed):
727 log.debug("Nothing changed in pull request %s", pull_request)
727 log.debug("Nothing changed in pull request %s", pull_request)
728 return UpdateResponse(
728 return UpdateResponse(
729 executed=False,
729 executed=False,
730 reason=UpdateFailureReason.NO_CHANGE,
730 reason=UpdateFailureReason.NO_CHANGE,
731 old=pull_request, new=None, changes=None,
731 old=pull_request, new=None, changes=None,
732 source_changed=target_changed, target_changed=source_changed)
732 source_changed=target_changed, target_changed=source_changed)
733
733
734 change_in_found = 'target repo' if target_changed else 'source repo'
734 change_in_found = 'target repo' if target_changed else 'source repo'
735 log.debug('Updating pull request because of change in %s detected',
735 log.debug('Updating pull request because of change in %s detected',
736 change_in_found)
736 change_in_found)
737
737
738 # Finally there is a need for an update, in case of source change
738 # Finally there is a need for an update, in case of source change
739 # we create a new version, else just an update
739 # we create a new version, else just an update
740 if source_changed:
740 if source_changed:
741 pull_request_version = self._create_version_from_snapshot(pull_request)
741 pull_request_version = self._create_version_from_snapshot(pull_request)
742 self._link_comments_to_version(pull_request_version)
742 self._link_comments_to_version(pull_request_version)
743 else:
743 else:
744 try:
744 try:
745 ver = pull_request.versions[-1]
745 ver = pull_request.versions[-1]
746 except IndexError:
746 except IndexError:
747 ver = None
747 ver = None
748
748
749 pull_request.pull_request_version_id = \
749 pull_request.pull_request_version_id = \
750 ver.pull_request_version_id if ver else None
750 ver.pull_request_version_id if ver else None
751 pull_request_version = pull_request
751 pull_request_version = pull_request
752
752
753 try:
753 try:
754 if target_ref_type in self.REF_TYPES:
754 if target_ref_type in self.REF_TYPES:
755 target_commit = target_repo.get_commit(target_ref_name)
755 target_commit = target_repo.get_commit(target_ref_name)
756 else:
756 else:
757 target_commit = target_repo.get_commit(target_ref_id)
757 target_commit = target_repo.get_commit(target_ref_id)
758 except CommitDoesNotExistError:
758 except CommitDoesNotExistError:
759 return UpdateResponse(
759 return UpdateResponse(
760 executed=False,
760 executed=False,
761 reason=UpdateFailureReason.MISSING_TARGET_REF,
761 reason=UpdateFailureReason.MISSING_TARGET_REF,
762 old=pull_request, new=None, changes=None,
762 old=pull_request, new=None, changes=None,
763 source_changed=source_changed, target_changed=target_changed)
763 source_changed=source_changed, target_changed=target_changed)
764
764
765 # re-compute commit ids
765 # re-compute commit ids
766 old_commit_ids = pull_request.revisions
766 old_commit_ids = pull_request.revisions
767 pre_load = ["author", "date", "message", "branch"]
767 pre_load = ["author", "date", "message", "branch"]
768 commit_ranges = target_repo.compare(
768 commit_ranges = target_repo.compare(
769 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
769 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
770 pre_load=pre_load)
770 pre_load=pre_load)
771
771
772 ancestor = source_repo.get_common_ancestor(
772 ancestor = source_repo.get_common_ancestor(
773 source_commit.raw_id, target_commit.raw_id, target_repo)
773 source_commit.raw_id, target_commit.raw_id, target_repo)
774
774
775 pull_request.source_ref = '%s:%s:%s' % (
775 pull_request.source_ref = '%s:%s:%s' % (
776 source_ref_type, source_ref_name, source_commit.raw_id)
776 source_ref_type, source_ref_name, source_commit.raw_id)
777 pull_request.target_ref = '%s:%s:%s' % (
777 pull_request.target_ref = '%s:%s:%s' % (
778 target_ref_type, target_ref_name, ancestor)
778 target_ref_type, target_ref_name, ancestor)
779
779
780 pull_request.revisions = [
780 pull_request.revisions = [
781 commit.raw_id for commit in reversed(commit_ranges)]
781 commit.raw_id for commit in reversed(commit_ranges)]
782 pull_request.updated_on = datetime.datetime.now()
782 pull_request.updated_on = datetime.datetime.now()
783 Session().add(pull_request)
783 Session().add(pull_request)
784 new_commit_ids = pull_request.revisions
784 new_commit_ids = pull_request.revisions
785
785
786 old_diff_data, new_diff_data = self._generate_update_diffs(
786 old_diff_data, new_diff_data = self._generate_update_diffs(
787 pull_request, pull_request_version)
787 pull_request, pull_request_version)
788
788
789 # calculate commit and file changes
789 # calculate commit and file changes
790 changes = self._calculate_commit_id_changes(
790 changes = self._calculate_commit_id_changes(
791 old_commit_ids, new_commit_ids)
791 old_commit_ids, new_commit_ids)
792 file_changes = self._calculate_file_changes(
792 file_changes = self._calculate_file_changes(
793 old_diff_data, new_diff_data)
793 old_diff_data, new_diff_data)
794
794
795 # set comments as outdated if DIFFS changed
795 # set comments as outdated if DIFFS changed
796 CommentsModel().outdate_comments(
796 CommentsModel().outdate_comments(
797 pull_request, old_diff_data=old_diff_data,
797 pull_request, old_diff_data=old_diff_data,
798 new_diff_data=new_diff_data)
798 new_diff_data=new_diff_data)
799
799
800 commit_changes = (changes.added or changes.removed)
800 commit_changes = (changes.added or changes.removed)
801 file_node_changes = (
801 file_node_changes = (
802 file_changes.added or file_changes.modified or file_changes.removed)
802 file_changes.added or file_changes.modified or file_changes.removed)
803 pr_has_changes = commit_changes or file_node_changes
803 pr_has_changes = commit_changes or file_node_changes
804
804
805 # Add an automatic comment to the pull request, in case
805 # Add an automatic comment to the pull request, in case
806 # anything has changed
806 # anything has changed
807 if pr_has_changes:
807 if pr_has_changes:
808 update_comment = CommentsModel().create(
808 update_comment = CommentsModel().create(
809 text=self._render_update_message(changes, file_changes),
809 text=self._render_update_message(changes, file_changes),
810 repo=pull_request.target_repo,
810 repo=pull_request.target_repo,
811 user=pull_request.author,
811 user=pull_request.author,
812 pull_request=pull_request,
812 pull_request=pull_request,
813 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
813 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
814
814
815 # Update status to "Under Review" for added commits
815 # Update status to "Under Review" for added commits
816 for commit_id in changes.added:
816 for commit_id in changes.added:
817 ChangesetStatusModel().set_status(
817 ChangesetStatusModel().set_status(
818 repo=pull_request.source_repo,
818 repo=pull_request.source_repo,
819 status=ChangesetStatus.STATUS_UNDER_REVIEW,
819 status=ChangesetStatus.STATUS_UNDER_REVIEW,
820 comment=update_comment,
820 comment=update_comment,
821 user=pull_request.author,
821 user=pull_request.author,
822 pull_request=pull_request,
822 pull_request=pull_request,
823 revision=commit_id)
823 revision=commit_id)
824
824
825 log.debug(
825 log.debug(
826 'Updated pull request %s, added_ids: %s, common_ids: %s, '
826 'Updated pull request %s, added_ids: %s, common_ids: %s, '
827 'removed_ids: %s', pull_request.pull_request_id,
827 'removed_ids: %s', pull_request.pull_request_id,
828 changes.added, changes.common, changes.removed)
828 changes.added, changes.common, changes.removed)
829 log.debug(
829 log.debug(
830 'Updated pull request with the following file changes: %s',
830 'Updated pull request with the following file changes: %s',
831 file_changes)
831 file_changes)
832
832
833 log.info(
833 log.info(
834 "Updated pull request %s from commit %s to commit %s, "
834 "Updated pull request %s from commit %s to commit %s, "
835 "stored new version %s of this pull request.",
835 "stored new version %s of this pull request.",
836 pull_request.pull_request_id, source_ref_id,
836 pull_request.pull_request_id, source_ref_id,
837 pull_request.source_ref_parts.commit_id,
837 pull_request.source_ref_parts.commit_id,
838 pull_request_version.pull_request_version_id)
838 pull_request_version.pull_request_version_id)
839 Session().commit()
839 Session().commit()
840 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
840 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
841
841
842 return UpdateResponse(
842 return UpdateResponse(
843 executed=True, reason=UpdateFailureReason.NONE,
843 executed=True, reason=UpdateFailureReason.NONE,
844 old=pull_request, new=pull_request_version, changes=changes,
844 old=pull_request, new=pull_request_version, changes=changes,
845 source_changed=source_changed, target_changed=target_changed)
845 source_changed=source_changed, target_changed=target_changed)
846
846
847 def _create_version_from_snapshot(self, pull_request):
847 def _create_version_from_snapshot(self, pull_request):
848 version = PullRequestVersion()
848 version = PullRequestVersion()
849 version.title = pull_request.title
849 version.title = pull_request.title
850 version.description = pull_request.description
850 version.description = pull_request.description
851 version.status = pull_request.status
851 version.status = pull_request.status
852 version.pull_request_state = pull_request.pull_request_state
852 version.pull_request_state = pull_request.pull_request_state
853 version.created_on = datetime.datetime.now()
853 version.created_on = datetime.datetime.now()
854 version.updated_on = pull_request.updated_on
854 version.updated_on = pull_request.updated_on
855 version.user_id = pull_request.user_id
855 version.user_id = pull_request.user_id
856 version.source_repo = pull_request.source_repo
856 version.source_repo = pull_request.source_repo
857 version.source_ref = pull_request.source_ref
857 version.source_ref = pull_request.source_ref
858 version.target_repo = pull_request.target_repo
858 version.target_repo = pull_request.target_repo
859 version.target_ref = pull_request.target_ref
859 version.target_ref = pull_request.target_ref
860
860
861 version._last_merge_source_rev = pull_request._last_merge_source_rev
861 version._last_merge_source_rev = pull_request._last_merge_source_rev
862 version._last_merge_target_rev = pull_request._last_merge_target_rev
862 version._last_merge_target_rev = pull_request._last_merge_target_rev
863 version.last_merge_status = pull_request.last_merge_status
863 version.last_merge_status = pull_request.last_merge_status
864 version.shadow_merge_ref = pull_request.shadow_merge_ref
864 version.shadow_merge_ref = pull_request.shadow_merge_ref
865 version.merge_rev = pull_request.merge_rev
865 version.merge_rev = pull_request.merge_rev
866 version.reviewer_data = pull_request.reviewer_data
866 version.reviewer_data = pull_request.reviewer_data
867
867
868 version.revisions = pull_request.revisions
868 version.revisions = pull_request.revisions
869 version.pull_request = pull_request
869 version.pull_request = pull_request
870 Session().add(version)
870 Session().add(version)
871 Session().flush()
871 Session().flush()
872
872
873 return version
873 return version
874
874
875 def _generate_update_diffs(self, pull_request, pull_request_version):
875 def _generate_update_diffs(self, pull_request, pull_request_version):
876
876
877 diff_context = (
877 diff_context = (
878 self.DIFF_CONTEXT +
878 self.DIFF_CONTEXT +
879 CommentsModel.needed_extra_diff_context())
879 CommentsModel.needed_extra_diff_context())
880 hide_whitespace_changes = False
880 hide_whitespace_changes = False
881 source_repo = pull_request_version.source_repo
881 source_repo = pull_request_version.source_repo
882 source_ref_id = pull_request_version.source_ref_parts.commit_id
882 source_ref_id = pull_request_version.source_ref_parts.commit_id
883 target_ref_id = pull_request_version.target_ref_parts.commit_id
883 target_ref_id = pull_request_version.target_ref_parts.commit_id
884 old_diff = self._get_diff_from_pr_or_version(
884 old_diff = self._get_diff_from_pr_or_version(
885 source_repo, source_ref_id, target_ref_id,
885 source_repo, source_ref_id, target_ref_id,
886 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
886 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
887
887
888 source_repo = pull_request.source_repo
888 source_repo = pull_request.source_repo
889 source_ref_id = pull_request.source_ref_parts.commit_id
889 source_ref_id = pull_request.source_ref_parts.commit_id
890 target_ref_id = pull_request.target_ref_parts.commit_id
890 target_ref_id = pull_request.target_ref_parts.commit_id
891
891
892 new_diff = self._get_diff_from_pr_or_version(
892 new_diff = self._get_diff_from_pr_or_version(
893 source_repo, source_ref_id, target_ref_id,
893 source_repo, source_ref_id, target_ref_id,
894 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
894 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
895
895
896 old_diff_data = diffs.DiffProcessor(old_diff)
896 old_diff_data = diffs.DiffProcessor(old_diff)
897 old_diff_data.prepare()
897 old_diff_data.prepare()
898 new_diff_data = diffs.DiffProcessor(new_diff)
898 new_diff_data = diffs.DiffProcessor(new_diff)
899 new_diff_data.prepare()
899 new_diff_data.prepare()
900
900
901 return old_diff_data, new_diff_data
901 return old_diff_data, new_diff_data
902
902
903 def _link_comments_to_version(self, pull_request_version):
903 def _link_comments_to_version(self, pull_request_version):
904 """
904 """
905 Link all unlinked comments of this pull request to the given version.
905 Link all unlinked comments of this pull request to the given version.
906
906
907 :param pull_request_version: The `PullRequestVersion` to which
907 :param pull_request_version: The `PullRequestVersion` to which
908 the comments shall be linked.
908 the comments shall be linked.
909
909
910 """
910 """
911 pull_request = pull_request_version.pull_request
911 pull_request = pull_request_version.pull_request
912 comments = ChangesetComment.query()\
912 comments = ChangesetComment.query()\
913 .filter(
913 .filter(
914 # TODO: johbo: Should we query for the repo at all here?
914 # TODO: johbo: Should we query for the repo at all here?
915 # Pending decision on how comments of PRs are to be related
915 # Pending decision on how comments of PRs are to be related
916 # to either the source repo, the target repo or no repo at all.
916 # to either the source repo, the target repo or no repo at all.
917 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
917 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
918 ChangesetComment.pull_request == pull_request,
918 ChangesetComment.pull_request == pull_request,
919 ChangesetComment.pull_request_version == None)\
919 ChangesetComment.pull_request_version == None)\
920 .order_by(ChangesetComment.comment_id.asc())
920 .order_by(ChangesetComment.comment_id.asc())
921
921
922 # TODO: johbo: Find out why this breaks if it is done in a bulk
922 # TODO: johbo: Find out why this breaks if it is done in a bulk
923 # operation.
923 # operation.
924 for comment in comments:
924 for comment in comments:
925 comment.pull_request_version_id = (
925 comment.pull_request_version_id = (
926 pull_request_version.pull_request_version_id)
926 pull_request_version.pull_request_version_id)
927 Session().add(comment)
927 Session().add(comment)
928
928
929 def _calculate_commit_id_changes(self, old_ids, new_ids):
929 def _calculate_commit_id_changes(self, old_ids, new_ids):
930 added = [x for x in new_ids if x not in old_ids]
930 added = [x for x in new_ids if x not in old_ids]
931 common = [x for x in new_ids if x in old_ids]
931 common = [x for x in new_ids if x in old_ids]
932 removed = [x for x in old_ids if x not in new_ids]
932 removed = [x for x in old_ids if x not in new_ids]
933 total = new_ids
933 total = new_ids
934 return ChangeTuple(added, common, removed, total)
934 return ChangeTuple(added, common, removed, total)
935
935
936 def _calculate_file_changes(self, old_diff_data, new_diff_data):
936 def _calculate_file_changes(self, old_diff_data, new_diff_data):
937
937
938 old_files = OrderedDict()
938 old_files = OrderedDict()
939 for diff_data in old_diff_data.parsed_diff:
939 for diff_data in old_diff_data.parsed_diff:
940 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
940 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
941
941
942 added_files = []
942 added_files = []
943 modified_files = []
943 modified_files = []
944 removed_files = []
944 removed_files = []
945 for diff_data in new_diff_data.parsed_diff:
945 for diff_data in new_diff_data.parsed_diff:
946 new_filename = diff_data['filename']
946 new_filename = diff_data['filename']
947 new_hash = md5_safe(diff_data['raw_diff'])
947 new_hash = md5_safe(diff_data['raw_diff'])
948
948
949 old_hash = old_files.get(new_filename)
949 old_hash = old_files.get(new_filename)
950 if not old_hash:
950 if not old_hash:
951 # file is not present in old diff, means it's added
951 # file is not present in old diff, means it's added
952 added_files.append(new_filename)
952 added_files.append(new_filename)
953 else:
953 else:
954 if new_hash != old_hash:
954 if new_hash != old_hash:
955 modified_files.append(new_filename)
955 modified_files.append(new_filename)
956 # now remove a file from old, since we have seen it already
956 # now remove a file from old, since we have seen it already
957 del old_files[new_filename]
957 del old_files[new_filename]
958
958
959 # removed files is when there are present in old, but not in NEW,
959 # removed files is when there are present in old, but not in NEW,
960 # since we remove old files that are present in new diff, left-overs
960 # since we remove old files that are present in new diff, left-overs
961 # if any should be the removed files
961 # if any should be the removed files
962 removed_files.extend(old_files.keys())
962 removed_files.extend(old_files.keys())
963
963
964 return FileChangeTuple(added_files, modified_files, removed_files)
964 return FileChangeTuple(added_files, modified_files, removed_files)
965
965
966 def _render_update_message(self, changes, file_changes):
966 def _render_update_message(self, changes, file_changes):
967 """
967 """
968 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
968 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
969 so it's always looking the same disregarding on which default
969 so it's always looking the same disregarding on which default
970 renderer system is using.
970 renderer system is using.
971
971
972 :param changes: changes named tuple
972 :param changes: changes named tuple
973 :param file_changes: file changes named tuple
973 :param file_changes: file changes named tuple
974
974
975 """
975 """
976 new_status = ChangesetStatus.get_status_lbl(
976 new_status = ChangesetStatus.get_status_lbl(
977 ChangesetStatus.STATUS_UNDER_REVIEW)
977 ChangesetStatus.STATUS_UNDER_REVIEW)
978
978
979 changed_files = (
979 changed_files = (
980 file_changes.added + file_changes.modified + file_changes.removed)
980 file_changes.added + file_changes.modified + file_changes.removed)
981
981
982 params = {
982 params = {
983 'under_review_label': new_status,
983 'under_review_label': new_status,
984 'added_commits': changes.added,
984 'added_commits': changes.added,
985 'removed_commits': changes.removed,
985 'removed_commits': changes.removed,
986 'changed_files': changed_files,
986 'changed_files': changed_files,
987 'added_files': file_changes.added,
987 'added_files': file_changes.added,
988 'modified_files': file_changes.modified,
988 'modified_files': file_changes.modified,
989 'removed_files': file_changes.removed,
989 'removed_files': file_changes.removed,
990 }
990 }
991 renderer = RstTemplateRenderer()
991 renderer = RstTemplateRenderer()
992 return renderer.render('pull_request_update.mako', **params)
992 return renderer.render('pull_request_update.mako', **params)
993
993
994 def edit(self, pull_request, title, description, description_renderer, user):
994 def edit(self, pull_request, title, description, description_renderer, user):
995 pull_request = self.__get_pull_request(pull_request)
995 pull_request = self.__get_pull_request(pull_request)
996 old_data = pull_request.get_api_data(with_merge_state=False)
996 old_data = pull_request.get_api_data(with_merge_state=False)
997 if pull_request.is_closed():
997 if pull_request.is_closed():
998 raise ValueError('This pull request is closed')
998 raise ValueError('This pull request is closed')
999 if title:
999 if title:
1000 pull_request.title = title
1000 pull_request.title = title
1001 pull_request.description = description
1001 pull_request.description = description
1002 pull_request.updated_on = datetime.datetime.now()
1002 pull_request.updated_on = datetime.datetime.now()
1003 pull_request.description_renderer = description_renderer
1003 pull_request.description_renderer = description_renderer
1004 Session().add(pull_request)
1004 Session().add(pull_request)
1005 self._log_audit_action(
1005 self._log_audit_action(
1006 'repo.pull_request.edit', {'old_data': old_data},
1006 'repo.pull_request.edit', {'old_data': old_data},
1007 user, pull_request)
1007 user, pull_request)
1008
1008
1009 def update_reviewers(self, pull_request, reviewer_data, user):
1009 def update_reviewers(self, pull_request, reviewer_data, user):
1010 """
1010 """
1011 Update the reviewers in the pull request
1011 Update the reviewers in the pull request
1012
1012
1013 :param pull_request: the pr to update
1013 :param pull_request: the pr to update
1014 :param reviewer_data: list of tuples
1014 :param reviewer_data: list of tuples
1015 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1015 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1016 """
1016 """
1017 pull_request = self.__get_pull_request(pull_request)
1017 pull_request = self.__get_pull_request(pull_request)
1018 if pull_request.is_closed():
1018 if pull_request.is_closed():
1019 raise ValueError('This pull request is closed')
1019 raise ValueError('This pull request is closed')
1020
1020
1021 reviewers = {}
1021 reviewers = {}
1022 for user_id, reasons, mandatory, rules in reviewer_data:
1022 for user_id, reasons, mandatory, rules in reviewer_data:
1023 if isinstance(user_id, (int, compat.string_types)):
1023 if isinstance(user_id, (int, compat.string_types)):
1024 user_id = self._get_user(user_id).user_id
1024 user_id = self._get_user(user_id).user_id
1025 reviewers[user_id] = {
1025 reviewers[user_id] = {
1026 'reasons': reasons, 'mandatory': mandatory}
1026 'reasons': reasons, 'mandatory': mandatory}
1027
1027
1028 reviewers_ids = set(reviewers.keys())
1028 reviewers_ids = set(reviewers.keys())
1029 current_reviewers = PullRequestReviewers.query()\
1029 current_reviewers = PullRequestReviewers.query()\
1030 .filter(PullRequestReviewers.pull_request ==
1030 .filter(PullRequestReviewers.pull_request ==
1031 pull_request).all()
1031 pull_request).all()
1032 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1032 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1033
1033
1034 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1034 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1035 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1035 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1036
1036
1037 log.debug("Adding %s reviewers", ids_to_add)
1037 log.debug("Adding %s reviewers", ids_to_add)
1038 log.debug("Removing %s reviewers", ids_to_remove)
1038 log.debug("Removing %s reviewers", ids_to_remove)
1039 changed = False
1039 changed = False
1040 added_audit_reviewers = []
1040 added_audit_reviewers = []
1041 removed_audit_reviewers = []
1041 removed_audit_reviewers = []
1042
1042
1043 for uid in ids_to_add:
1043 for uid in ids_to_add:
1044 changed = True
1044 changed = True
1045 _usr = self._get_user(uid)
1045 _usr = self._get_user(uid)
1046 reviewer = PullRequestReviewers()
1046 reviewer = PullRequestReviewers()
1047 reviewer.user = _usr
1047 reviewer.user = _usr
1048 reviewer.pull_request = pull_request
1048 reviewer.pull_request = pull_request
1049 reviewer.reasons = reviewers[uid]['reasons']
1049 reviewer.reasons = reviewers[uid]['reasons']
1050 # NOTE(marcink): mandatory shouldn't be changed now
1050 # NOTE(marcink): mandatory shouldn't be changed now
1051 # reviewer.mandatory = reviewers[uid]['reasons']
1051 # reviewer.mandatory = reviewers[uid]['reasons']
1052 Session().add(reviewer)
1052 Session().add(reviewer)
1053 added_audit_reviewers.append(reviewer.get_dict())
1053 added_audit_reviewers.append(reviewer.get_dict())
1054
1054
1055 for uid in ids_to_remove:
1055 for uid in ids_to_remove:
1056 changed = True
1056 changed = True
1057 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1057 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1058 # that prevents and fixes cases that we added the same reviewer twice.
1058 # that prevents and fixes cases that we added the same reviewer twice.
1059 # this CAN happen due to the lack of DB checks
1059 # this CAN happen due to the lack of DB checks
1060 reviewers = PullRequestReviewers.query()\
1060 reviewers = PullRequestReviewers.query()\
1061 .filter(PullRequestReviewers.user_id == uid,
1061 .filter(PullRequestReviewers.user_id == uid,
1062 PullRequestReviewers.pull_request == pull_request)\
1062 PullRequestReviewers.pull_request == pull_request)\
1063 .all()
1063 .all()
1064
1064
1065 for obj in reviewers:
1065 for obj in reviewers:
1066 added_audit_reviewers.append(obj.get_dict())
1066 added_audit_reviewers.append(obj.get_dict())
1067 Session().delete(obj)
1067 Session().delete(obj)
1068
1068
1069 if changed:
1069 if changed:
1070 Session().expire_all()
1070 Session().expire_all()
1071 pull_request.updated_on = datetime.datetime.now()
1071 pull_request.updated_on = datetime.datetime.now()
1072 Session().add(pull_request)
1072 Session().add(pull_request)
1073
1073
1074 # finally store audit logs
1074 # finally store audit logs
1075 for user_data in added_audit_reviewers:
1075 for user_data in added_audit_reviewers:
1076 self._log_audit_action(
1076 self._log_audit_action(
1077 'repo.pull_request.reviewer.add', {'data': user_data},
1077 'repo.pull_request.reviewer.add', {'data': user_data},
1078 user, pull_request)
1078 user, pull_request)
1079 for user_data in removed_audit_reviewers:
1079 for user_data in removed_audit_reviewers:
1080 self._log_audit_action(
1080 self._log_audit_action(
1081 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1081 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1082 user, pull_request)
1082 user, pull_request)
1083
1083
1084 self.notify_reviewers(pull_request, ids_to_add)
1084 self.notify_reviewers(pull_request, ids_to_add)
1085 return ids_to_add, ids_to_remove
1085 return ids_to_add, ids_to_remove
1086
1086
1087 def get_url(self, pull_request, request=None, permalink=False):
1087 def get_url(self, pull_request, request=None, permalink=False):
1088 if not request:
1088 if not request:
1089 request = get_current_request()
1089 request = get_current_request()
1090
1090
1091 if permalink:
1091 if permalink:
1092 return request.route_url(
1092 return request.route_url(
1093 'pull_requests_global',
1093 'pull_requests_global',
1094 pull_request_id=pull_request.pull_request_id,)
1094 pull_request_id=pull_request.pull_request_id,)
1095 else:
1095 else:
1096 return request.route_url('pullrequest_show',
1096 return request.route_url('pullrequest_show',
1097 repo_name=safe_str(pull_request.target_repo.repo_name),
1097 repo_name=safe_str(pull_request.target_repo.repo_name),
1098 pull_request_id=pull_request.pull_request_id,)
1098 pull_request_id=pull_request.pull_request_id,)
1099
1099
1100 def get_shadow_clone_url(self, pull_request, request=None):
1100 def get_shadow_clone_url(self, pull_request, request=None):
1101 """
1101 """
1102 Returns qualified url pointing to the shadow repository. If this pull
1102 Returns qualified url pointing to the shadow repository. If this pull
1103 request is closed there is no shadow repository and ``None`` will be
1103 request is closed there is no shadow repository and ``None`` will be
1104 returned.
1104 returned.
1105 """
1105 """
1106 if pull_request.is_closed():
1106 if pull_request.is_closed():
1107 return None
1107 return None
1108 else:
1108 else:
1109 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1109 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1110 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1110 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1111
1111
1112 def notify_reviewers(self, pull_request, reviewers_ids):
1112 def notify_reviewers(self, pull_request, reviewers_ids):
1113 # notification to reviewers
1113 # notification to reviewers
1114 if not reviewers_ids:
1114 if not reviewers_ids:
1115 return
1115 return
1116
1116
1117 log.debug('Notify following reviewers about pull-request %s', reviewers_ids)
1117 log.debug('Notify following reviewers about pull-request %s', reviewers_ids)
1118
1118
1119 pull_request_obj = pull_request
1119 pull_request_obj = pull_request
1120 # get the current participants of this pull request
1120 # get the current participants of this pull request
1121 recipients = reviewers_ids
1121 recipients = reviewers_ids
1122 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1122 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1123
1123
1124 pr_source_repo = pull_request_obj.source_repo
1124 pr_source_repo = pull_request_obj.source_repo
1125 pr_target_repo = pull_request_obj.target_repo
1125 pr_target_repo = pull_request_obj.target_repo
1126
1126
1127 pr_url = h.route_url('pullrequest_show',
1127 pr_url = h.route_url('pullrequest_show',
1128 repo_name=pr_target_repo.repo_name,
1128 repo_name=pr_target_repo.repo_name,
1129 pull_request_id=pull_request_obj.pull_request_id,)
1129 pull_request_id=pull_request_obj.pull_request_id,)
1130
1130
1131 # set some variables for email notification
1131 # set some variables for email notification
1132 pr_target_repo_url = h.route_url(
1132 pr_target_repo_url = h.route_url(
1133 'repo_summary', repo_name=pr_target_repo.repo_name)
1133 'repo_summary', repo_name=pr_target_repo.repo_name)
1134
1134
1135 pr_source_repo_url = h.route_url(
1135 pr_source_repo_url = h.route_url(
1136 'repo_summary', repo_name=pr_source_repo.repo_name)
1136 'repo_summary', repo_name=pr_source_repo.repo_name)
1137
1137
1138 # pull request specifics
1138 # pull request specifics
1139 pull_request_commits = [
1139 pull_request_commits = [
1140 (x.raw_id, x.message)
1140 (x.raw_id, x.message)
1141 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1141 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1142
1142
1143 kwargs = {
1143 kwargs = {
1144 'user': pull_request.author,
1144 'user': pull_request.author,
1145 'pull_request': pull_request_obj,
1145 'pull_request': pull_request_obj,
1146 'pull_request_commits': pull_request_commits,
1146 'pull_request_commits': pull_request_commits,
1147
1147
1148 'pull_request_target_repo': pr_target_repo,
1148 'pull_request_target_repo': pr_target_repo,
1149 'pull_request_target_repo_url': pr_target_repo_url,
1149 'pull_request_target_repo_url': pr_target_repo_url,
1150
1150
1151 'pull_request_source_repo': pr_source_repo,
1151 'pull_request_source_repo': pr_source_repo,
1152 'pull_request_source_repo_url': pr_source_repo_url,
1152 'pull_request_source_repo_url': pr_source_repo_url,
1153
1153
1154 'pull_request_url': pr_url,
1154 'pull_request_url': pr_url,
1155 }
1155 }
1156
1156
1157 # pre-generate the subject for notification itself
1157 # pre-generate the subject for notification itself
1158 (subject,
1158 (subject,
1159 _h, _e, # we don't care about those
1159 _h, _e, # we don't care about those
1160 body_plaintext) = EmailNotificationModel().render_email(
1160 body_plaintext) = EmailNotificationModel().render_email(
1161 notification_type, **kwargs)
1161 notification_type, **kwargs)
1162
1162
1163 # create notification objects, and emails
1163 # create notification objects, and emails
1164 NotificationModel().create(
1164 NotificationModel().create(
1165 created_by=pull_request.author,
1165 created_by=pull_request.author,
1166 notification_subject=subject,
1166 notification_subject=subject,
1167 notification_body=body_plaintext,
1167 notification_body=body_plaintext,
1168 notification_type=notification_type,
1168 notification_type=notification_type,
1169 recipients=recipients,
1169 recipients=recipients,
1170 email_kwargs=kwargs,
1170 email_kwargs=kwargs,
1171 )
1171 )
1172
1172
1173 def delete(self, pull_request, user):
1173 def delete(self, pull_request, user):
1174 pull_request = self.__get_pull_request(pull_request)
1174 pull_request = self.__get_pull_request(pull_request)
1175 old_data = pull_request.get_api_data(with_merge_state=False)
1175 old_data = pull_request.get_api_data(with_merge_state=False)
1176 self._cleanup_merge_workspace(pull_request)
1176 self._cleanup_merge_workspace(pull_request)
1177 self._log_audit_action(
1177 self._log_audit_action(
1178 'repo.pull_request.delete', {'old_data': old_data},
1178 'repo.pull_request.delete', {'old_data': old_data},
1179 user, pull_request)
1179 user, pull_request)
1180 Session().delete(pull_request)
1180 Session().delete(pull_request)
1181
1181
1182 def close_pull_request(self, pull_request, user):
1182 def close_pull_request(self, pull_request, user):
1183 pull_request = self.__get_pull_request(pull_request)
1183 pull_request = self.__get_pull_request(pull_request)
1184 self._cleanup_merge_workspace(pull_request)
1184 self._cleanup_merge_workspace(pull_request)
1185 pull_request.status = PullRequest.STATUS_CLOSED
1185 pull_request.status = PullRequest.STATUS_CLOSED
1186 pull_request.updated_on = datetime.datetime.now()
1186 pull_request.updated_on = datetime.datetime.now()
1187 Session().add(pull_request)
1187 Session().add(pull_request)
1188 self.trigger_pull_request_hook(
1188 self.trigger_pull_request_hook(
1189 pull_request, pull_request.author, 'close')
1189 pull_request, pull_request.author, 'close')
1190
1190
1191 pr_data = pull_request.get_api_data(with_merge_state=False)
1191 pr_data = pull_request.get_api_data(with_merge_state=False)
1192 self._log_audit_action(
1192 self._log_audit_action(
1193 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1193 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1194
1194
1195 def close_pull_request_with_comment(
1195 def close_pull_request_with_comment(
1196 self, pull_request, user, repo, message=None, auth_user=None):
1196 self, pull_request, user, repo, message=None, auth_user=None):
1197
1197
1198 pull_request_review_status = pull_request.calculated_review_status()
1198 pull_request_review_status = pull_request.calculated_review_status()
1199
1199
1200 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1200 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1201 # approved only if we have voting consent
1201 # approved only if we have voting consent
1202 status = ChangesetStatus.STATUS_APPROVED
1202 status = ChangesetStatus.STATUS_APPROVED
1203 else:
1203 else:
1204 status = ChangesetStatus.STATUS_REJECTED
1204 status = ChangesetStatus.STATUS_REJECTED
1205 status_lbl = ChangesetStatus.get_status_lbl(status)
1205 status_lbl = ChangesetStatus.get_status_lbl(status)
1206
1206
1207 default_message = (
1207 default_message = (
1208 'Closing with status change {transition_icon} {status}.'
1208 'Closing with status change {transition_icon} {status}.'
1209 ).format(transition_icon='>', status=status_lbl)
1209 ).format(transition_icon='>', status=status_lbl)
1210 text = message or default_message
1210 text = message or default_message
1211
1211
1212 # create a comment, and link it to new status
1212 # create a comment, and link it to new status
1213 comment = CommentsModel().create(
1213 comment = CommentsModel().create(
1214 text=text,
1214 text=text,
1215 repo=repo.repo_id,
1215 repo=repo.repo_id,
1216 user=user.user_id,
1216 user=user.user_id,
1217 pull_request=pull_request.pull_request_id,
1217 pull_request=pull_request.pull_request_id,
1218 status_change=status_lbl,
1218 status_change=status_lbl,
1219 status_change_type=status,
1219 status_change_type=status,
1220 closing_pr=True,
1220 closing_pr=True,
1221 auth_user=auth_user,
1221 auth_user=auth_user,
1222 )
1222 )
1223
1223
1224 # calculate old status before we change it
1224 # calculate old status before we change it
1225 old_calculated_status = pull_request.calculated_review_status()
1225 old_calculated_status = pull_request.calculated_review_status()
1226 ChangesetStatusModel().set_status(
1226 ChangesetStatusModel().set_status(
1227 repo.repo_id,
1227 repo.repo_id,
1228 status,
1228 status,
1229 user.user_id,
1229 user.user_id,
1230 comment=comment,
1230 comment=comment,
1231 pull_request=pull_request.pull_request_id
1231 pull_request=pull_request.pull_request_id
1232 )
1232 )
1233
1233
1234 Session().flush()
1234 Session().flush()
1235 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1235 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1236 # we now calculate the status of pull request again, and based on that
1236 # we now calculate the status of pull request again, and based on that
1237 # calculation trigger status change. This might happen in cases
1237 # calculation trigger status change. This might happen in cases
1238 # that non-reviewer admin closes a pr, which means his vote doesn't
1238 # that non-reviewer admin closes a pr, which means his vote doesn't
1239 # change the status, while if he's a reviewer this might change it.
1239 # change the status, while if he's a reviewer this might change it.
1240 calculated_status = pull_request.calculated_review_status()
1240 calculated_status = pull_request.calculated_review_status()
1241 if old_calculated_status != calculated_status:
1241 if old_calculated_status != calculated_status:
1242 self.trigger_pull_request_hook(
1242 self.trigger_pull_request_hook(
1243 pull_request, user, 'review_status_change',
1243 pull_request, user, 'review_status_change',
1244 data={'status': calculated_status})
1244 data={'status': calculated_status})
1245
1245
1246 # finally close the PR
1246 # finally close the PR
1247 PullRequestModel().close_pull_request(
1247 PullRequestModel().close_pull_request(
1248 pull_request.pull_request_id, user)
1248 pull_request.pull_request_id, user)
1249
1249
1250 return comment, status
1250 return comment, status
1251
1251
1252 def merge_status(self, pull_request, translator=None,
1252 def merge_status(self, pull_request, translator=None,
1253 force_shadow_repo_refresh=False):
1253 force_shadow_repo_refresh=False):
1254 _ = translator or get_current_request().translate
1254 _ = translator or get_current_request().translate
1255
1255
1256 if not self._is_merge_enabled(pull_request):
1256 if not self._is_merge_enabled(pull_request):
1257 return False, _('Server-side pull request merging is disabled.')
1257 return False, _('Server-side pull request merging is disabled.')
1258 if pull_request.is_closed():
1258 if pull_request.is_closed():
1259 return False, _('This pull request is closed.')
1259 return False, _('This pull request is closed.')
1260 merge_possible, msg = self._check_repo_requirements(
1260 merge_possible, msg = self._check_repo_requirements(
1261 target=pull_request.target_repo, source=pull_request.source_repo,
1261 target=pull_request.target_repo, source=pull_request.source_repo,
1262 translator=_)
1262 translator=_)
1263 if not merge_possible:
1263 if not merge_possible:
1264 return merge_possible, msg
1264 return merge_possible, msg
1265
1265
1266 try:
1266 try:
1267 resp = self._try_merge(
1267 resp = self._try_merge(
1268 pull_request,
1268 pull_request,
1269 force_shadow_repo_refresh=force_shadow_repo_refresh)
1269 force_shadow_repo_refresh=force_shadow_repo_refresh)
1270 log.debug("Merge response: %s", resp)
1270 log.debug("Merge response: %s", resp)
1271 status = resp.possible, resp.merge_status_message
1271 status = resp.possible, resp.merge_status_message
1272 except NotImplementedError:
1272 except NotImplementedError:
1273 status = False, _('Pull request merging is not supported.')
1273 status = False, _('Pull request merging is not supported.')
1274
1274
1275 return status
1275 return status
1276
1276
1277 def _check_repo_requirements(self, target, source, translator):
1277 def _check_repo_requirements(self, target, source, translator):
1278 """
1278 """
1279 Check if `target` and `source` have compatible requirements.
1279 Check if `target` and `source` have compatible requirements.
1280
1280
1281 Currently this is just checking for largefiles.
1281 Currently this is just checking for largefiles.
1282 """
1282 """
1283 _ = translator
1283 _ = translator
1284 target_has_largefiles = self._has_largefiles(target)
1284 target_has_largefiles = self._has_largefiles(target)
1285 source_has_largefiles = self._has_largefiles(source)
1285 source_has_largefiles = self._has_largefiles(source)
1286 merge_possible = True
1286 merge_possible = True
1287 message = u''
1287 message = u''
1288
1288
1289 if target_has_largefiles != source_has_largefiles:
1289 if target_has_largefiles != source_has_largefiles:
1290 merge_possible = False
1290 merge_possible = False
1291 if source_has_largefiles:
1291 if source_has_largefiles:
1292 message = _(
1292 message = _(
1293 'Target repository large files support is disabled.')
1293 'Target repository large files support is disabled.')
1294 else:
1294 else:
1295 message = _(
1295 message = _(
1296 'Source repository large files support is disabled.')
1296 'Source repository large files support is disabled.')
1297
1297
1298 return merge_possible, message
1298 return merge_possible, message
1299
1299
1300 def _has_largefiles(self, repo):
1300 def _has_largefiles(self, repo):
1301 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1301 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1302 'extensions', 'largefiles')
1302 'extensions', 'largefiles')
1303 return largefiles_ui and largefiles_ui[0].active
1303 return largefiles_ui and largefiles_ui[0].active
1304
1304
1305 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1305 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1306 """
1306 """
1307 Try to merge the pull request and return the merge status.
1307 Try to merge the pull request and return the merge status.
1308 """
1308 """
1309 log.debug(
1309 log.debug(
1310 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1310 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1311 pull_request.pull_request_id, force_shadow_repo_refresh)
1311 pull_request.pull_request_id, force_shadow_repo_refresh)
1312 target_vcs = pull_request.target_repo.scm_instance()
1312 target_vcs = pull_request.target_repo.scm_instance()
1313 # Refresh the target reference.
1313 # Refresh the target reference.
1314 try:
1314 try:
1315 target_ref = self._refresh_reference(
1315 target_ref = self._refresh_reference(
1316 pull_request.target_ref_parts, target_vcs)
1316 pull_request.target_ref_parts, target_vcs)
1317 except CommitDoesNotExistError:
1317 except CommitDoesNotExistError:
1318 merge_state = MergeResponse(
1318 merge_state = MergeResponse(
1319 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1319 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1320 metadata={'target_ref': pull_request.target_ref_parts})
1320 metadata={'target_ref': pull_request.target_ref_parts})
1321 return merge_state
1321 return merge_state
1322
1322
1323 target_locked = pull_request.target_repo.locked
1323 target_locked = pull_request.target_repo.locked
1324 if target_locked and target_locked[0]:
1324 if target_locked and target_locked[0]:
1325 locked_by = 'user:{}'.format(target_locked[0])
1325 locked_by = 'user:{}'.format(target_locked[0])
1326 log.debug("The target repository is locked by %s.", locked_by)
1326 log.debug("The target repository is locked by %s.", locked_by)
1327 merge_state = MergeResponse(
1327 merge_state = MergeResponse(
1328 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1328 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1329 metadata={'locked_by': locked_by})
1329 metadata={'locked_by': locked_by})
1330 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1330 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1331 pull_request, target_ref):
1331 pull_request, target_ref):
1332 log.debug("Refreshing the merge status of the repository.")
1332 log.debug("Refreshing the merge status of the repository.")
1333 merge_state = self._refresh_merge_state(
1333 merge_state = self._refresh_merge_state(
1334 pull_request, target_vcs, target_ref)
1334 pull_request, target_vcs, target_ref)
1335 else:
1335 else:
1336 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1336 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1337 metadata = {
1337 metadata = {
1338 'unresolved_files': '',
1338 'target_ref': pull_request.target_ref_parts,
1339 'target_ref': pull_request.target_ref_parts,
1339 'source_ref': pull_request.source_ref_parts,
1340 'source_ref': pull_request.source_ref_parts,
1340 }
1341 }
1341 if not possible and target_ref.type == 'branch':
1342 if not possible and target_ref.type == 'branch':
1342 # NOTE(marcink): case for mercurial multiple heads on branch
1343 # NOTE(marcink): case for mercurial multiple heads on branch
1343 heads = target_vcs._heads(target_ref.name)
1344 heads = target_vcs._heads(target_ref.name)
1344 if len(heads) != 1:
1345 if len(heads) != 1:
1345 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1346 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1346 metadata.update({
1347 metadata.update({
1347 'heads': heads
1348 'heads': heads
1348 })
1349 })
1349 merge_state = MergeResponse(
1350 merge_state = MergeResponse(
1350 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1351 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1351
1352
1352 return merge_state
1353 return merge_state
1353
1354
1354 def _refresh_reference(self, reference, vcs_repository):
1355 def _refresh_reference(self, reference, vcs_repository):
1355 if reference.type in self.UPDATABLE_REF_TYPES:
1356 if reference.type in self.UPDATABLE_REF_TYPES:
1356 name_or_id = reference.name
1357 name_or_id = reference.name
1357 else:
1358 else:
1358 name_or_id = reference.commit_id
1359 name_or_id = reference.commit_id
1359
1360
1360 refreshed_commit = vcs_repository.get_commit(name_or_id)
1361 refreshed_commit = vcs_repository.get_commit(name_or_id)
1361 refreshed_reference = Reference(
1362 refreshed_reference = Reference(
1362 reference.type, reference.name, refreshed_commit.raw_id)
1363 reference.type, reference.name, refreshed_commit.raw_id)
1363 return refreshed_reference
1364 return refreshed_reference
1364
1365
1365 def _needs_merge_state_refresh(self, pull_request, target_reference):
1366 def _needs_merge_state_refresh(self, pull_request, target_reference):
1366 return not(
1367 return not(
1367 pull_request.revisions and
1368 pull_request.revisions and
1368 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1369 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1369 target_reference.commit_id == pull_request._last_merge_target_rev)
1370 target_reference.commit_id == pull_request._last_merge_target_rev)
1370
1371
1371 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1372 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1372 workspace_id = self._workspace_id(pull_request)
1373 workspace_id = self._workspace_id(pull_request)
1373 source_vcs = pull_request.source_repo.scm_instance()
1374 source_vcs = pull_request.source_repo.scm_instance()
1374 repo_id = pull_request.target_repo.repo_id
1375 repo_id = pull_request.target_repo.repo_id
1375 use_rebase = self._use_rebase_for_merging(pull_request)
1376 use_rebase = self._use_rebase_for_merging(pull_request)
1376 close_branch = self._close_branch_before_merging(pull_request)
1377 close_branch = self._close_branch_before_merging(pull_request)
1377 merge_state = target_vcs.merge(
1378 merge_state = target_vcs.merge(
1378 repo_id, workspace_id,
1379 repo_id, workspace_id,
1379 target_reference, source_vcs, pull_request.source_ref_parts,
1380 target_reference, source_vcs, pull_request.source_ref_parts,
1380 dry_run=True, use_rebase=use_rebase,
1381 dry_run=True, use_rebase=use_rebase,
1381 close_branch=close_branch)
1382 close_branch=close_branch)
1382
1383
1383 # Do not store the response if there was an unknown error.
1384 # Do not store the response if there was an unknown error.
1384 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1385 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1385 pull_request._last_merge_source_rev = \
1386 pull_request._last_merge_source_rev = \
1386 pull_request.source_ref_parts.commit_id
1387 pull_request.source_ref_parts.commit_id
1387 pull_request._last_merge_target_rev = target_reference.commit_id
1388 pull_request._last_merge_target_rev = target_reference.commit_id
1388 pull_request.last_merge_status = merge_state.failure_reason
1389 pull_request.last_merge_status = merge_state.failure_reason
1389 pull_request.shadow_merge_ref = merge_state.merge_ref
1390 pull_request.shadow_merge_ref = merge_state.merge_ref
1390 Session().add(pull_request)
1391 Session().add(pull_request)
1391 Session().commit()
1392 Session().commit()
1392
1393
1393 return merge_state
1394 return merge_state
1394
1395
1395 def _workspace_id(self, pull_request):
1396 def _workspace_id(self, pull_request):
1396 workspace_id = 'pr-%s' % pull_request.pull_request_id
1397 workspace_id = 'pr-%s' % pull_request.pull_request_id
1397 return workspace_id
1398 return workspace_id
1398
1399
1399 def generate_repo_data(self, repo, commit_id=None, branch=None,
1400 def generate_repo_data(self, repo, commit_id=None, branch=None,
1400 bookmark=None, translator=None):
1401 bookmark=None, translator=None):
1401 from rhodecode.model.repo import RepoModel
1402 from rhodecode.model.repo import RepoModel
1402
1403
1403 all_refs, selected_ref = \
1404 all_refs, selected_ref = \
1404 self._get_repo_pullrequest_sources(
1405 self._get_repo_pullrequest_sources(
1405 repo.scm_instance(), commit_id=commit_id,
1406 repo.scm_instance(), commit_id=commit_id,
1406 branch=branch, bookmark=bookmark, translator=translator)
1407 branch=branch, bookmark=bookmark, translator=translator)
1407
1408
1408 refs_select2 = []
1409 refs_select2 = []
1409 for element in all_refs:
1410 for element in all_refs:
1410 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1411 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1411 refs_select2.append({'text': element[1], 'children': children})
1412 refs_select2.append({'text': element[1], 'children': children})
1412
1413
1413 return {
1414 return {
1414 'user': {
1415 'user': {
1415 'user_id': repo.user.user_id,
1416 'user_id': repo.user.user_id,
1416 'username': repo.user.username,
1417 'username': repo.user.username,
1417 'firstname': repo.user.first_name,
1418 'firstname': repo.user.first_name,
1418 'lastname': repo.user.last_name,
1419 'lastname': repo.user.last_name,
1419 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1420 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1420 },
1421 },
1421 'name': repo.repo_name,
1422 'name': repo.repo_name,
1422 'link': RepoModel().get_url(repo),
1423 'link': RepoModel().get_url(repo),
1423 'description': h.chop_at_smart(repo.description_safe, '\n'),
1424 'description': h.chop_at_smart(repo.description_safe, '\n'),
1424 'refs': {
1425 'refs': {
1425 'all_refs': all_refs,
1426 'all_refs': all_refs,
1426 'selected_ref': selected_ref,
1427 'selected_ref': selected_ref,
1427 'select2_refs': refs_select2
1428 'select2_refs': refs_select2
1428 }
1429 }
1429 }
1430 }
1430
1431
1431 def generate_pullrequest_title(self, source, source_ref, target):
1432 def generate_pullrequest_title(self, source, source_ref, target):
1432 return u'{source}#{at_ref} to {target}'.format(
1433 return u'{source}#{at_ref} to {target}'.format(
1433 source=source,
1434 source=source,
1434 at_ref=source_ref,
1435 at_ref=source_ref,
1435 target=target,
1436 target=target,
1436 )
1437 )
1437
1438
1438 def _cleanup_merge_workspace(self, pull_request):
1439 def _cleanup_merge_workspace(self, pull_request):
1439 # Merging related cleanup
1440 # Merging related cleanup
1440 repo_id = pull_request.target_repo.repo_id
1441 repo_id = pull_request.target_repo.repo_id
1441 target_scm = pull_request.target_repo.scm_instance()
1442 target_scm = pull_request.target_repo.scm_instance()
1442 workspace_id = self._workspace_id(pull_request)
1443 workspace_id = self._workspace_id(pull_request)
1443
1444
1444 try:
1445 try:
1445 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1446 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1446 except NotImplementedError:
1447 except NotImplementedError:
1447 pass
1448 pass
1448
1449
1449 def _get_repo_pullrequest_sources(
1450 def _get_repo_pullrequest_sources(
1450 self, repo, commit_id=None, branch=None, bookmark=None,
1451 self, repo, commit_id=None, branch=None, bookmark=None,
1451 translator=None):
1452 translator=None):
1452 """
1453 """
1453 Return a structure with repo's interesting commits, suitable for
1454 Return a structure with repo's interesting commits, suitable for
1454 the selectors in pullrequest controller
1455 the selectors in pullrequest controller
1455
1456
1456 :param commit_id: a commit that must be in the list somehow
1457 :param commit_id: a commit that must be in the list somehow
1457 and selected by default
1458 and selected by default
1458 :param branch: a branch that must be in the list and selected
1459 :param branch: a branch that must be in the list and selected
1459 by default - even if closed
1460 by default - even if closed
1460 :param bookmark: a bookmark that must be in the list and selected
1461 :param bookmark: a bookmark that must be in the list and selected
1461 """
1462 """
1462 _ = translator or get_current_request().translate
1463 _ = translator or get_current_request().translate
1463
1464
1464 commit_id = safe_str(commit_id) if commit_id else None
1465 commit_id = safe_str(commit_id) if commit_id else None
1465 branch = safe_unicode(branch) if branch else None
1466 branch = safe_unicode(branch) if branch else None
1466 bookmark = safe_unicode(bookmark) if bookmark else None
1467 bookmark = safe_unicode(bookmark) if bookmark else None
1467
1468
1468 selected = None
1469 selected = None
1469
1470
1470 # order matters: first source that has commit_id in it will be selected
1471 # order matters: first source that has commit_id in it will be selected
1471 sources = []
1472 sources = []
1472 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1473 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1473 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1474 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1474
1475
1475 if commit_id:
1476 if commit_id:
1476 ref_commit = (h.short_id(commit_id), commit_id)
1477 ref_commit = (h.short_id(commit_id), commit_id)
1477 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1478 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1478
1479
1479 sources.append(
1480 sources.append(
1480 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1481 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1481 )
1482 )
1482
1483
1483 groups = []
1484 groups = []
1484
1485
1485 for group_key, ref_list, group_name, match in sources:
1486 for group_key, ref_list, group_name, match in sources:
1486 group_refs = []
1487 group_refs = []
1487 for ref_name, ref_id in ref_list:
1488 for ref_name, ref_id in ref_list:
1488 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1489 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
1489 group_refs.append((ref_key, ref_name))
1490 group_refs.append((ref_key, ref_name))
1490
1491
1491 if not selected:
1492 if not selected:
1492 if set([commit_id, match]) & set([ref_id, ref_name]):
1493 if set([commit_id, match]) & set([ref_id, ref_name]):
1493 selected = ref_key
1494 selected = ref_key
1494
1495
1495 if group_refs:
1496 if group_refs:
1496 groups.append((group_refs, group_name))
1497 groups.append((group_refs, group_name))
1497
1498
1498 if not selected:
1499 if not selected:
1499 ref = commit_id or branch or bookmark
1500 ref = commit_id or branch or bookmark
1500 if ref:
1501 if ref:
1501 raise CommitDoesNotExistError(
1502 raise CommitDoesNotExistError(
1502 u'No commit refs could be found matching: {}'.format(ref))
1503 u'No commit refs could be found matching: {}'.format(ref))
1503 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1504 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1504 selected = u'branch:{}:{}'.format(
1505 selected = u'branch:{}:{}'.format(
1505 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1506 safe_unicode(repo.DEFAULT_BRANCH_NAME),
1506 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1507 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
1507 )
1508 )
1508 elif repo.commit_ids:
1509 elif repo.commit_ids:
1509 # make the user select in this case
1510 # make the user select in this case
1510 selected = None
1511 selected = None
1511 else:
1512 else:
1512 raise EmptyRepositoryError()
1513 raise EmptyRepositoryError()
1513 return groups, selected
1514 return groups, selected
1514
1515
1515 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1516 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1516 hide_whitespace_changes, diff_context):
1517 hide_whitespace_changes, diff_context):
1517
1518
1518 return self._get_diff_from_pr_or_version(
1519 return self._get_diff_from_pr_or_version(
1519 source_repo, source_ref_id, target_ref_id,
1520 source_repo, source_ref_id, target_ref_id,
1520 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1521 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1521
1522
1522 def _get_diff_from_pr_or_version(
1523 def _get_diff_from_pr_or_version(
1523 self, source_repo, source_ref_id, target_ref_id,
1524 self, source_repo, source_ref_id, target_ref_id,
1524 hide_whitespace_changes, diff_context):
1525 hide_whitespace_changes, diff_context):
1525
1526
1526 target_commit = source_repo.get_commit(
1527 target_commit = source_repo.get_commit(
1527 commit_id=safe_str(target_ref_id))
1528 commit_id=safe_str(target_ref_id))
1528 source_commit = source_repo.get_commit(
1529 source_commit = source_repo.get_commit(
1529 commit_id=safe_str(source_ref_id))
1530 commit_id=safe_str(source_ref_id))
1530 if isinstance(source_repo, Repository):
1531 if isinstance(source_repo, Repository):
1531 vcs_repo = source_repo.scm_instance()
1532 vcs_repo = source_repo.scm_instance()
1532 else:
1533 else:
1533 vcs_repo = source_repo
1534 vcs_repo = source_repo
1534
1535
1535 # TODO: johbo: In the context of an update, we cannot reach
1536 # TODO: johbo: In the context of an update, we cannot reach
1536 # the old commit anymore with our normal mechanisms. It needs
1537 # the old commit anymore with our normal mechanisms. It needs
1537 # some sort of special support in the vcs layer to avoid this
1538 # some sort of special support in the vcs layer to avoid this
1538 # workaround.
1539 # workaround.
1539 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1540 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1540 vcs_repo.alias == 'git'):
1541 vcs_repo.alias == 'git'):
1541 source_commit.raw_id = safe_str(source_ref_id)
1542 source_commit.raw_id = safe_str(source_ref_id)
1542
1543
1543 log.debug('calculating diff between '
1544 log.debug('calculating diff between '
1544 'source_ref:%s and target_ref:%s for repo `%s`',
1545 'source_ref:%s and target_ref:%s for repo `%s`',
1545 target_ref_id, source_ref_id,
1546 target_ref_id, source_ref_id,
1546 safe_unicode(vcs_repo.path))
1547 safe_unicode(vcs_repo.path))
1547
1548
1548 vcs_diff = vcs_repo.get_diff(
1549 vcs_diff = vcs_repo.get_diff(
1549 commit1=target_commit, commit2=source_commit,
1550 commit1=target_commit, commit2=source_commit,
1550 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1551 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1551 return vcs_diff
1552 return vcs_diff
1552
1553
1553 def _is_merge_enabled(self, pull_request):
1554 def _is_merge_enabled(self, pull_request):
1554 return self._get_general_setting(
1555 return self._get_general_setting(
1555 pull_request, 'rhodecode_pr_merge_enabled')
1556 pull_request, 'rhodecode_pr_merge_enabled')
1556
1557
1557 def _use_rebase_for_merging(self, pull_request):
1558 def _use_rebase_for_merging(self, pull_request):
1558 repo_type = pull_request.target_repo.repo_type
1559 repo_type = pull_request.target_repo.repo_type
1559 if repo_type == 'hg':
1560 if repo_type == 'hg':
1560 return self._get_general_setting(
1561 return self._get_general_setting(
1561 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1562 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1562 elif repo_type == 'git':
1563 elif repo_type == 'git':
1563 return self._get_general_setting(
1564 return self._get_general_setting(
1564 pull_request, 'rhodecode_git_use_rebase_for_merging')
1565 pull_request, 'rhodecode_git_use_rebase_for_merging')
1565
1566
1566 return False
1567 return False
1567
1568
1568 def _close_branch_before_merging(self, pull_request):
1569 def _close_branch_before_merging(self, pull_request):
1569 repo_type = pull_request.target_repo.repo_type
1570 repo_type = pull_request.target_repo.repo_type
1570 if repo_type == 'hg':
1571 if repo_type == 'hg':
1571 return self._get_general_setting(
1572 return self._get_general_setting(
1572 pull_request, 'rhodecode_hg_close_branch_before_merging')
1573 pull_request, 'rhodecode_hg_close_branch_before_merging')
1573 elif repo_type == 'git':
1574 elif repo_type == 'git':
1574 return self._get_general_setting(
1575 return self._get_general_setting(
1575 pull_request, 'rhodecode_git_close_branch_before_merging')
1576 pull_request, 'rhodecode_git_close_branch_before_merging')
1576
1577
1577 return False
1578 return False
1578
1579
1579 def _get_general_setting(self, pull_request, settings_key, default=False):
1580 def _get_general_setting(self, pull_request, settings_key, default=False):
1580 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1581 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1581 settings = settings_model.get_general_settings()
1582 settings = settings_model.get_general_settings()
1582 return settings.get(settings_key, default)
1583 return settings.get(settings_key, default)
1583
1584
1584 def _log_audit_action(self, action, action_data, user, pull_request):
1585 def _log_audit_action(self, action, action_data, user, pull_request):
1585 audit_logger.store(
1586 audit_logger.store(
1586 action=action,
1587 action=action,
1587 action_data=action_data,
1588 action_data=action_data,
1588 user=user,
1589 user=user,
1589 repo=pull_request.target_repo)
1590 repo=pull_request.target_repo)
1590
1591
1591 def get_reviewer_functions(self):
1592 def get_reviewer_functions(self):
1592 """
1593 """
1593 Fetches functions for validation and fetching default reviewers.
1594 Fetches functions for validation and fetching default reviewers.
1594 If available we use the EE package, else we fallback to CE
1595 If available we use the EE package, else we fallback to CE
1595 package functions
1596 package functions
1596 """
1597 """
1597 try:
1598 try:
1598 from rc_reviewers.utils import get_default_reviewers_data
1599 from rc_reviewers.utils import get_default_reviewers_data
1599 from rc_reviewers.utils import validate_default_reviewers
1600 from rc_reviewers.utils import validate_default_reviewers
1600 except ImportError:
1601 except ImportError:
1601 from rhodecode.apps.repository.utils import get_default_reviewers_data
1602 from rhodecode.apps.repository.utils import get_default_reviewers_data
1602 from rhodecode.apps.repository.utils import validate_default_reviewers
1603 from rhodecode.apps.repository.utils import validate_default_reviewers
1603
1604
1604 return get_default_reviewers_data, validate_default_reviewers
1605 return get_default_reviewers_data, validate_default_reviewers
1605
1606
1606
1607
class MergeCheck(object):
    """
    Runs the pre-merge checks for a pull request and collects the outcome
    (individual check errors, review status, and merge possibility) on a
    single result object.
    """
    # keys under which individual check failures are filed in error_details
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'

    def __init__(self):
        self.review_status = None
        self.merge_possible = None
        self.merge_msg = ''
        self.failed = None
        self.errors = []
        self.error_details = OrderedDict()

    def push_error(self, error_type, message, error_key, details):
        """Record one failed check and mark the whole run as failed."""
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message,
        )

    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        """
        Run every merge check for *pull_request* and return a populated
        ``MergeCheck`` instance. With ``fail_early`` the result is returned
        right after the first failing check.
        """
        _ = translator
        checks = cls()

        # check 1: user must be allowed to merge at all
        can_merge = PullRequestModel().check_user_merge(
            pull_request, auth_user)
        if not can_merge:
            log.debug("MergeCheck: cannot merge, approval is pending.")
            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            checks.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return checks

        # check 2: branch permissions on the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the
            # commit in case of bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            checks.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return checks

        # check 3: review status, must be always present
        review_status = pull_request.calculated_review_status()
        checks.review_status = review_status

        if review_status != ChangesetStatus.STATUS_APPROVED:
            log.debug("MergeCheck: cannot merge, approval is pending.")
            msg = _('Pull request reviewer approval is pending.')
            checks.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
            if fail_early:
                return checks

        # check 4: unresolved TODO comments block the merge
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            log.debug(
                "MergeCheck: cannot merge, {} unresolved TODOs left.".format(len(todos)))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(len(todos))
            checks.push_error('warning', msg, cls.TODO_CHECK, todos)
            if fail_early:
                return checks

        # check 5: merge possibility via filesystem simulation + shadow repo
        merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)
        checks.merge_possible = merge_status
        checks.merge_msg = msg
        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            checks.push_error('warning', msg, cls.MERGE_CHECK, None)
            if fail_early:
                return checks

        log.debug('MergeCheck: is failed: %s', checks.failed)
        return checks

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        """
        Describe how the merge would be performed (strategy, branch
        closing) as a dict of display entries.
        """
        _ = translator
        merge_details = {}

        model = PullRequestModel()

        if model._use_rebase_for_merging(pull_request):
            strategy_msg = _('Merge strategy: rebase')
        else:
            strategy_msg = _('Merge strategy: explicit merge commit')
        merge_details['merge_strategy'] = dict(
            details={},
            message=strategy_msg,
        )

        if model._close_branch_before_merging(pull_request):
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed after merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg,
            )

        return merge_details
1753
1754
1754
1755
# Summary of commit-id changes between two versions of a pull request:
# commits added, kept in common, removed, and the resulting total.
ChangeTuple = collections.namedtuple(
    'ChangeTuple', 'added common removed total')

# Per-file change summary: files added, modified, and removed.
FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', 'added modified removed')
@@ -1,964 +1,966 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import textwrap
23 import textwrap
24
24
25 import rhodecode
25 import rhodecode
26 from rhodecode.lib.utils2 import safe_unicode
26 from rhodecode.lib.utils2 import safe_unicode
27 from rhodecode.lib.vcs.backends import get_backend
27 from rhodecode.lib.vcs.backends import get_backend
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 MergeResponse, MergeFailureReason, Reference)
29 MergeResponse, MergeFailureReason, Reference)
30 from rhodecode.lib.vcs.exceptions import RepositoryError
30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 from rhodecode.lib.vcs.nodes import FileNode
31 from rhodecode.lib.vcs.nodes import FileNode
32 from rhodecode.model.comment import CommentsModel
32 from rhodecode.model.comment import CommentsModel
33 from rhodecode.model.db import PullRequest, Session
33 from rhodecode.model.db import PullRequest, Session
34 from rhodecode.model.pull_request import PullRequestModel
34 from rhodecode.model.pull_request import PullRequestModel
35 from rhodecode.model.user import UserModel
35 from rhodecode.model.user import UserModel
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37
37
38
38
39 pytestmark = [
39 pytestmark = [
40 pytest.mark.backends("git", "hg"),
40 pytest.mark.backends("git", "hg"),
41 ]
41 ]
42
42
43
43
44 @pytest.mark.usefixtures('config_stub')
44 @pytest.mark.usefixtures('config_stub')
45 class TestPullRequestModel(object):
45 class TestPullRequestModel(object):
46
46
47 @pytest.fixture()
47 @pytest.fixture()
48 def pull_request(self, request, backend, pr_util):
48 def pull_request(self, request, backend, pr_util):
49 """
49 """
50 A pull request combined with multiples patches.
50 A pull request combined with multiples patches.
51 """
51 """
52 BackendClass = get_backend(backend.alias)
52 BackendClass = get_backend(backend.alias)
53 merge_resp = MergeResponse(
53 merge_resp = MergeResponse(
54 False, False, None, MergeFailureReason.UNKNOWN,
54 False, False, None, MergeFailureReason.UNKNOWN,
55 metadata={'exception': 'MockError'})
55 metadata={'exception': 'MockError'})
56 self.merge_patcher = mock.patch.object(
56 self.merge_patcher = mock.patch.object(
57 BackendClass, 'merge', return_value=merge_resp)
57 BackendClass, 'merge', return_value=merge_resp)
58 self.workspace_remove_patcher = mock.patch.object(
58 self.workspace_remove_patcher = mock.patch.object(
59 BackendClass, 'cleanup_merge_workspace')
59 BackendClass, 'cleanup_merge_workspace')
60
60
61 self.workspace_remove_mock = self.workspace_remove_patcher.start()
61 self.workspace_remove_mock = self.workspace_remove_patcher.start()
62 self.merge_mock = self.merge_patcher.start()
62 self.merge_mock = self.merge_patcher.start()
63 self.comment_patcher = mock.patch(
63 self.comment_patcher = mock.patch(
64 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
64 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
65 self.comment_patcher.start()
65 self.comment_patcher.start()
66 self.notification_patcher = mock.patch(
66 self.notification_patcher = mock.patch(
67 'rhodecode.model.notification.NotificationModel.create')
67 'rhodecode.model.notification.NotificationModel.create')
68 self.notification_patcher.start()
68 self.notification_patcher.start()
69 self.helper_patcher = mock.patch(
69 self.helper_patcher = mock.patch(
70 'rhodecode.lib.helpers.route_path')
70 'rhodecode.lib.helpers.route_path')
71 self.helper_patcher.start()
71 self.helper_patcher.start()
72
72
73 self.hook_patcher = mock.patch.object(PullRequestModel,
73 self.hook_patcher = mock.patch.object(PullRequestModel,
74 'trigger_pull_request_hook')
74 'trigger_pull_request_hook')
75 self.hook_mock = self.hook_patcher.start()
75 self.hook_mock = self.hook_patcher.start()
76
76
77 self.invalidation_patcher = mock.patch(
77 self.invalidation_patcher = mock.patch(
78 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
78 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
79 self.invalidation_mock = self.invalidation_patcher.start()
79 self.invalidation_mock = self.invalidation_patcher.start()
80
80
81 self.pull_request = pr_util.create_pull_request(
81 self.pull_request = pr_util.create_pull_request(
82 mergeable=True, name_suffix=u'Δ…Δ‡')
82 mergeable=True, name_suffix=u'Δ…Δ‡')
83 self.source_commit = self.pull_request.source_ref_parts.commit_id
83 self.source_commit = self.pull_request.source_ref_parts.commit_id
84 self.target_commit = self.pull_request.target_ref_parts.commit_id
84 self.target_commit = self.pull_request.target_ref_parts.commit_id
85 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
85 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
86 self.repo_id = self.pull_request.target_repo.repo_id
86 self.repo_id = self.pull_request.target_repo.repo_id
87
87
88 @request.addfinalizer
88 @request.addfinalizer
89 def cleanup_pull_request():
89 def cleanup_pull_request():
90 calls = [mock.call(
90 calls = [mock.call(
91 self.pull_request, self.pull_request.author, 'create')]
91 self.pull_request, self.pull_request.author, 'create')]
92 self.hook_mock.assert_has_calls(calls)
92 self.hook_mock.assert_has_calls(calls)
93
93
94 self.workspace_remove_patcher.stop()
94 self.workspace_remove_patcher.stop()
95 self.merge_patcher.stop()
95 self.merge_patcher.stop()
96 self.comment_patcher.stop()
96 self.comment_patcher.stop()
97 self.notification_patcher.stop()
97 self.notification_patcher.stop()
98 self.helper_patcher.stop()
98 self.helper_patcher.stop()
99 self.hook_patcher.stop()
99 self.hook_patcher.stop()
100 self.invalidation_patcher.stop()
100 self.invalidation_patcher.stop()
101
101
102 return self.pull_request
102 return self.pull_request
103
103
104 def test_get_all(self, pull_request):
104 def test_get_all(self, pull_request):
105 prs = PullRequestModel().get_all(pull_request.target_repo)
105 prs = PullRequestModel().get_all(pull_request.target_repo)
106 assert isinstance(prs, list)
106 assert isinstance(prs, list)
107 assert len(prs) == 1
107 assert len(prs) == 1
108
108
109 def test_count_all(self, pull_request):
109 def test_count_all(self, pull_request):
110 pr_count = PullRequestModel().count_all(pull_request.target_repo)
110 pr_count = PullRequestModel().count_all(pull_request.target_repo)
111 assert pr_count == 1
111 assert pr_count == 1
112
112
113 def test_get_awaiting_review(self, pull_request):
113 def test_get_awaiting_review(self, pull_request):
114 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
114 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
115 assert isinstance(prs, list)
115 assert isinstance(prs, list)
116 assert len(prs) == 1
116 assert len(prs) == 1
117
117
118 def test_count_awaiting_review(self, pull_request):
118 def test_count_awaiting_review(self, pull_request):
119 pr_count = PullRequestModel().count_awaiting_review(
119 pr_count = PullRequestModel().count_awaiting_review(
120 pull_request.target_repo)
120 pull_request.target_repo)
121 assert pr_count == 1
121 assert pr_count == 1
122
122
123 def test_get_awaiting_my_review(self, pull_request):
123 def test_get_awaiting_my_review(self, pull_request):
124 PullRequestModel().update_reviewers(
124 PullRequestModel().update_reviewers(
125 pull_request, [(pull_request.author, ['author'], False, [])],
125 pull_request, [(pull_request.author, ['author'], False, [])],
126 pull_request.author)
126 pull_request.author)
127 Session().commit()
127 Session().commit()
128
128
129 prs = PullRequestModel().get_awaiting_my_review(
129 prs = PullRequestModel().get_awaiting_my_review(
130 pull_request.target_repo, user_id=pull_request.author.user_id)
130 pull_request.target_repo, user_id=pull_request.author.user_id)
131 assert isinstance(prs, list)
131 assert isinstance(prs, list)
132 assert len(prs) == 1
132 assert len(prs) == 1
133
133
134 def test_count_awaiting_my_review(self, pull_request):
134 def test_count_awaiting_my_review(self, pull_request):
135 PullRequestModel().update_reviewers(
135 PullRequestModel().update_reviewers(
136 pull_request, [(pull_request.author, ['author'], False, [])],
136 pull_request, [(pull_request.author, ['author'], False, [])],
137 pull_request.author)
137 pull_request.author)
138 Session().commit()
138 Session().commit()
139
139
140 pr_count = PullRequestModel().count_awaiting_my_review(
140 pr_count = PullRequestModel().count_awaiting_my_review(
141 pull_request.target_repo, user_id=pull_request.author.user_id)
141 pull_request.target_repo, user_id=pull_request.author.user_id)
142 assert pr_count == 1
142 assert pr_count == 1
143
143
144 def test_delete_calls_cleanup_merge(self, pull_request):
144 def test_delete_calls_cleanup_merge(self, pull_request):
145 repo_id = pull_request.target_repo.repo_id
145 repo_id = pull_request.target_repo.repo_id
146 PullRequestModel().delete(pull_request, pull_request.author)
146 PullRequestModel().delete(pull_request, pull_request.author)
147 Session().commit()
147 Session().commit()
148
148
149 self.workspace_remove_mock.assert_called_once_with(
149 self.workspace_remove_mock.assert_called_once_with(
150 repo_id, self.workspace_id)
150 repo_id, self.workspace_id)
151
151
152 def test_close_calls_cleanup_and_hook(self, pull_request):
152 def test_close_calls_cleanup_and_hook(self, pull_request):
153 PullRequestModel().close_pull_request(
153 PullRequestModel().close_pull_request(
154 pull_request, pull_request.author)
154 pull_request, pull_request.author)
155 Session().commit()
155 Session().commit()
156
156
157 repo_id = pull_request.target_repo.repo_id
157 repo_id = pull_request.target_repo.repo_id
158
158
159 self.workspace_remove_mock.assert_called_once_with(
159 self.workspace_remove_mock.assert_called_once_with(
160 repo_id, self.workspace_id)
160 repo_id, self.workspace_id)
161 self.hook_mock.assert_called_with(
161 self.hook_mock.assert_called_with(
162 self.pull_request, self.pull_request.author, 'close')
162 self.pull_request, self.pull_request.author, 'close')
163
163
164 def test_merge_status(self, pull_request):
164 def test_merge_status(self, pull_request):
165 self.merge_mock.return_value = MergeResponse(
165 self.merge_mock.return_value = MergeResponse(
166 True, False, None, MergeFailureReason.NONE)
166 True, False, None, MergeFailureReason.NONE)
167
167
168 assert pull_request._last_merge_source_rev is None
168 assert pull_request._last_merge_source_rev is None
169 assert pull_request._last_merge_target_rev is None
169 assert pull_request._last_merge_target_rev is None
170 assert pull_request.last_merge_status is None
170 assert pull_request.last_merge_status is None
171
171
172 status, msg = PullRequestModel().merge_status(pull_request)
172 status, msg = PullRequestModel().merge_status(pull_request)
173 assert status is True
173 assert status is True
174 assert msg == 'This pull request can be automatically merged.'
174 assert msg == 'This pull request can be automatically merged.'
175 self.merge_mock.assert_called_with(
175 self.merge_mock.assert_called_with(
176 self.repo_id, self.workspace_id,
176 self.repo_id, self.workspace_id,
177 pull_request.target_ref_parts,
177 pull_request.target_ref_parts,
178 pull_request.source_repo.scm_instance(),
178 pull_request.source_repo.scm_instance(),
179 pull_request.source_ref_parts, dry_run=True,
179 pull_request.source_ref_parts, dry_run=True,
180 use_rebase=False, close_branch=False)
180 use_rebase=False, close_branch=False)
181
181
182 assert pull_request._last_merge_source_rev == self.source_commit
182 assert pull_request._last_merge_source_rev == self.source_commit
183 assert pull_request._last_merge_target_rev == self.target_commit
183 assert pull_request._last_merge_target_rev == self.target_commit
184 assert pull_request.last_merge_status is MergeFailureReason.NONE
184 assert pull_request.last_merge_status is MergeFailureReason.NONE
185
185
186 self.merge_mock.reset_mock()
186 self.merge_mock.reset_mock()
187 status, msg = PullRequestModel().merge_status(pull_request)
187 status, msg = PullRequestModel().merge_status(pull_request)
188 assert status is True
188 assert status is True
189 assert msg == 'This pull request can be automatically merged.'
189 assert msg == 'This pull request can be automatically merged.'
190 assert self.merge_mock.called is False
190 assert self.merge_mock.called is False
191
191
192 def test_merge_status_known_failure(self, pull_request):
192 def test_merge_status_known_failure(self, pull_request):
193 self.merge_mock.return_value = MergeResponse(
193 self.merge_mock.return_value = MergeResponse(
194 False, False, None, MergeFailureReason.MERGE_FAILED)
194 False, False, None, MergeFailureReason.MERGE_FAILED,
195 metadata={'unresolved_files': 'file1'})
195
196
196 assert pull_request._last_merge_source_rev is None
197 assert pull_request._last_merge_source_rev is None
197 assert pull_request._last_merge_target_rev is None
198 assert pull_request._last_merge_target_rev is None
198 assert pull_request.last_merge_status is None
199 assert pull_request.last_merge_status is None
199
200
200 status, msg = PullRequestModel().merge_status(pull_request)
201 status, msg = PullRequestModel().merge_status(pull_request)
201 assert status is False
202 assert status is False
202 assert msg == 'This pull request cannot be merged because of merge conflicts.'
203 assert msg == 'This pull request cannot be merged because of merge conflicts. file1'
203 self.merge_mock.assert_called_with(
204 self.merge_mock.assert_called_with(
204 self.repo_id, self.workspace_id,
205 self.repo_id, self.workspace_id,
205 pull_request.target_ref_parts,
206 pull_request.target_ref_parts,
206 pull_request.source_repo.scm_instance(),
207 pull_request.source_repo.scm_instance(),
207 pull_request.source_ref_parts, dry_run=True,
208 pull_request.source_ref_parts, dry_run=True,
208 use_rebase=False, close_branch=False)
209 use_rebase=False, close_branch=False)
209
210
210 assert pull_request._last_merge_source_rev == self.source_commit
211 assert pull_request._last_merge_source_rev == self.source_commit
211 assert pull_request._last_merge_target_rev == self.target_commit
212 assert pull_request._last_merge_target_rev == self.target_commit
212 assert (
213 assert pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED
213 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
214
214
215 self.merge_mock.reset_mock()
215 self.merge_mock.reset_mock()
216 status, msg = PullRequestModel().merge_status(pull_request)
216 status, msg = PullRequestModel().merge_status(pull_request)
217 assert status is False
217 assert status is False
218 assert msg == 'This pull request cannot be merged because of merge conflicts.'
218 assert msg == 'This pull request cannot be merged because of merge conflicts. '
219 assert self.merge_mock.called is False
219 assert self.merge_mock.called is False
220
220
221 def test_merge_status_unknown_failure(self, pull_request):
221 def test_merge_status_unknown_failure(self, pull_request):
222 self.merge_mock.return_value = MergeResponse(
222 self.merge_mock.return_value = MergeResponse(
223 False, False, None, MergeFailureReason.UNKNOWN,
223 False, False, None, MergeFailureReason.UNKNOWN,
224 metadata={'exception': 'MockError'})
224 metadata={'exception': 'MockError'})
225
225
226 assert pull_request._last_merge_source_rev is None
226 assert pull_request._last_merge_source_rev is None
227 assert pull_request._last_merge_target_rev is None
227 assert pull_request._last_merge_target_rev is None
228 assert pull_request.last_merge_status is None
228 assert pull_request.last_merge_status is None
229
229
230 status, msg = PullRequestModel().merge_status(pull_request)
230 status, msg = PullRequestModel().merge_status(pull_request)
231 assert status is False
231 assert status is False
232 assert msg == (
232 assert msg == (
233 'This pull request cannot be merged because of an unhandled exception. '
233 'This pull request cannot be merged because of an unhandled exception. '
234 'MockError')
234 'MockError')
235 self.merge_mock.assert_called_with(
235 self.merge_mock.assert_called_with(
236 self.repo_id, self.workspace_id,
236 self.repo_id, self.workspace_id,
237 pull_request.target_ref_parts,
237 pull_request.target_ref_parts,
238 pull_request.source_repo.scm_instance(),
238 pull_request.source_repo.scm_instance(),
239 pull_request.source_ref_parts, dry_run=True,
239 pull_request.source_ref_parts, dry_run=True,
240 use_rebase=False, close_branch=False)
240 use_rebase=False, close_branch=False)
241
241
242 assert pull_request._last_merge_source_rev is None
242 assert pull_request._last_merge_source_rev is None
243 assert pull_request._last_merge_target_rev is None
243 assert pull_request._last_merge_target_rev is None
244 assert pull_request.last_merge_status is None
244 assert pull_request.last_merge_status is None
245
245
246 self.merge_mock.reset_mock()
246 self.merge_mock.reset_mock()
247 status, msg = PullRequestModel().merge_status(pull_request)
247 status, msg = PullRequestModel().merge_status(pull_request)
248 assert status is False
248 assert status is False
249 assert msg == (
249 assert msg == (
250 'This pull request cannot be merged because of an unhandled exception. '
250 'This pull request cannot be merged because of an unhandled exception. '
251 'MockError')
251 'MockError')
252 assert self.merge_mock.called is True
252 assert self.merge_mock.called is True
253
253
254 def test_merge_status_when_target_is_locked(self, pull_request):
254 def test_merge_status_when_target_is_locked(self, pull_request):
255 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
255 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
256 status, msg = PullRequestModel().merge_status(pull_request)
256 status, msg = PullRequestModel().merge_status(pull_request)
257 assert status is False
257 assert status is False
258 assert msg == (
258 assert msg == (
259 'This pull request cannot be merged because the target repository '
259 'This pull request cannot be merged because the target repository '
260 'is locked by user:1.')
260 'is locked by user:1.')
261
261
262 def test_merge_status_requirements_check_target(self, pull_request):
262 def test_merge_status_requirements_check_target(self, pull_request):
263
263
264 def has_largefiles(self, repo):
264 def has_largefiles(self, repo):
265 return repo == pull_request.source_repo
265 return repo == pull_request.source_repo
266
266
267 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
267 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
268 with patcher:
268 with patcher:
269 status, msg = PullRequestModel().merge_status(pull_request)
269 status, msg = PullRequestModel().merge_status(pull_request)
270
270
271 assert status is False
271 assert status is False
272 assert msg == 'Target repository large files support is disabled.'
272 assert msg == 'Target repository large files support is disabled.'
273
273
274 def test_merge_status_requirements_check_source(self, pull_request):
274 def test_merge_status_requirements_check_source(self, pull_request):
275
275
276 def has_largefiles(self, repo):
276 def has_largefiles(self, repo):
277 return repo == pull_request.target_repo
277 return repo == pull_request.target_repo
278
278
279 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
279 patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
280 with patcher:
280 with patcher:
281 status, msg = PullRequestModel().merge_status(pull_request)
281 status, msg = PullRequestModel().merge_status(pull_request)
282
282
283 assert status is False
283 assert status is False
284 assert msg == 'Source repository large files support is disabled.'
284 assert msg == 'Source repository large files support is disabled.'
285
285
286 def test_merge(self, pull_request, merge_extras):
286 def test_merge(self, pull_request, merge_extras):
287 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
287 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
288 merge_ref = Reference(
288 merge_ref = Reference(
289 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
289 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
290 self.merge_mock.return_value = MergeResponse(
290 self.merge_mock.return_value = MergeResponse(
291 True, True, merge_ref, MergeFailureReason.NONE)
291 True, True, merge_ref, MergeFailureReason.NONE)
292
292
293 merge_extras['repository'] = pull_request.target_repo.repo_name
293 merge_extras['repository'] = pull_request.target_repo.repo_name
294 PullRequestModel().merge_repo(
294 PullRequestModel().merge_repo(
295 pull_request, pull_request.author, extras=merge_extras)
295 pull_request, pull_request.author, extras=merge_extras)
296 Session().commit()
296 Session().commit()
297
297
298 message = (
298 message = (
299 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
299 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
300 u'\n\n {pr_title}'.format(
300 u'\n\n {pr_title}'.format(
301 pr_id=pull_request.pull_request_id,
301 pr_id=pull_request.pull_request_id,
302 source_repo=safe_unicode(
302 source_repo=safe_unicode(
303 pull_request.source_repo.scm_instance().name),
303 pull_request.source_repo.scm_instance().name),
304 source_ref_name=pull_request.source_ref_parts.name,
304 source_ref_name=pull_request.source_ref_parts.name,
305 pr_title=safe_unicode(pull_request.title)
305 pr_title=safe_unicode(pull_request.title)
306 )
306 )
307 )
307 )
308 self.merge_mock.assert_called_with(
308 self.merge_mock.assert_called_with(
309 self.repo_id, self.workspace_id,
309 self.repo_id, self.workspace_id,
310 pull_request.target_ref_parts,
310 pull_request.target_ref_parts,
311 pull_request.source_repo.scm_instance(),
311 pull_request.source_repo.scm_instance(),
312 pull_request.source_ref_parts,
312 pull_request.source_ref_parts,
313 user_name=user.short_contact, user_email=user.email, message=message,
313 user_name=user.short_contact, user_email=user.email, message=message,
314 use_rebase=False, close_branch=False
314 use_rebase=False, close_branch=False
315 )
315 )
316 self.invalidation_mock.assert_called_once_with(
316 self.invalidation_mock.assert_called_once_with(
317 pull_request.target_repo.repo_name)
317 pull_request.target_repo.repo_name)
318
318
319 self.hook_mock.assert_called_with(
319 self.hook_mock.assert_called_with(
320 self.pull_request, self.pull_request.author, 'merge')
320 self.pull_request, self.pull_request.author, 'merge')
321
321
322 pull_request = PullRequest.get(pull_request.pull_request_id)
322 pull_request = PullRequest.get(pull_request.pull_request_id)
323 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
323 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
324
324
325 def test_merge_with_status_lock(self, pull_request, merge_extras):
325 def test_merge_with_status_lock(self, pull_request, merge_extras):
326 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
326 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
327 merge_ref = Reference(
327 merge_ref = Reference(
328 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
328 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
329 self.merge_mock.return_value = MergeResponse(
329 self.merge_mock.return_value = MergeResponse(
330 True, True, merge_ref, MergeFailureReason.NONE)
330 True, True, merge_ref, MergeFailureReason.NONE)
331
331
332 merge_extras['repository'] = pull_request.target_repo.repo_name
332 merge_extras['repository'] = pull_request.target_repo.repo_name
333
333
334 with pull_request.set_state(PullRequest.STATE_UPDATING):
334 with pull_request.set_state(PullRequest.STATE_UPDATING):
335 assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
335 assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
336 PullRequestModel().merge_repo(
336 PullRequestModel().merge_repo(
337 pull_request, pull_request.author, extras=merge_extras)
337 pull_request, pull_request.author, extras=merge_extras)
338 Session().commit()
338 Session().commit()
339
339
340 assert pull_request.pull_request_state == PullRequest.STATE_CREATED
340 assert pull_request.pull_request_state == PullRequest.STATE_CREATED
341
341
342 message = (
342 message = (
343 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
343 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
344 u'\n\n {pr_title}'.format(
344 u'\n\n {pr_title}'.format(
345 pr_id=pull_request.pull_request_id,
345 pr_id=pull_request.pull_request_id,
346 source_repo=safe_unicode(
346 source_repo=safe_unicode(
347 pull_request.source_repo.scm_instance().name),
347 pull_request.source_repo.scm_instance().name),
348 source_ref_name=pull_request.source_ref_parts.name,
348 source_ref_name=pull_request.source_ref_parts.name,
349 pr_title=safe_unicode(pull_request.title)
349 pr_title=safe_unicode(pull_request.title)
350 )
350 )
351 )
351 )
352 self.merge_mock.assert_called_with(
352 self.merge_mock.assert_called_with(
353 self.repo_id, self.workspace_id,
353 self.repo_id, self.workspace_id,
354 pull_request.target_ref_parts,
354 pull_request.target_ref_parts,
355 pull_request.source_repo.scm_instance(),
355 pull_request.source_repo.scm_instance(),
356 pull_request.source_ref_parts,
356 pull_request.source_ref_parts,
357 user_name=user.short_contact, user_email=user.email, message=message,
357 user_name=user.short_contact, user_email=user.email, message=message,
358 use_rebase=False, close_branch=False
358 use_rebase=False, close_branch=False
359 )
359 )
360 self.invalidation_mock.assert_called_once_with(
360 self.invalidation_mock.assert_called_once_with(
361 pull_request.target_repo.repo_name)
361 pull_request.target_repo.repo_name)
362
362
363 self.hook_mock.assert_called_with(
363 self.hook_mock.assert_called_with(
364 self.pull_request, self.pull_request.author, 'merge')
364 self.pull_request, self.pull_request.author, 'merge')
365
365
366 pull_request = PullRequest.get(pull_request.pull_request_id)
366 pull_request = PullRequest.get(pull_request.pull_request_id)
367 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
367 assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
368
368
369 def test_merge_failed(self, pull_request, merge_extras):
369 def test_merge_failed(self, pull_request, merge_extras):
370 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
370 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
371 merge_ref = Reference(
371 merge_ref = Reference(
372 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
372 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
373 self.merge_mock.return_value = MergeResponse(
373 self.merge_mock.return_value = MergeResponse(
374 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
374 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
375
375
376 merge_extras['repository'] = pull_request.target_repo.repo_name
376 merge_extras['repository'] = pull_request.target_repo.repo_name
377 PullRequestModel().merge_repo(
377 PullRequestModel().merge_repo(
378 pull_request, pull_request.author, extras=merge_extras)
378 pull_request, pull_request.author, extras=merge_extras)
379 Session().commit()
379 Session().commit()
380
380
381 message = (
381 message = (
382 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
382 u'Merge pull request !{pr_id} from {source_repo} {source_ref_name}'
383 u'\n\n {pr_title}'.format(
383 u'\n\n {pr_title}'.format(
384 pr_id=pull_request.pull_request_id,
384 pr_id=pull_request.pull_request_id,
385 source_repo=safe_unicode(
385 source_repo=safe_unicode(
386 pull_request.source_repo.scm_instance().name),
386 pull_request.source_repo.scm_instance().name),
387 source_ref_name=pull_request.source_ref_parts.name,
387 source_ref_name=pull_request.source_ref_parts.name,
388 pr_title=safe_unicode(pull_request.title)
388 pr_title=safe_unicode(pull_request.title)
389 )
389 )
390 )
390 )
391 self.merge_mock.assert_called_with(
391 self.merge_mock.assert_called_with(
392 self.repo_id, self.workspace_id,
392 self.repo_id, self.workspace_id,
393 pull_request.target_ref_parts,
393 pull_request.target_ref_parts,
394 pull_request.source_repo.scm_instance(),
394 pull_request.source_repo.scm_instance(),
395 pull_request.source_ref_parts,
395 pull_request.source_ref_parts,
396 user_name=user.short_contact, user_email=user.email, message=message,
396 user_name=user.short_contact, user_email=user.email, message=message,
397 use_rebase=False, close_branch=False
397 use_rebase=False, close_branch=False
398 )
398 )
399
399
400 pull_request = PullRequest.get(pull_request.pull_request_id)
400 pull_request = PullRequest.get(pull_request.pull_request_id)
401 assert self.invalidation_mock.called is False
401 assert self.invalidation_mock.called is False
402 assert pull_request.merge_rev is None
402 assert pull_request.merge_rev is None
403
403
404 def test_get_commit_ids(self, pull_request):
404 def test_get_commit_ids(self, pull_request):
405 # The PR has been not merget yet, so expect an exception
405 # The PR has been not merget yet, so expect an exception
406 with pytest.raises(ValueError):
406 with pytest.raises(ValueError):
407 PullRequestModel()._get_commit_ids(pull_request)
407 PullRequestModel()._get_commit_ids(pull_request)
408
408
409 # Merge revision is in the revisions list
409 # Merge revision is in the revisions list
410 pull_request.merge_rev = pull_request.revisions[0]
410 pull_request.merge_rev = pull_request.revisions[0]
411 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
411 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
412 assert commit_ids == pull_request.revisions
412 assert commit_ids == pull_request.revisions
413
413
414 # Merge revision is not in the revisions list
414 # Merge revision is not in the revisions list
415 pull_request.merge_rev = 'f000' * 10
415 pull_request.merge_rev = 'f000' * 10
416 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
416 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
417 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
417 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
418
418
419 def test_get_diff_from_pr_version(self, pull_request):
419 def test_get_diff_from_pr_version(self, pull_request):
420 source_repo = pull_request.source_repo
420 source_repo = pull_request.source_repo
421 source_ref_id = pull_request.source_ref_parts.commit_id
421 source_ref_id = pull_request.source_ref_parts.commit_id
422 target_ref_id = pull_request.target_ref_parts.commit_id
422 target_ref_id = pull_request.target_ref_parts.commit_id
423 diff = PullRequestModel()._get_diff_from_pr_or_version(
423 diff = PullRequestModel()._get_diff_from_pr_or_version(
424 source_repo, source_ref_id, target_ref_id,
424 source_repo, source_ref_id, target_ref_id,
425 hide_whitespace_changes=False, diff_context=6)
425 hide_whitespace_changes=False, diff_context=6)
426 assert 'file_1' in diff.raw
426 assert 'file_1' in diff.raw
427
427
428 def test_generate_title_returns_unicode(self):
428 def test_generate_title_returns_unicode(self):
429 title = PullRequestModel().generate_pullrequest_title(
429 title = PullRequestModel().generate_pullrequest_title(
430 source='source-dummy',
430 source='source-dummy',
431 source_ref='source-ref-dummy',
431 source_ref='source-ref-dummy',
432 target='target-dummy',
432 target='target-dummy',
433 )
433 )
434 assert type(title) == unicode
434 assert type(title) == unicode
435
435
436
436
437 @pytest.mark.usefixtures('config_stub')
437 @pytest.mark.usefixtures('config_stub')
438 class TestIntegrationMerge(object):
438 class TestIntegrationMerge(object):
439 @pytest.mark.parametrize('extra_config', (
439 @pytest.mark.parametrize('extra_config', (
440 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
440 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
441 ))
441 ))
442 def test_merge_triggers_push_hooks(
442 def test_merge_triggers_push_hooks(
443 self, pr_util, user_admin, capture_rcextensions, merge_extras,
443 self, pr_util, user_admin, capture_rcextensions, merge_extras,
444 extra_config):
444 extra_config):
445
445
446 pull_request = pr_util.create_pull_request(
446 pull_request = pr_util.create_pull_request(
447 approved=True, mergeable=True)
447 approved=True, mergeable=True)
448 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
448 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
449 merge_extras['repository'] = pull_request.target_repo.repo_name
449 merge_extras['repository'] = pull_request.target_repo.repo_name
450 Session().commit()
450 Session().commit()
451
451
452 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
452 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
453 merge_state = PullRequestModel().merge_repo(
453 merge_state = PullRequestModel().merge_repo(
454 pull_request, user_admin, extras=merge_extras)
454 pull_request, user_admin, extras=merge_extras)
455 Session().commit()
455 Session().commit()
456
456
457 assert merge_state.executed
457 assert merge_state.executed
458 assert '_pre_push_hook' in capture_rcextensions
458 assert '_pre_push_hook' in capture_rcextensions
459 assert '_push_hook' in capture_rcextensions
459 assert '_push_hook' in capture_rcextensions
460
460
461 def test_merge_can_be_rejected_by_pre_push_hook(
461 def test_merge_can_be_rejected_by_pre_push_hook(
462 self, pr_util, user_admin, capture_rcextensions, merge_extras):
462 self, pr_util, user_admin, capture_rcextensions, merge_extras):
463 pull_request = pr_util.create_pull_request(
463 pull_request = pr_util.create_pull_request(
464 approved=True, mergeable=True)
464 approved=True, mergeable=True)
465 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
465 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
466 merge_extras['repository'] = pull_request.target_repo.repo_name
466 merge_extras['repository'] = pull_request.target_repo.repo_name
467 Session().commit()
467 Session().commit()
468
468
469 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
469 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
470 pre_pull.side_effect = RepositoryError("Disallow push!")
470 pre_pull.side_effect = RepositoryError("Disallow push!")
471 merge_status = PullRequestModel().merge_repo(
471 merge_status = PullRequestModel().merge_repo(
472 pull_request, user_admin, extras=merge_extras)
472 pull_request, user_admin, extras=merge_extras)
473 Session().commit()
473 Session().commit()
474
474
475 assert not merge_status.executed
475 assert not merge_status.executed
476 assert 'pre_push' not in capture_rcextensions
476 assert 'pre_push' not in capture_rcextensions
477 assert 'post_push' not in capture_rcextensions
477 assert 'post_push' not in capture_rcextensions
478
478
479 def test_merge_fails_if_target_is_locked(
479 def test_merge_fails_if_target_is_locked(
480 self, pr_util, user_regular, merge_extras):
480 self, pr_util, user_regular, merge_extras):
481 pull_request = pr_util.create_pull_request(
481 pull_request = pr_util.create_pull_request(
482 approved=True, mergeable=True)
482 approved=True, mergeable=True)
483 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
483 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
484 pull_request.target_repo.locked = locked_by
484 pull_request.target_repo.locked = locked_by
485 # TODO: johbo: Check if this can work based on the database, currently
485 # TODO: johbo: Check if this can work based on the database, currently
486 # all data is pre-computed, that's why just updating the DB is not
486 # all data is pre-computed, that's why just updating the DB is not
487 # enough.
487 # enough.
488 merge_extras['locked_by'] = locked_by
488 merge_extras['locked_by'] = locked_by
489 merge_extras['repository'] = pull_request.target_repo.repo_name
489 merge_extras['repository'] = pull_request.target_repo.repo_name
490 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
490 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
491 Session().commit()
491 Session().commit()
492 merge_status = PullRequestModel().merge_repo(
492 merge_status = PullRequestModel().merge_repo(
493 pull_request, user_regular, extras=merge_extras)
493 pull_request, user_regular, extras=merge_extras)
494 Session().commit()
494 Session().commit()
495
495
496 assert not merge_status.executed
496 assert not merge_status.executed
497
497
498
498
499 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
499 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
500 (False, 1, 0),
500 (False, 1, 0),
501 (True, 0, 1),
501 (True, 0, 1),
502 ])
502 ])
503 def test_outdated_comments(
503 def test_outdated_comments(
504 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
504 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
505 pull_request = pr_util.create_pull_request()
505 pull_request = pr_util.create_pull_request()
506 pr_util.create_inline_comment(file_path='not_in_updated_diff')
506 pr_util.create_inline_comment(file_path='not_in_updated_diff')
507
507
508 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
508 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
509 pr_util.add_one_commit()
509 pr_util.add_one_commit()
510 assert_inline_comments(
510 assert_inline_comments(
511 pull_request, visible=inlines_count, outdated=outdated_count)
511 pull_request, visible=inlines_count, outdated=outdated_count)
512 outdated_comment_mock.assert_called_with(pull_request)
512 outdated_comment_mock.assert_called_with(pull_request)
513
513
514
514
515 @pytest.mark.parametrize('mr_type, expected_msg', [
515 @pytest.mark.parametrize('mr_type, expected_msg', [
516 (MergeFailureReason.NONE,
516 (MergeFailureReason.NONE,
517 'This pull request can be automatically merged.'),
517 'This pull request can be automatically merged.'),
518 (MergeFailureReason.UNKNOWN,
518 (MergeFailureReason.UNKNOWN,
519 'This pull request cannot be merged because of an unhandled exception. CRASH'),
519 'This pull request cannot be merged because of an unhandled exception. CRASH'),
520 (MergeFailureReason.MERGE_FAILED,
520 (MergeFailureReason.MERGE_FAILED,
521 'This pull request cannot be merged because of merge conflicts.'),
521 'This pull request cannot be merged because of merge conflicts. CONFLICT_FILE'),
522 (MergeFailureReason.PUSH_FAILED,
522 (MergeFailureReason.PUSH_FAILED,
523 'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
523 'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
524 (MergeFailureReason.TARGET_IS_NOT_HEAD,
524 (MergeFailureReason.TARGET_IS_NOT_HEAD,
525 'This pull request cannot be merged because the target `ref_name` is not a head.'),
525 'This pull request cannot be merged because the target `ref_name` is not a head.'),
526 (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
526 (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
527 'This pull request cannot be merged because the source contains more branches than the target.'),
527 'This pull request cannot be merged because the source contains more branches than the target.'),
528 (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
528 (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
529 'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
529 'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
530 (MergeFailureReason.TARGET_IS_LOCKED,
530 (MergeFailureReason.TARGET_IS_LOCKED,
531 'This pull request cannot be merged because the target repository is locked by user:123.'),
531 'This pull request cannot be merged because the target repository is locked by user:123.'),
532 (MergeFailureReason.MISSING_TARGET_REF,
532 (MergeFailureReason.MISSING_TARGET_REF,
533 'This pull request cannot be merged because the target reference `ref_name` is missing.'),
533 'This pull request cannot be merged because the target reference `ref_name` is missing.'),
534 (MergeFailureReason.MISSING_SOURCE_REF,
534 (MergeFailureReason.MISSING_SOURCE_REF,
535 'This pull request cannot be merged because the source reference `ref_name` is missing.'),
535 'This pull request cannot be merged because the source reference `ref_name` is missing.'),
536 (MergeFailureReason.SUBREPO_MERGE_FAILED,
536 (MergeFailureReason.SUBREPO_MERGE_FAILED,
537 'This pull request cannot be merged because of conflicts related to sub repositories.'),
537 'This pull request cannot be merged because of conflicts related to sub repositories.'),
538
538
539 ])
539 ])
540 def test_merge_response_message(mr_type, expected_msg):
540 def test_merge_response_message(mr_type, expected_msg):
541 merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
541 merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
542 metadata = {
542 metadata = {
543 'unresolved_files': 'CONFLICT_FILE',
543 'exception': "CRASH",
544 'exception': "CRASH",
544 'target': 'some-repo',
545 'target': 'some-repo',
545 'merge_commit': 'merge_commit',
546 'merge_commit': 'merge_commit',
546 'target_ref': merge_ref,
547 'target_ref': merge_ref,
547 'source_ref': merge_ref,
548 'source_ref': merge_ref,
548 'heads': ','.join(['a', 'b', 'c']),
549 'heads': ','.join(['a', 'b', 'c']),
549 'locked_by': 'user:123'}
550 'locked_by': 'user:123'
551 }
550
552
551 merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
553 merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
552 assert merge_response.merge_status_message == expected_msg
554 assert merge_response.merge_status_message == expected_msg
553
555
554
556
555 @pytest.fixture()
557 @pytest.fixture()
556 def merge_extras(user_regular):
558 def merge_extras(user_regular):
557 """
559 """
558 Context for the vcs operation when running a merge.
560 Context for the vcs operation when running a merge.
559 """
561 """
560 extras = {
562 extras = {
561 'ip': '127.0.0.1',
563 'ip': '127.0.0.1',
562 'username': user_regular.username,
564 'username': user_regular.username,
563 'user_id': user_regular.user_id,
565 'user_id': user_regular.user_id,
564 'action': 'push',
566 'action': 'push',
565 'repository': 'fake_target_repo_name',
567 'repository': 'fake_target_repo_name',
566 'scm': 'git',
568 'scm': 'git',
567 'config': 'fake_config_ini_path',
569 'config': 'fake_config_ini_path',
568 'repo_store': '',
570 'repo_store': '',
569 'make_lock': None,
571 'make_lock': None,
570 'locked_by': [None, None, None],
572 'locked_by': [None, None, None],
571 'server_url': 'http://test.example.com:5000',
573 'server_url': 'http://test.example.com:5000',
572 'hooks': ['push', 'pull'],
574 'hooks': ['push', 'pull'],
573 'is_shadow_repo': False,
575 'is_shadow_repo': False,
574 }
576 }
575 return extras
577 return extras
576
578
577
579
578 @pytest.mark.usefixtures('config_stub')
580 @pytest.mark.usefixtures('config_stub')
579 class TestUpdateCommentHandling(object):
581 class TestUpdateCommentHandling(object):
580
582
581 @pytest.fixture(autouse=True, scope='class')
583 @pytest.fixture(autouse=True, scope='class')
582 def enable_outdated_comments(self, request, baseapp):
584 def enable_outdated_comments(self, request, baseapp):
583 config_patch = mock.patch.dict(
585 config_patch = mock.patch.dict(
584 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
586 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
585 config_patch.start()
587 config_patch.start()
586
588
587 @request.addfinalizer
589 @request.addfinalizer
588 def cleanup():
590 def cleanup():
589 config_patch.stop()
591 config_patch.stop()
590
592
591 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
593 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
592 commits = [
594 commits = [
593 {'message': 'a'},
595 {'message': 'a'},
594 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
596 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
595 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
597 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
596 ]
598 ]
597 pull_request = pr_util.create_pull_request(
599 pull_request = pr_util.create_pull_request(
598 commits=commits, target_head='a', source_head='b', revisions=['b'])
600 commits=commits, target_head='a', source_head='b', revisions=['b'])
599 pr_util.create_inline_comment(file_path='file_b')
601 pr_util.create_inline_comment(file_path='file_b')
600 pr_util.add_one_commit(head='c')
602 pr_util.add_one_commit(head='c')
601
603
602 assert_inline_comments(pull_request, visible=1, outdated=0)
604 assert_inline_comments(pull_request, visible=1, outdated=0)
603
605
604 def test_comment_stays_unflagged_on_change_above(self, pr_util):
606 def test_comment_stays_unflagged_on_change_above(self, pr_util):
605 original_content = ''.join(
607 original_content = ''.join(
606 ['line {}\n'.format(x) for x in range(1, 11)])
608 ['line {}\n'.format(x) for x in range(1, 11)])
607 updated_content = 'new_line_at_top\n' + original_content
609 updated_content = 'new_line_at_top\n' + original_content
608 commits = [
610 commits = [
609 {'message': 'a'},
611 {'message': 'a'},
610 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
612 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
611 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
613 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
612 ]
614 ]
613 pull_request = pr_util.create_pull_request(
615 pull_request = pr_util.create_pull_request(
614 commits=commits, target_head='a', source_head='b', revisions=['b'])
616 commits=commits, target_head='a', source_head='b', revisions=['b'])
615
617
616 with outdated_comments_patcher():
618 with outdated_comments_patcher():
617 comment = pr_util.create_inline_comment(
619 comment = pr_util.create_inline_comment(
618 line_no=u'n8', file_path='file_b')
620 line_no=u'n8', file_path='file_b')
619 pr_util.add_one_commit(head='c')
621 pr_util.add_one_commit(head='c')
620
622
621 assert_inline_comments(pull_request, visible=1, outdated=0)
623 assert_inline_comments(pull_request, visible=1, outdated=0)
622 assert comment.line_no == u'n9'
624 assert comment.line_no == u'n9'
623
625
624 def test_comment_stays_unflagged_on_change_below(self, pr_util):
626 def test_comment_stays_unflagged_on_change_below(self, pr_util):
625 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
627 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
626 updated_content = original_content + 'new_line_at_end\n'
628 updated_content = original_content + 'new_line_at_end\n'
627 commits = [
629 commits = [
628 {'message': 'a'},
630 {'message': 'a'},
629 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
631 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
630 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
632 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
631 ]
633 ]
632 pull_request = pr_util.create_pull_request(
634 pull_request = pr_util.create_pull_request(
633 commits=commits, target_head='a', source_head='b', revisions=['b'])
635 commits=commits, target_head='a', source_head='b', revisions=['b'])
634 pr_util.create_inline_comment(file_path='file_b')
636 pr_util.create_inline_comment(file_path='file_b')
635 pr_util.add_one_commit(head='c')
637 pr_util.add_one_commit(head='c')
636
638
637 assert_inline_comments(pull_request, visible=1, outdated=0)
639 assert_inline_comments(pull_request, visible=1, outdated=0)
638
640
639 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
641 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
640 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
642 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
641 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
643 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
642 change_lines = list(base_lines)
644 change_lines = list(base_lines)
643 change_lines.insert(6, 'line 6a added\n')
645 change_lines.insert(6, 'line 6a added\n')
644
646
645 # Changes on the last line of sight
647 # Changes on the last line of sight
646 update_lines = list(change_lines)
648 update_lines = list(change_lines)
647 update_lines[0] = 'line 1 changed\n'
649 update_lines[0] = 'line 1 changed\n'
648 update_lines[-1] = 'line 12 changed\n'
650 update_lines[-1] = 'line 12 changed\n'
649
651
650 def file_b(lines):
652 def file_b(lines):
651 return FileNode('file_b', ''.join(lines))
653 return FileNode('file_b', ''.join(lines))
652
654
653 commits = [
655 commits = [
654 {'message': 'a', 'added': [file_b(base_lines)]},
656 {'message': 'a', 'added': [file_b(base_lines)]},
655 {'message': 'b', 'changed': [file_b(change_lines)]},
657 {'message': 'b', 'changed': [file_b(change_lines)]},
656 {'message': 'c', 'changed': [file_b(update_lines)]},
658 {'message': 'c', 'changed': [file_b(update_lines)]},
657 ]
659 ]
658
660
659 pull_request = pr_util.create_pull_request(
661 pull_request = pr_util.create_pull_request(
660 commits=commits, target_head='a', source_head='b', revisions=['b'])
662 commits=commits, target_head='a', source_head='b', revisions=['b'])
661 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
663 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
662
664
663 with outdated_comments_patcher():
665 with outdated_comments_patcher():
664 pr_util.add_one_commit(head='c')
666 pr_util.add_one_commit(head='c')
665 assert_inline_comments(pull_request, visible=0, outdated=1)
667 assert_inline_comments(pull_request, visible=0, outdated=1)
666
668
667 @pytest.mark.parametrize("change, content", [
669 @pytest.mark.parametrize("change, content", [
668 ('changed', 'changed\n'),
670 ('changed', 'changed\n'),
669 ('removed', ''),
671 ('removed', ''),
670 ], ids=['changed', 'removed'])
672 ], ids=['changed', 'removed'])
671 def test_comment_flagged_on_change(self, pr_util, change, content):
673 def test_comment_flagged_on_change(self, pr_util, change, content):
672 commits = [
674 commits = [
673 {'message': 'a'},
675 {'message': 'a'},
674 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
676 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
675 {'message': 'c', change: [FileNode('file_b', content)]},
677 {'message': 'c', change: [FileNode('file_b', content)]},
676 ]
678 ]
677 pull_request = pr_util.create_pull_request(
679 pull_request = pr_util.create_pull_request(
678 commits=commits, target_head='a', source_head='b', revisions=['b'])
680 commits=commits, target_head='a', source_head='b', revisions=['b'])
679 pr_util.create_inline_comment(file_path='file_b')
681 pr_util.create_inline_comment(file_path='file_b')
680
682
681 with outdated_comments_patcher():
683 with outdated_comments_patcher():
682 pr_util.add_one_commit(head='c')
684 pr_util.add_one_commit(head='c')
683 assert_inline_comments(pull_request, visible=0, outdated=1)
685 assert_inline_comments(pull_request, visible=0, outdated=1)
684
686
685
687
686 @pytest.mark.usefixtures('config_stub')
688 @pytest.mark.usefixtures('config_stub')
687 class TestUpdateChangedFiles(object):
689 class TestUpdateChangedFiles(object):
688
690
689 def test_no_changes_on_unchanged_diff(self, pr_util):
691 def test_no_changes_on_unchanged_diff(self, pr_util):
690 commits = [
692 commits = [
691 {'message': 'a'},
693 {'message': 'a'},
692 {'message': 'b',
694 {'message': 'b',
693 'added': [FileNode('file_b', 'test_content b\n')]},
695 'added': [FileNode('file_b', 'test_content b\n')]},
694 {'message': 'c',
696 {'message': 'c',
695 'added': [FileNode('file_c', 'test_content c\n')]},
697 'added': [FileNode('file_c', 'test_content c\n')]},
696 ]
698 ]
697 # open a PR from a to b, adding file_b
699 # open a PR from a to b, adding file_b
698 pull_request = pr_util.create_pull_request(
700 pull_request = pr_util.create_pull_request(
699 commits=commits, target_head='a', source_head='b', revisions=['b'],
701 commits=commits, target_head='a', source_head='b', revisions=['b'],
700 name_suffix='per-file-review')
702 name_suffix='per-file-review')
701
703
702 # modify PR adding new file file_c
704 # modify PR adding new file file_c
703 pr_util.add_one_commit(head='c')
705 pr_util.add_one_commit(head='c')
704
706
705 assert_pr_file_changes(
707 assert_pr_file_changes(
706 pull_request,
708 pull_request,
707 added=['file_c'],
709 added=['file_c'],
708 modified=[],
710 modified=[],
709 removed=[])
711 removed=[])
710
712
711 def test_modify_and_undo_modification_diff(self, pr_util):
713 def test_modify_and_undo_modification_diff(self, pr_util):
712 commits = [
714 commits = [
713 {'message': 'a'},
715 {'message': 'a'},
714 {'message': 'b',
716 {'message': 'b',
715 'added': [FileNode('file_b', 'test_content b\n')]},
717 'added': [FileNode('file_b', 'test_content b\n')]},
716 {'message': 'c',
718 {'message': 'c',
717 'changed': [FileNode('file_b', 'test_content b modified\n')]},
719 'changed': [FileNode('file_b', 'test_content b modified\n')]},
718 {'message': 'd',
720 {'message': 'd',
719 'changed': [FileNode('file_b', 'test_content b\n')]},
721 'changed': [FileNode('file_b', 'test_content b\n')]},
720 ]
722 ]
721 # open a PR from a to b, adding file_b
723 # open a PR from a to b, adding file_b
722 pull_request = pr_util.create_pull_request(
724 pull_request = pr_util.create_pull_request(
723 commits=commits, target_head='a', source_head='b', revisions=['b'],
725 commits=commits, target_head='a', source_head='b', revisions=['b'],
724 name_suffix='per-file-review')
726 name_suffix='per-file-review')
725
727
726 # modify PR modifying file file_b
728 # modify PR modifying file file_b
727 pr_util.add_one_commit(head='c')
729 pr_util.add_one_commit(head='c')
728
730
729 assert_pr_file_changes(
731 assert_pr_file_changes(
730 pull_request,
732 pull_request,
731 added=[],
733 added=[],
732 modified=['file_b'],
734 modified=['file_b'],
733 removed=[])
735 removed=[])
734
736
735 # move the head again to d, which rollbacks change,
737 # move the head again to d, which rollbacks change,
736 # meaning we should indicate no changes
738 # meaning we should indicate no changes
737 pr_util.add_one_commit(head='d')
739 pr_util.add_one_commit(head='d')
738
740
739 assert_pr_file_changes(
741 assert_pr_file_changes(
740 pull_request,
742 pull_request,
741 added=[],
743 added=[],
742 modified=[],
744 modified=[],
743 removed=[])
745 removed=[])
744
746
745 def test_updated_all_files_in_pr(self, pr_util):
747 def test_updated_all_files_in_pr(self, pr_util):
746 commits = [
748 commits = [
747 {'message': 'a'},
749 {'message': 'a'},
748 {'message': 'b', 'added': [
750 {'message': 'b', 'added': [
749 FileNode('file_a', 'test_content a\n'),
751 FileNode('file_a', 'test_content a\n'),
750 FileNode('file_b', 'test_content b\n'),
752 FileNode('file_b', 'test_content b\n'),
751 FileNode('file_c', 'test_content c\n')]},
753 FileNode('file_c', 'test_content c\n')]},
752 {'message': 'c', 'changed': [
754 {'message': 'c', 'changed': [
753 FileNode('file_a', 'test_content a changed\n'),
755 FileNode('file_a', 'test_content a changed\n'),
754 FileNode('file_b', 'test_content b changed\n'),
756 FileNode('file_b', 'test_content b changed\n'),
755 FileNode('file_c', 'test_content c changed\n')]},
757 FileNode('file_c', 'test_content c changed\n')]},
756 ]
758 ]
757 # open a PR from a to b, changing 3 files
759 # open a PR from a to b, changing 3 files
758 pull_request = pr_util.create_pull_request(
760 pull_request = pr_util.create_pull_request(
759 commits=commits, target_head='a', source_head='b', revisions=['b'],
761 commits=commits, target_head='a', source_head='b', revisions=['b'],
760 name_suffix='per-file-review')
762 name_suffix='per-file-review')
761
763
762 pr_util.add_one_commit(head='c')
764 pr_util.add_one_commit(head='c')
763
765
764 assert_pr_file_changes(
766 assert_pr_file_changes(
765 pull_request,
767 pull_request,
766 added=[],
768 added=[],
767 modified=['file_a', 'file_b', 'file_c'],
769 modified=['file_a', 'file_b', 'file_c'],
768 removed=[])
770 removed=[])
769
771
770 def test_updated_and_removed_all_files_in_pr(self, pr_util):
772 def test_updated_and_removed_all_files_in_pr(self, pr_util):
771 commits = [
773 commits = [
772 {'message': 'a'},
774 {'message': 'a'},
773 {'message': 'b', 'added': [
775 {'message': 'b', 'added': [
774 FileNode('file_a', 'test_content a\n'),
776 FileNode('file_a', 'test_content a\n'),
775 FileNode('file_b', 'test_content b\n'),
777 FileNode('file_b', 'test_content b\n'),
776 FileNode('file_c', 'test_content c\n')]},
778 FileNode('file_c', 'test_content c\n')]},
777 {'message': 'c', 'removed': [
779 {'message': 'c', 'removed': [
778 FileNode('file_a', 'test_content a changed\n'),
780 FileNode('file_a', 'test_content a changed\n'),
779 FileNode('file_b', 'test_content b changed\n'),
781 FileNode('file_b', 'test_content b changed\n'),
780 FileNode('file_c', 'test_content c changed\n')]},
782 FileNode('file_c', 'test_content c changed\n')]},
781 ]
783 ]
782 # open a PR from a to b, removing 3 files
784 # open a PR from a to b, removing 3 files
783 pull_request = pr_util.create_pull_request(
785 pull_request = pr_util.create_pull_request(
784 commits=commits, target_head='a', source_head='b', revisions=['b'],
786 commits=commits, target_head='a', source_head='b', revisions=['b'],
785 name_suffix='per-file-review')
787 name_suffix='per-file-review')
786
788
787 pr_util.add_one_commit(head='c')
789 pr_util.add_one_commit(head='c')
788
790
789 assert_pr_file_changes(
791 assert_pr_file_changes(
790 pull_request,
792 pull_request,
791 added=[],
793 added=[],
792 modified=[],
794 modified=[],
793 removed=['file_a', 'file_b', 'file_c'])
795 removed=['file_a', 'file_b', 'file_c'])
794
796
795
797
796 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
798 def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
797 model = PullRequestModel()
799 model = PullRequestModel()
798 pull_request = pr_util.create_pull_request()
800 pull_request = pr_util.create_pull_request()
799 pr_util.update_source_repository()
801 pr_util.update_source_repository()
800
802
801 model.update_commits(pull_request)
803 model.update_commits(pull_request)
802
804
803 # Expect that it has a version entry now
805 # Expect that it has a version entry now
804 assert len(model.get_versions(pull_request)) == 1
806 assert len(model.get_versions(pull_request)) == 1
805
807
806
808
807 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
809 def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
808 pull_request = pr_util.create_pull_request()
810 pull_request = pr_util.create_pull_request()
809 model = PullRequestModel()
811 model = PullRequestModel()
810 model.update_commits(pull_request)
812 model.update_commits(pull_request)
811
813
812 # Expect that it still has no versions
814 # Expect that it still has no versions
813 assert len(model.get_versions(pull_request)) == 0
815 assert len(model.get_versions(pull_request)) == 0
814
816
815
817
816 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
818 def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
817 model = PullRequestModel()
819 model = PullRequestModel()
818 pull_request = pr_util.create_pull_request()
820 pull_request = pr_util.create_pull_request()
819 comment = pr_util.create_comment()
821 comment = pr_util.create_comment()
820 pr_util.update_source_repository()
822 pr_util.update_source_repository()
821
823
822 model.update_commits(pull_request)
824 model.update_commits(pull_request)
823
825
824 # Expect that the comment is linked to the pr version now
826 # Expect that the comment is linked to the pr version now
825 assert comment.pull_request_version == model.get_versions(pull_request)[0]
827 assert comment.pull_request_version == model.get_versions(pull_request)[0]
826
828
827
829
828 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
830 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
829 model = PullRequestModel()
831 model = PullRequestModel()
830 pull_request = pr_util.create_pull_request()
832 pull_request = pr_util.create_pull_request()
831 pr_util.update_source_repository()
833 pr_util.update_source_repository()
832 pr_util.update_source_repository()
834 pr_util.update_source_repository()
833
835
834 model.update_commits(pull_request)
836 model.update_commits(pull_request)
835
837
836 # Expect to find a new comment about the change
838 # Expect to find a new comment about the change
837 expected_message = textwrap.dedent(
839 expected_message = textwrap.dedent(
838 """\
840 """\
839 Pull request updated. Auto status change to |under_review|
841 Pull request updated. Auto status change to |under_review|
840
842
841 .. role:: added
843 .. role:: added
842 .. role:: removed
844 .. role:: removed
843 .. parsed-literal::
845 .. parsed-literal::
844
846
845 Changed commits:
847 Changed commits:
846 * :added:`1 added`
848 * :added:`1 added`
847 * :removed:`0 removed`
849 * :removed:`0 removed`
848
850
849 Changed files:
851 Changed files:
850 * `A file_2 <#a_c--92ed3b5f07b4>`_
852 * `A file_2 <#a_c--92ed3b5f07b4>`_
851
853
852 .. |under_review| replace:: *"Under Review"*"""
854 .. |under_review| replace:: *"Under Review"*"""
853 )
855 )
854 pull_request_comments = sorted(
856 pull_request_comments = sorted(
855 pull_request.comments, key=lambda c: c.modified_at)
857 pull_request.comments, key=lambda c: c.modified_at)
856 update_comment = pull_request_comments[-1]
858 update_comment = pull_request_comments[-1]
857 assert update_comment.text == expected_message
859 assert update_comment.text == expected_message
858
860
859
861
860 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
862 def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
861 pull_request = pr_util.create_pull_request()
863 pull_request = pr_util.create_pull_request()
862
864
863 # Avoiding default values
865 # Avoiding default values
864 pull_request.status = PullRequest.STATUS_CLOSED
866 pull_request.status = PullRequest.STATUS_CLOSED
865 pull_request._last_merge_source_rev = "0" * 40
867 pull_request._last_merge_source_rev = "0" * 40
866 pull_request._last_merge_target_rev = "1" * 40
868 pull_request._last_merge_target_rev = "1" * 40
867 pull_request.last_merge_status = 1
869 pull_request.last_merge_status = 1
868 pull_request.merge_rev = "2" * 40
870 pull_request.merge_rev = "2" * 40
869
871
870 # Remember automatic values
872 # Remember automatic values
871 created_on = pull_request.created_on
873 created_on = pull_request.created_on
872 updated_on = pull_request.updated_on
874 updated_on = pull_request.updated_on
873
875
874 # Create a new version of the pull request
876 # Create a new version of the pull request
875 version = PullRequestModel()._create_version_from_snapshot(pull_request)
877 version = PullRequestModel()._create_version_from_snapshot(pull_request)
876
878
877 # Check attributes
879 # Check attributes
878 assert version.title == pr_util.create_parameters['title']
880 assert version.title == pr_util.create_parameters['title']
879 assert version.description == pr_util.create_parameters['description']
881 assert version.description == pr_util.create_parameters['description']
880 assert version.status == PullRequest.STATUS_CLOSED
882 assert version.status == PullRequest.STATUS_CLOSED
881
883
882 # versions get updated created_on
884 # versions get updated created_on
883 assert version.created_on != created_on
885 assert version.created_on != created_on
884
886
885 assert version.updated_on == updated_on
887 assert version.updated_on == updated_on
886 assert version.user_id == pull_request.user_id
888 assert version.user_id == pull_request.user_id
887 assert version.revisions == pr_util.create_parameters['revisions']
889 assert version.revisions == pr_util.create_parameters['revisions']
888 assert version.source_repo == pr_util.source_repository
890 assert version.source_repo == pr_util.source_repository
889 assert version.source_ref == pr_util.create_parameters['source_ref']
891 assert version.source_ref == pr_util.create_parameters['source_ref']
890 assert version.target_repo == pr_util.target_repository
892 assert version.target_repo == pr_util.target_repository
891 assert version.target_ref == pr_util.create_parameters['target_ref']
893 assert version.target_ref == pr_util.create_parameters['target_ref']
892 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
894 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
893 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
895 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
894 assert version.last_merge_status == pull_request.last_merge_status
896 assert version.last_merge_status == pull_request.last_merge_status
895 assert version.merge_rev == pull_request.merge_rev
897 assert version.merge_rev == pull_request.merge_rev
896 assert version.pull_request == pull_request
898 assert version.pull_request == pull_request
897
899
898
900
899 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
901 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
900 version1 = pr_util.create_version_of_pull_request()
902 version1 = pr_util.create_version_of_pull_request()
901 comment_linked = pr_util.create_comment(linked_to=version1)
903 comment_linked = pr_util.create_comment(linked_to=version1)
902 comment_unlinked = pr_util.create_comment()
904 comment_unlinked = pr_util.create_comment()
903 version2 = pr_util.create_version_of_pull_request()
905 version2 = pr_util.create_version_of_pull_request()
904
906
905 PullRequestModel()._link_comments_to_version(version2)
907 PullRequestModel()._link_comments_to_version(version2)
906 Session().commit()
908 Session().commit()
907
909
908 # Expect that only the new comment is linked to version2
910 # Expect that only the new comment is linked to version2
909 assert (
911 assert (
910 comment_unlinked.pull_request_version_id ==
912 comment_unlinked.pull_request_version_id ==
911 version2.pull_request_version_id)
913 version2.pull_request_version_id)
912 assert (
914 assert (
913 comment_linked.pull_request_version_id ==
915 comment_linked.pull_request_version_id ==
914 version1.pull_request_version_id)
916 version1.pull_request_version_id)
915 assert (
917 assert (
916 comment_unlinked.pull_request_version_id !=
918 comment_unlinked.pull_request_version_id !=
917 comment_linked.pull_request_version_id)
919 comment_linked.pull_request_version_id)
918
920
919
921
920 def test_calculate_commits():
922 def test_calculate_commits():
921 old_ids = [1, 2, 3]
923 old_ids = [1, 2, 3]
922 new_ids = [1, 3, 4, 5]
924 new_ids = [1, 3, 4, 5]
923 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
925 change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
924 assert change.added == [4, 5]
926 assert change.added == [4, 5]
925 assert change.common == [1, 3]
927 assert change.common == [1, 3]
926 assert change.removed == [2]
928 assert change.removed == [2]
927 assert change.total == [1, 3, 4, 5]
929 assert change.total == [1, 3, 4, 5]
928
930
929
931
930 def assert_inline_comments(pull_request, visible=None, outdated=None):
932 def assert_inline_comments(pull_request, visible=None, outdated=None):
931 if visible is not None:
933 if visible is not None:
932 inline_comments = CommentsModel().get_inline_comments(
934 inline_comments = CommentsModel().get_inline_comments(
933 pull_request.target_repo.repo_id, pull_request=pull_request)
935 pull_request.target_repo.repo_id, pull_request=pull_request)
934 inline_cnt = CommentsModel().get_inline_comments_count(
936 inline_cnt = CommentsModel().get_inline_comments_count(
935 inline_comments)
937 inline_comments)
936 assert inline_cnt == visible
938 assert inline_cnt == visible
937 if outdated is not None:
939 if outdated is not None:
938 outdated_comments = CommentsModel().get_outdated_comments(
940 outdated_comments = CommentsModel().get_outdated_comments(
939 pull_request.target_repo.repo_id, pull_request)
941 pull_request.target_repo.repo_id, pull_request)
940 assert len(outdated_comments) == outdated
942 assert len(outdated_comments) == outdated
941
943
942
944
943 def assert_pr_file_changes(
945 def assert_pr_file_changes(
944 pull_request, added=None, modified=None, removed=None):
946 pull_request, added=None, modified=None, removed=None):
945 pr_versions = PullRequestModel().get_versions(pull_request)
947 pr_versions = PullRequestModel().get_versions(pull_request)
946 # always use first version, ie original PR to calculate changes
948 # always use first version, ie original PR to calculate changes
947 pull_request_version = pr_versions[0]
949 pull_request_version = pr_versions[0]
948 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
950 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
949 pull_request, pull_request_version)
951 pull_request, pull_request_version)
950 file_changes = PullRequestModel()._calculate_file_changes(
952 file_changes = PullRequestModel()._calculate_file_changes(
951 old_diff_data, new_diff_data)
953 old_diff_data, new_diff_data)
952
954
953 assert added == file_changes.added, \
955 assert added == file_changes.added, \
954 'expected added:%s vs value:%s' % (added, file_changes.added)
956 'expected added:%s vs value:%s' % (added, file_changes.added)
955 assert modified == file_changes.modified, \
957 assert modified == file_changes.modified, \
956 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
958 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
957 assert removed == file_changes.removed, \
959 assert removed == file_changes.removed, \
958 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
960 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
959
961
960
962
961 def outdated_comments_patcher(use_outdated=True):
963 def outdated_comments_patcher(use_outdated=True):
962 return mock.patch.object(
964 return mock.patch.object(
963 CommentsModel, 'use_outdated_comments',
965 CommentsModel, 'use_outdated_comments',
964 return_value=use_outdated)
966 return_value=use_outdated)
General Comments 0
You need to be logged in to leave comments. Login now