##// END OF EJS Templates
pull-requests: updated metadata information for failed merges with multiple heads.
marcink -
r3627:cff84552 default
parent child Browse files
Show More
@@ -1,169 +1,169 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import webob
22 22 from pyramid.threadlocal import get_current_request
23 23
24 24 from rhodecode import events
25 25 from rhodecode.lib import hooks_base
26 26 from rhodecode.lib import utils2
27 27
28 28
def _get_rc_scm_extras(username, repo_name, repo_alias, action):
    """
    Build the ``extras`` AttributeDict describing a VCS operation
    (user, repository, action) consumed by the hook functions below.
    """
    # TODO: johbo: Replace by vcs_operation_context and remove fully
    from rhodecode.lib.base import vcs_operation_context

    check_locking = action in ('pull', 'push')
    request = get_current_request()

    # a blank WSGI environ serves as fallback when no request is active
    fallback_environ = webob.Request.blank('').environ
    try:
        environ = request.environ or fallback_environ
    except TypeError:
        # we might use this outside of request context
        environ = fallback_environ

    extras = vcs_operation_context(
        environ, repo_name, username, action, repo_alias, check_locking)
    return utils2.AttributeDict(extras)
47 47
48 48
def trigger_post_push_hook(
        username, action, hook_type, repo_name, repo_alias, commit_ids):
    """
    Triggers push action hooks

    :param username: username who pushes
    :param action: push/push_local/push_remote
    :param hook_type: type of the hook being fired
    :param repo_name: name of repo
    :param repo_alias: the type of SCM repo
    :param commit_ids: list of commit ids that we pushed
    """
    extras = _get_rc_scm_extras(username, repo_name, repo_alias, action)
    # extras is a dict-backed AttributeDict; attach the push details
    extras.update(commit_ids=commit_ids, hook_type=hook_type)
    hooks_base.post_push(extras)
64 64
65 65
def trigger_log_create_pull_request_hook(username, repo_name, repo_alias,
                                         pull_request, data=None):
    """
    Triggers create pull request action hooks

    :param username: username who creates the pull request
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that was created
    :param data: extra data for specific events e.g {'comment': comment_obj}
    """
    if repo_alias not in ('hg', 'git'):
        return

    extras = _get_rc_scm_extras(username, repo_name, repo_alias,
                                'create_pull_request')
    events.trigger(events.PullRequestCreateEvent(pull_request))
    # skip merge-state calculation for the log entry; computing it here can
    # fail (e.g. target with multiple heads) and is not needed for audit data
    extras.update(pull_request.get_api_data(with_merge_state=False))
    hooks_base.log_create_pull_request(**extras)
85 85
86 86
def trigger_log_merge_pull_request_hook(username, repo_name, repo_alias,
                                        pull_request, data=None):
    """
    Triggers merge pull request action hooks

    :param username: username who merged the pull request
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that was merged
    :param data: extra data for specific events e.g {'comment': comment_obj}
    """
    # only real SCM backends fire hooks
    if repo_alias not in ('hg', 'git'):
        return

    extras = _get_rc_scm_extras(
        username, repo_name, repo_alias, 'merge_pull_request')
    events.trigger(events.PullRequestMergeEvent(pull_request))
    extras.update(pull_request.get_api_data())
    hooks_base.log_merge_pull_request(**extras)
106 106
107 107
def trigger_log_close_pull_request_hook(username, repo_name, repo_alias,
                                        pull_request, data=None):
    """
    Triggers close pull request action hooks

    :param username: username who closed the pull request
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that was closed
    :param data: extra data for specific events e.g {'comment': comment_obj}
    """
    # only real SCM backends fire hooks
    if repo_alias not in ('hg', 'git'):
        return

    extras = _get_rc_scm_extras(
        username, repo_name, repo_alias, 'close_pull_request')
    events.trigger(events.PullRequestCloseEvent(pull_request))
    extras.update(pull_request.get_api_data())
    hooks_base.log_close_pull_request(**extras)
127 127
128 128
def trigger_log_review_pull_request_hook(username, repo_name, repo_alias,
                                         pull_request, data=None):
    """
    Triggers review status change pull request action hooks

    :param username: username who changed the review status
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that review status changed
    :param data: extra data for specific events e.g
        {'comment': comment_obj, 'status': new_status}
    """
    if repo_alias not in ('hg', 'git'):
        return

    extras = _get_rc_scm_extras(username, repo_name, repo_alias,
                                'review_pull_request')
    # `data` defaults to None; guard the lookup so callers that pass no
    # extra data don't crash with AttributeError on `None.get`
    status = (data or {}).get('status')
    events.trigger(events.PullRequestReviewEvent(pull_request, status))
    extras.update(pull_request.get_api_data())
    hooks_base.log_review_pull_request(**extras)
149 149
150 150
def trigger_log_update_pull_request_hook(username, repo_name, repo_alias,
                                         pull_request, data=None):
    """
    Triggers update pull request action hooks

    :param username: username who updated the pull request
    :param repo_name: name of target repo
    :param repo_alias: the type of SCM target repo
    :param pull_request: the pull request that was updated
    :param data: extra data for specific events e.g {'comment': comment_obj}
    """
    # only real SCM backends fire hooks
    if repo_alias not in ('hg', 'git'):
        return

    extras = _get_rc_scm_extras(
        username, repo_name, repo_alias, 'update_pull_request')
    events.trigger(events.PullRequestUpdateEvent(pull_request))
    extras.update(pull_request.get_api_data())
    hooks_base.log_update_pull_request(**extras)
@@ -1,1846 +1,1849 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24 import os
25 25 import re
26 26 import time
27 27 import shutil
28 28 import datetime
29 29 import fnmatch
30 30 import itertools
31 31 import logging
32 32 import collections
33 33 import warnings
34 34
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36 from pyramid import compat
37 37
38 38 from rhodecode.translation import lazy_ugettext
39 39 from rhodecode.lib.utils2 import safe_str, safe_unicode
40 40 from rhodecode.lib.vcs import connection
41 41 from rhodecode.lib.vcs.utils import author_name, author_email
42 42 from rhodecode.lib.vcs.conf import settings
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
45 45 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
46 46 NodeDoesNotExistError, NodeNotChangedError, VCSError,
47 47 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
48 48 RepositoryError)
49 49
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
# Git/Mercurial style file modes: regular file and executable file
FILEMODE_DEFAULT = 0o100644
FILEMODE_EXECUTABLE = 0o100755

# Lightweight descriptor of a VCS reference: ref type, name and commit id
Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
58 58
59 59
class MergeFailureReason(object):
    """
    Enumeration with all the reasons why the server side merge could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The merge was not successful, there are conflicts.
    MERGE_FAILED = 2

    # The merge succeeded but we could not push it to the target repository.
    PUSH_FAILED = 3

    # The specified target is not a head in the target repository.
    TARGET_IS_NOT_HEAD = 4

    # The source repository contains more branches than the target. Pushing
    # the merge will create additional branches in the target.
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # The target reference has multiple heads. That does not allow to correctly
    # identify the target location. This could only happen for mercurial
    # branches.
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # The target repository is locked
    TARGET_IS_LOCKED = 7

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # An involved commit could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    # The target repo reference is missing.
    MISSING_TARGET_REF = 9

    # The source repo reference is missing.
    MISSING_SOURCE_REF = 10

    # The merge was not successful, there are conflicts related to sub
    # repositories.
    SUBREPO_MERGE_FAILED = 11
111 111
112 112
class UpdateFailureReason(object):
    """
    Enumeration with all the reasons why the pull request update could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The pull request is up to date.
    NO_CHANGE = 2

    # The pull request has a reference type that is not supported for update.
    WRONG_REF_TYPE = 3

    # Update failed because the target reference is missing.
    MISSING_TARGET_REF = 4

    # Update failed because the source reference is missing.
    MISSING_SOURCE_REF = 5
141 141
142 142
class MergeResponse(object):
    """
    Result object of a server-side (possibly dry-run) merge attempt.

    Message templates in ``MERGE_STATUS_MESSAGES`` are rendered with
    ``.format(**self.metadata)``.
    """

    # uses .format(**metadata) for variables
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            u'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            u'This pull request cannot be merged because of an unhandled exception. '
            u'{exception}'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of merge conflicts.'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            u'This pull request could not be merged because push to '
            u'target:`{target}@{merge_commit}` failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'`{target_ref.name}` is not a head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            u'This pull request cannot be merged because the source contains '
            u'more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            u'This pull request cannot be merged because the target `{target_ref.name}` '
            u'has multiple heads: `{heads}`.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            u'This pull request cannot be merged because the target repository is '
            u'locked by {locked_by}.'),

        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            u'This pull request cannot be merged because the target '
            u'reference `{target_ref.name}` is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            u'This pull request cannot be merged because the source '
            u'reference `{source_ref.name}` is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            u'This pull request cannot be merged because of conflicts related '
            u'to sub repositories.'),

        # Deprecations
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            u'This pull request cannot be merged because the target or the '
            u'source reference is missing.'),

    }

    def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
        self.possible = possible
        self.executed = executed
        self.merge_ref = merge_ref
        self.failure_reason = failure_reason
        self.metadata = metadata or {}

    def __repr__(self):
        return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)

    def __eq__(self, other):
        same_instance = isinstance(other, self.__class__)
        return same_instance \
            and self.possible == other.possible \
            and self.executed == other.executed \
            and self.failure_reason == other.failure_reason

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__; define it explicitly
        # so != stays consistent with ==
        return not self.__eq__(other)

    @property
    def label(self):
        # reverse lookup: failure-reason number -> public attribute name
        label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
                          not k.startswith('_'))
        return label_dict.get(self.failure_reason)

    @property
    def merge_status_message(self):
        """
        Return a human friendly error message for the given merge status code.
        """
        msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
        try:
            return msg.format(**self.metadata)
        except Exception:
            # missing metadata keys must not break rendering; fall back to
            # the unformatted template
            log.exception('Failed to format %s message', self)
            return msg

    def asdict(self):
        """Serializable dict view of this response (for API/storage)."""
        data = {}
        for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
                  'merge_status_message']:
            data[k] = getattr(self, k)
        return data
228 228
229 229
230 230 class BaseRepository(object):
231 231 """
232 232 Base Repository for final backends
233 233
234 234 .. attribute:: DEFAULT_BRANCH_NAME
235 235
236 236 name of default branch (i.e. "trunk" for svn, "master" for git etc.
237 237
238 238 .. attribute:: commit_ids
239 239
240 240 list of all available commit ids, in ascending order
241 241
242 242 .. attribute:: path
243 243
244 244 absolute path to the repository
245 245
246 246 .. attribute:: bookmarks
247 247
248 248 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
249 249 there are no bookmarks or the backend implementation does not support
250 250 bookmarks.
251 251
252 252 .. attribute:: tags
253 253
254 254 Mapping from name to :term:`Commit ID` of the tag.
255 255
256 256 """
257 257
258 258 DEFAULT_BRANCH_NAME = None
259 259 DEFAULT_CONTACT = u"Unknown"
260 260 DEFAULT_DESCRIPTION = u"unknown"
261 261 EMPTY_COMMIT_ID = '0' * 40
262 262
263 263 path = None
264 264
    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if repository could
        not be found at the given ``repo_path`` or directory at ``repo_path``
        exists and ``create`` is set to True.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, would try to create repository.
        :param src_url=None: if set, should be proper url from which repository
            would be cloned; requires ``create`` parameter to be set to True -
            raises RepositoryError if src_url is set and create evaluates to
            False
        """
        raise NotImplementedError
280 280
281 281 def __repr__(self):
282 282 return '<%s at %s>' % (self.__class__.__name__, self.path)
283 283
    def __len__(self):
        # number of commits in the repository
        return self.count()
286 286
287 287 def __eq__(self, other):
288 288 same_instance = isinstance(other, self.__class__)
289 289 return same_instance and other.path == self.path
290 290
291 291 def __ne__(self, other):
292 292 return not self.__eq__(other)
293 293
294 294 def get_create_shadow_cache_pr_path(self, db_repo):
295 295 path = db_repo.cached_diffs_dir
296 296 if not os.path.exists(path):
297 297 os.makedirs(path, 0o755)
298 298 return path
299 299
    @classmethod
    def get_default_config(cls, default=None):
        """
        Build a default repository configuration, optionally pre-seeded.

        :param default: optional list of ``(section, key, value)`` tuples
            applied on top of the fresh config.
        """
        # NOTE(review): ``Config`` is not imported in this chunk; presumably
        # provided at module level elsewhere in the file — verify.
        config = Config()
        if default and isinstance(default, list):
            for section, key, val in default:
                config.set(section, key, val)
        return config
307 307
    @LazyProperty
    def _remote(self):
        # backend-specific remote handle; must be provided by subclasses
        raise NotImplementedError
311 311
    def _heads(self, branch=None):
        """
        Return commit ids of the repository heads, optionally limited to
        ``branch``. The base implementation reports none; backends where a
        ref can have multiple heads (Mercurial branches) override this.
        """
        return []
314
    @LazyProperty
    def EMPTY_COMMIT(self):
        # sentinel commit object representing the repository's empty state
        return EmptyCommit(self.EMPTY_COMMIT_ID)
315 318
316 319 @LazyProperty
317 320 def alias(self):
318 321 for k, v in settings.BACKENDS.items():
319 322 if v.split('.')[-1] == str(self.__class__.__name__):
320 323 return k
321 324
    @LazyProperty
    def name(self):
        # repository name == last path segment, as unicode
        return safe_unicode(os.path.basename(self.path))
325 328
    @LazyProperty
    def description(self):
        # human readable repository description; backend specific
        raise NotImplementedError
329 332
330 333 def refs(self):
331 334 """
332 335 returns a `dict` with branches, bookmarks, tags, and closed_branches
333 336 for this repository
334 337 """
335 338 return dict(
336 339 branches=self.branches,
337 340 branches_closed=self.branches_closed,
338 341 tags=self.tags,
339 342 bookmarks=self.bookmarks
340 343 )
341 344
    @LazyProperty
    def branches(self):
        """
        A `dict` which maps branch names to commit ids.
        """
        raise NotImplementedError
348 351
    @LazyProperty
    def branches_closed(self):
        """
        A `dict` which maps closed branch names to commit ids.
        """
        raise NotImplementedError
355 358
    @LazyProperty
    def bookmarks(self):
        """
        A `dict` which maps bookmark names to commit ids.
        """
        raise NotImplementedError
362 365
    @LazyProperty
    def tags(self):
        """
        A `dict` which maps tag names to commit ids.
        """
        raise NotImplementedError
369 372
    @LazyProperty
    def size(self):
        """
        Returns combined size in bytes for all repository files
        """
        # delegate to the tip commit, which sums the sizes of its files
        tip = self.get_commit()
        return tip.size
377 380
    def size_at_commit(self, commit_id):
        """Combined size in bytes of all files at the given ``commit_id``."""
        commit = self.get_commit(commit_id)
        return commit.size
381 384
382 385 def is_empty(self):
383 386 return not bool(self.commit_ids)
384 387
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Backend specific.

        :param url: url to a remote repository
        :param config: repository configuration used for the check
        """
        raise NotImplementedError
392 395
    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend
        """
        raise NotImplementedError
399 402
400 403 # ==========================================================================
401 404 # COMMITS
402 405 # ==========================================================================
403 406
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, most recent commit is returned.

        :param pre_load: Optional. List of commit attributes to load.
        :param translate_tag: backend-specific tag handling flag —
            semantics defined by subclasses.

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        raise NotImplementedError
414 417
415 418 def __iter__(self):
416 419 for commit_id in self.commit_ids:
417 420 yield self.get_commit(commit_id=commit_id)
418 421
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns iterator of `BaseCommit` objects from start to end
        not inclusive. This should behave just like a list, ie. end is not
        inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date: lower date bound for the range — TODO confirm type
        :param end_date: upper date bound for the range — TODO confirm type
        :param branch_name: limit commits to the given branch
        :param show_hidden: include hidden/obsolete commits (backend specific)
        :param pre_load: list of commit attributes to load eagerly
        :param translate_tags: backend-specific tag handling flag
        """
        raise NotImplementedError
437 440
438 441 def __getitem__(self, key):
439 442 """
440 443 Allows index based access to the commit objects of this repository.
441 444 """
442 445 pre_load = ["author", "branch", "date", "message", "parents"]
443 446 if isinstance(key, slice):
444 447 return self._get_range(key, pre_load)
445 448 return self.get_commit(commit_idx=key, pre_load=pre_load)
446 449
447 450 def _get_range(self, slice_obj, pre_load):
448 451 for commit_id in self.commit_ids.__getitem__(slice_obj):
449 452 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
450 453
    def count(self):
        # total number of commits in this repository
        return len(self.commit_ids)
453 456
    def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        raise NotImplementedError
467 470
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        raise NotImplementedError
480 483
    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
            ``self.EMPTY_COMMIT`` - in this case, patch showing all
            the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: Can be set to a path of a file to create a diff of that
            file. If `path1` is also set, this value is only associated to
            `commit2`.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
            changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
            shown. Defaults to ``3``.
        :param path1: Can be set to a path to associate with `commit1`. This
            parameter works only for backends which support diff generation for
            different paths. Other backends will raise a `ValueError` if `path1`
            is set and has a different value than `path`.
        """
        raise NotImplementedError
506 509
    def strip(self, commit_id, branch=None):
        """
        Strip given commit_id from the repository

        :param commit_id: commit to remove (including its descendants)
        :param branch: optional branch to restrict the operation to
        """
        raise NotImplementedError
512 515
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return a latest common ancestor commit if one exists for this repo
        `commit_id1` vs `commit_id2` from `repo2`.

        :param commit_id1: Commit id from this repository to use as a
            target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        """
        raise NotImplementedError
524 527
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Compare this repository's revision `commit_id1` with `commit_id2`.

        Returns a tuple(commits, ancestor) that would be merged from
        `commit_id2`. Doing a normal compare (``merge=False``), ``None``
        will be returned as ancestor.

        :param commit_id1: Commit id from this repository to use as a
            target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        :param merge: If set to ``True`` will do a merge compare which also
            returns the common ancestor.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError
542 545
    def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
              user_name='', user_email='', message='', dry_run=False,
              use_rebase=False, close_branch=False):
        """
        Merge the revisions specified in `source_ref` from `source_repo`
        onto the `target_ref` of this repository.

        `source_ref` and `target_ref` are named tuples with the following
        fields `type`, `name` and `commit_id`.

        Returns a :class:`MergeResponse` with the fields ``possible``,
        ``executed``, ``merge_ref``, ``failure_reason`` and ``metadata``.

        :param repo_id: `repo_id` target repo id.
        :param workspace_id: `workspace_id` unique identifier.
        :param target_ref: `target_ref` points to the commit on top of which
            the `source_ref` should be merged.
        :param source_repo: The repository that contains the commits to be
            merged.
        :param source_ref: `source_ref` points to the topmost commit from
            the `source_repo` which should be merged.
        :param user_name: Merge commit `user_name`.
        :param user_email: Merge commit `user_email`.
        :param message: Merge commit `message`.
        :param dry_run: If `True` the merge will not take place.
        :param use_rebase: If `True` commits from the source will be rebased
            on top of the target instead of being merged.
        :param close_branch: If `True` branch will be closed before merging it
        """
        if dry_run:
            # dry runs may use placeholder identity/message; real merges
            # must provide all three explicitly
            message = message or settings.MERGE_DRY_RUN_MESSAGE
            user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
            user_name = user_name or settings.MERGE_DRY_RUN_USER
        else:
            if not user_name:
                raise ValueError('user_name cannot be empty')
            if not user_email:
                raise ValueError('user_email cannot be empty')
            if not message:
                raise ValueError('message cannot be empty')

        try:
            return self._merge_repo(
                repo_id, workspace_id, target_ref, source_repo,
                source_ref, message, user_name, user_email, dry_run=dry_run,
                use_rebase=use_rebase, close_branch=close_branch)
        except RepositoryError as exc:
            # never propagate backend errors to callers; report UNKNOWN
            log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
            return MergeResponse(
                False, False, None, MergeFailureReason.UNKNOWN,
                metadata={'exception': str(exc)})
595 598
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """Internal implementation of merge; provided by each backend."""
        raise NotImplementedError
602 605
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        """
        Create the merge workspace if it does not exist yet.

        :param repo_id: target repo id (part of the workspace location)
        :param workspace_id: `workspace_id` unique identifier.
        :param target_ref: reference to merge onto
        :param source_ref: reference to be merged
        """
        raise NotImplementedError
611 614
612 615 def _get_legacy_shadow_repository_path(self, workspace_id):
613 616 """
614 617 Legacy version that was used before. We still need it for
615 618 backward compat
616 619 """
617 620 return os.path.join(
618 621 os.path.dirname(self.path),
619 622 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
620 623
621 624 def _get_shadow_repository_path(self, repo_id, workspace_id):
622 625 # The name of the shadow repository must start with '.', so it is
623 626 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
624 627 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
625 628 if os.path.exists(legacy_repository_path):
626 629 return legacy_repository_path
627 630 else:
628 631 return os.path.join(
629 632 os.path.dirname(self.path),
630 633 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
631 634
    def cleanup_merge_workspace(self, repo_id, workspace_id):
        """
        Remove merge workspace.

        This function MUST not fail in case there is no workspace associated to
        the given `workspace_id`.

        :param repo_id: target repo id (part of the workspace location)
        :param workspace_id: `workspace_id` unique identifier.
        """
        shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
        # timestamped target name so repeated cleanups never collide
        shadow_repository_path_del = '{}.{}.delete'.format(
            shadow_repository_path, time.time())

        # move the shadow repo, so it never conflicts with the one used.
        # we use this method because shutil.rmtree had some edge case problems
        # removing symlinked repositories
        if not os.path.isdir(shadow_repository_path):
            return

        shutil.move(shadow_repository_path, shadow_repository_path_del)
        try:
            shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
        except Exception:
            # best effort: log, then retry ignoring errors so cleanup
            # never raises to the caller
            log.exception('Failed to gracefully remove shadow repo under %s',
                          shadow_repository_path_del)
            shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
658 661
659 662 # ========== #
660 663 # COMMIT API #
661 664 # ========== #
662 665
    @LazyProperty
    def in_memory_commit(self):
        """
        Returns :class:`InMemoryCommit` object for this repository.

        Abstract -- each backend must provide its own implementation.
        """
        raise NotImplementedError
669 672
670 673 # ======================== #
671 674 # UTILITIES FOR SUBCLASSES #
672 675 # ======================== #
673 676
674 677 def _validate_diff_commits(self, commit1, commit2):
675 678 """
676 679 Validates that the given commits are related to this repository.
677 680
678 681 Intended as a utility for sub classes to have a consistent validation
679 682 of input parameters in methods like :meth:`get_diff`.
680 683 """
681 684 self._validate_commit(commit1)
682 685 self._validate_commit(commit2)
683 686 if (isinstance(commit1, EmptyCommit) and
684 687 isinstance(commit2, EmptyCommit)):
685 688 raise ValueError("Cannot compare two empty commits")
686 689
687 690 def _validate_commit(self, commit):
688 691 if not isinstance(commit, BaseCommit):
689 692 raise TypeError(
690 693 "%s is not of type BaseCommit" % repr(commit))
691 694 if commit.repository != self and not isinstance(commit, EmptyCommit):
692 695 raise ValueError(
693 696 "Commit %s must be a valid commit from this repository %s, "
694 697 "related to this repository instead %s." %
695 698 (commit, self, commit.repository))
696 699
697 700 def _validate_commit_id(self, commit_id):
698 701 if not isinstance(commit_id, compat.string_types):
699 702 raise TypeError("commit_id must be a string value")
700 703
701 704 def _validate_commit_idx(self, commit_idx):
702 705 if not isinstance(commit_idx, (int, long)):
703 706 raise TypeError("commit_idx must be a numeric value")
704 707
705 708 def _validate_branch_name(self, branch_name):
706 709 if branch_name and branch_name not in self.branches_all:
707 710 msg = ("Branch %s not found in %s" % (branch_name, self))
708 711 raise BranchDoesNotExistError(msg)
709 712
710 713 #
711 714 # Supporting deprecated API parts
712 715 # TODO: johbo: consider to move this into a mixin
713 716 #
714 717
    @property
    def EMPTY_CHANGESET(self):
        """Deprecated alias of :attr:`EMPTY_COMMIT_ID`; warns on access."""
        warnings.warn(
            "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
        return self.EMPTY_COMMIT_ID
720 723
    @property
    def revisions(self):
        """Deprecated alias of :attr:`commit_ids`; warns on access."""
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        return self.commit_ids
725 728
    @revisions.setter
    def revisions(self, value):
        # deprecated write-path companion of the ``revisions`` property
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        self.commit_ids = value
730 733
731 734 def get_changeset(self, revision=None, pre_load=None):
732 735 warnings.warn("Use get_commit instead", DeprecationWarning)
733 736 commit_id = None
734 737 commit_idx = None
735 738 if isinstance(revision, compat.string_types):
736 739 commit_id = revision
737 740 else:
738 741 commit_idx = revision
739 742 return self.get_commit(
740 743 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
741 744
742 745 def get_changesets(
743 746 self, start=None, end=None, start_date=None, end_date=None,
744 747 branch_name=None, pre_load=None):
745 748 warnings.warn("Use get_commits instead", DeprecationWarning)
746 749 start_id = self._revision_to_commit(start)
747 750 end_id = self._revision_to_commit(end)
748 751 return self.get_commits(
749 752 start_id=start_id, end_id=end_id, start_date=start_date,
750 753 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
751 754
752 755 def _revision_to_commit(self, revision):
753 756 """
754 757 Translates a revision to a commit_id
755 758
756 759 Helps to support the old changeset based API which allows to use
757 760 commit ids and commit indices interchangeable.
758 761 """
759 762 if revision is None:
760 763 return revision
761 764
762 765 if isinstance(revision, compat.string_types):
763 766 commit_id = revision
764 767 else:
765 768 commit_id = self.commit_ids[revision]
766 769 return commit_id
767 770
    @property
    def in_memory_changeset(self):
        """Deprecated alias of :attr:`in_memory_commit`; warns on access."""
        warnings.warn("Use in_memory_commit instead", DeprecationWarning)
        return self.in_memory_commit
772 775
    def get_path_permissions(self, username):
        """
        Returns a path permission checker or None if not supported

        :param username: session user name
        :return: an instance of BasePathPermissionChecker or None
        """
        # base implementation: path-level permissions are not supported
        return None
781 784
    def install_hooks(self, force=False):
        # Delegate hook installation to the vcsserver remote.
        return self._remote.install_hooks(force)
784 787
    def get_hooks_info(self):
        # Delegate hook introspection to the vcsserver remote.
        return self._remote.get_hooks_info()
787 790
788 791
class BaseCommit(object):
    """
    Each backend should implement it's commit representation.

    **Attributes**

    ``repository``
        repository object within which commit exists

    ``id``
        The commit id, may be ``raw_id`` or i.e. for mercurial's tip
        just ``tip``.

    ``raw_id``
        raw commit representation (i.e. full 40 length sha for git
        backend)

    ``short_id``
        shortened (if apply) version of ``raw_id``; it would be simple
        shortcut for ``raw_id[:12]`` for git/mercurial backends or same
        as ``raw_id`` for subversion

    ``idx``
        commit index

    ``files``
        list of ``FileNode`` (``Node`` with NodeKind.FILE) objects

    ``dirs``
        list of ``DirNode`` (``Node`` with NodeKind.DIR) objects

    ``nodes``
        combined list of ``Node`` objects

    ``author``
        author of the commit, as unicode

    ``message``
        message of the commit, as unicode

    ``parents``
        list of parent commits

    """

    branch = None
    """
    Depending on the backend this should be set to the branch name of the
    commit. Backends not supporting branches on commits should leave this
    value as ``None``.
    """

    _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
    """
    This template is used to generate a default prefix for repository archives
    if no prefix has been specified.
    """

    def __str__(self):
        return '<%s at %s:%s>' % (
            self.__class__.__name__, self.idx, self.short_id)

    def __repr__(self):
        return self.__str__()

    def __unicode__(self):
        return u'%s:%s' % (self.idx, self.short_id)

    def __eq__(self, other):
        # NOTE(review): ``__ne__`` is not defined, so under Python 2 the
        # ``!=`` operator does not use this equality -- confirm callers
        # only rely on ``==``.
        same_instance = isinstance(other, self.__class__)
        return same_instance and self.raw_id == other.raw_id

    def __json__(self):
        # JSON-serializable summary used by the API/web layer
        parents = []
        try:
            for parent in self.parents:
                parents.append({'raw_id': parent.raw_id})
        except NotImplementedError:
            # empty commit doesn't have parents implemented
            pass

        return {
            'short_id': self.short_id,
            'raw_id': self.raw_id,
            'revision': self.idx,
            'message': self.message,
            'date': self.date,
            'author': self.author,
            'parents': parents,
            'branch': self.branch
        }

    def __getstate__(self):
        # drop the remote connection and repository handle before pickling
        d = self.__dict__.copy()
        d.pop('_remote', None)
        d.pop('repository', None)
        return d

    def _get_refs(self):
        # refs (branches/bookmarks/tags) that point at this commit
        return {
            'branches': [self.branch] if self.branch else [],
            'bookmarks': getattr(self, 'bookmarks', []),
            'tags': self.tags
        }

    @LazyProperty
    def last(self):
        """
        ``True`` if this is last commit in repository, ``False``
        otherwise; trying to access this attribute while there is no
        commits would raise `EmptyRepositoryError`
        """
        if self.repository is None:
            raise CommitError("Cannot check if it's most recent commit")
        return self.raw_id == self.repository.commit_ids[-1]

    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        raise NotImplementedError

    @LazyProperty
    def first_parent(self):
        """
        Returns the first parent commit, or an ``EmptyCommit`` when this
        commit has no parents.
        """
        return self.parents[0] if self.parents else EmptyCommit()

    @property
    def merge(self):
        """
        Returns boolean if commit is a merge.
        """
        return len(self.parents) > 1

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """
        raise NotImplementedError

    @LazyProperty
    def id(self):
        """
        Returns string identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def short_id(self):
        """
        Returns shortened version of ``raw_id`` attribute, as string,
        identifying this commit, useful for presentation to users.
        """
        raise NotImplementedError

    @LazyProperty
    def idx(self):
        """
        Returns integer identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def committer(self):
        """
        Returns committer for this commit
        """
        raise NotImplementedError

    @LazyProperty
    def committer_name(self):
        """
        Returns committer name for this commit
        """

        return author_name(self.committer)

    @LazyProperty
    def committer_email(self):
        """
        Returns committer email address for this commit
        """

        return author_email(self.committer)

    @LazyProperty
    def author(self):
        """
        Returns author for this commit
        """

        raise NotImplementedError

    @LazyProperty
    def author_name(self):
        """
        Returns author name for this commit
        """

        return author_name(self.author)

    @LazyProperty
    def author_email(self):
        """
        Returns author email address for this commit
        """

        return author_email(self.author)

    def get_file_mode(self, path):
        """
        Returns stat mode of the file at `path`.
        """
        raise NotImplementedError

    def is_link(self, path):
        """
        Returns ``True`` if given `path` is a symlink
        """
        raise NotImplementedError

    def get_file_content(self, path):
        """
        Returns content of the file at the given `path`.
        """
        raise NotImplementedError

    def get_file_size(self, path):
        """
        Returns size of the file at the given `path`.
        """
        raise NotImplementedError

    def get_path_commit(self, path, pre_load=None):
        """
        Returns last commit of the file at the given `path`.

        :param pre_load: Optional. List of commit attributes to load.
        """
        commits = self.get_path_history(path, limit=1, pre_load=pre_load)
        if not commits:
            raise RepositoryError(
                'Failed to fetch history for path {}. '
                'Please check if such path exists in your repository'.format(
                    path))
        return commits[0]

    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of :class:`BaseCommit`
        objects for which file at given `path` has been modified.

        :param limit: Optional. Allows to limit the size of the returned
           history. This is intended as a hint to the underlying backend, so
           that it can apply optimizations depending on the limit.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, sha, commit lazy loader and line

        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def get_nodes(self, path):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of commit at the given ``path``.

        :raises ``CommitError``: if node at the given ``path`` is not
           instance of ``DirNode``
        """
        raise NotImplementedError

    def get_node(self, path):
        """
        Returns ``Node`` object from the given ``path``.

        :raises ``NodeDoesNotExistError``: if there is no node at the given
           ``path``
        """
        raise NotImplementedError

    def get_largefile_node(self, path):
        """
        Returns the path to largefile from Mercurial/Git-lfs storage.
        or None if it's not a largefile node
        """
        return None

    def archive_repo(self, file_path, kind='tgz', subrepos=None,
                     prefix=None, write_metadata=False, mtime=None):
        """
        Creates an archive containing the contents of the repository.

        :param file_path: path to the file which to create the archive.
        :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
        :param subrepos: accepted for interface compatibility; not used by
            this implementation -- TODO confirm backend usage.
        :param prefix: name of root directory in archive.
            Default is repository name and commit's short_id joined with dash:
            ``"{repo_name}-{short_id}"``.
        :param write_metadata: write a metadata file into archive.
        :param mtime: custom modification time for archive creation, defaults
            to time.time() if not given.

        :raise VCSError: If prefix has a problem.
        """
        allowed_kinds = settings.ARCHIVE_SPECS.keys()
        if kind not in allowed_kinds:
            raise ImproperArchiveTypeError(
                'Archive kind (%s) not supported use one of %s' %
                (kind, allowed_kinds))

        prefix = self._validate_archive_prefix(prefix)

        mtime = mtime or time.mktime(self.date.timetuple())

        # collect (path, mode, is_link, content) for every file in the tree
        file_info = []
        cur_rev = self.repository.get_commit(commit_id=self.raw_id)
        for _r, _d, files in cur_rev.walk('/'):
            for f in files:
                f_path = os.path.join(prefix, f.path)
                file_info.append(
                    (f_path, f.mode, f.is_link(), f.raw_bytes))

        if write_metadata:
            metadata = [
                ('repo_name', self.repository.name),
                ('rev', self.raw_id),
                ('create_time', mtime),
                ('branch', self.branch),
                ('tags', ','.join(self.tags)),
            ]
            meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
            file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))

        # archive creation is delegated to the vcsserver connection
        connection.Hg.archive_repo(file_path, mtime, file_info, kind)

    def _validate_archive_prefix(self, prefix):
        if prefix is None:
            # default: "<repo_name>-<short_id>"
            prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
                repo_name=safe_str(self.repository.name),
                short_id=self.short_id)
        elif not isinstance(prefix, str):
            # NOTE(review): on Python 2 ``str`` is bytes, so the message is
            # accurate there; revisit for Python 3.
            raise ValueError("prefix not a bytes object: %s" % repr(prefix))
        elif prefix.startswith('/'):
            raise VCSError("Prefix cannot start with leading slash")
        elif prefix.strip() == '':
            raise VCSError("Prefix cannot be empty")
        return prefix

    @LazyProperty
    def root(self):
        """
        Returns ``RootNode`` object for this commit.
        """
        return self.get_node('')

    def next(self, branch=None):
        """
        Returns the next commit after the current one; if ``branch`` is given
        it returns the next commit belonging to that branch.

        :param branch: show commits within the given named branch
        """
        indexes = xrange(self.idx + 1, self.repository.count())
        return self._find_next(indexes, branch)

    def prev(self, branch=None):
        """
        Returns the previous commit before the current one; if ``branch`` is
        given it returns the previous commit belonging to that branch.

        :param branch: show commit within the given named branch
        """
        indexes = xrange(self.idx - 1, -1, -1)
        return self._find_next(indexes, branch)

    def _find_next(self, indexes, branch=None):
        # walk the given commit indices and return the first commit,
        # optionally restricted to ``branch``
        if branch and self.branch != branch:
            raise VCSError('Branch option used on commit not belonging '
                           'to that branch')

        for next_idx in indexes:
            commit = self.repository.get_commit(commit_idx=next_idx)
            if branch and branch != commit.branch:
                continue
            return commit
        raise CommitDoesNotExistError

    def diff(self, ignore_whitespace=True, context=3):
        """
        Returns a `Diff` object representing the change made by this commit.
        """
        parent = self.first_parent
        diff = self.repository.get_diff(
            parent, self,
            ignore_whitespace=ignore_whitespace,
            context=context)
        return diff

    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        """
        raise NotImplementedError

    @LazyProperty
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        """
        raise NotImplementedError

    @LazyProperty
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        """
        raise NotImplementedError

    @LazyProperty
    def size(self):
        """
        Returns total number of bytes from contents of all filenodes.
        """
        return sum((node.size for node in self.get_filenodes_generator()))

    def walk(self, topurl=''):
        """
        Similar to os.walk method. Instead of filesystem it walks through
        commit starting at given ``topurl``. Returns generator of tuples
        (topnode, dirnodes, filenodes).
        """
        topnode = self.get_node(topurl)
        if not topnode.is_dir():
            return
        yield (topnode, topnode.dirs, topnode.files)
        for dirnode in topnode.dirs:
            for tup in self.walk(dirnode.path):
                yield tup

    def get_filenodes_generator(self):
        """
        Returns generator that yields *all* file nodes.
        """
        for topnode, dirs, files in self.walk():
            for node in files:
                yield node

    #
    # Utilities for sub classes to support consistent behavior
    #

    def no_node_at_path(self, path):
        # build (not raise) the canonical "missing node" error for ``path``
        return NodeDoesNotExistError(
            u"There is no file nor directory at the given path: "
            u"`%s` at commit %s" % (safe_unicode(path), self.short_id))

    def _fix_path(self, path):
        """
        Paths are stored without trailing slash so we need to get rid off it if
        needed.
        """
        return path.rstrip('/')

    #
    # Deprecated API based on changesets
    #

    @property
    def revision(self):
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value

    def get_file_changeset(self, path):
        warnings.warn("Use get_path_commit instead", DeprecationWarning)
        return self.get_path_commit(path)
1288 1291
class BaseChangesetClass(type):
    # Metaclass making ``isinstance(x, BaseChangeset)`` succeed for any
    # ``BaseCommit``, so legacy isinstance checks keep working.

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1293 1296
1294 1297
class BaseChangeset(BaseCommit):
    # Deprecated alias of ``BaseCommit``; warns on instantiation.

    __metaclass__ = BaseChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1303 1306
1304 1307
class BaseInMemoryCommit(object):
    """
    Represents differences between repository's state (most recent head) and
    changes made *in place*.

    **Attributes**

    ``repository``
        repository object for this in-memory-commit

    ``added``
        list of ``FileNode`` objects marked as *added*

    ``changed``
        list of ``FileNode`` objects marked as *changed*

    ``removed``
        list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
        *removed*

    ``parents``
        list of :class:`BaseCommit` instances representing parents of
        in-memory commit. Should always be 2-element sequence.

    """

    def __init__(self, repository):
        self.repository = repository
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def add(self, *filenodes):
        """
        Marks given ``FileNode`` objects as *to be committed*.

        :raises ``NodeAlreadyExistsError``: if node with same path exists at
          latest commit
        :raises ``NodeAlreadyAddedError``: if node with same path is already
          marked as *added*
        """
        # Check if not already marked as *added* first
        for node in filenodes:
            if node.path in (n.path for n in self.added):
                raise NodeAlreadyAddedError(
                    "Such FileNode %s is already marked for addition"
                    % node.path)
        for node in filenodes:
            self.added.append(node)

    def change(self, *filenodes):
        """
        Marks given ``FileNode`` objects to be *changed* in next commit.

        :raises ``EmptyRepositoryError``: if there are no commits yet
        :raises ``NodeAlreadyExistsError``: if node with same path is already
          marked to be *changed*
        :raises ``NodeAlreadyRemovedError``: if node with same path is already
          marked to be *removed*
        :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
          commit
        :raises ``NodeNotChangedError``: if node hasn't really be changed
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node at %s is already marked as removed" % node.path)
        try:
            self.repository.get_commit()
        except EmptyRepositoryError:
            raise EmptyRepositoryError(
                "Nothing to change - try to *add* new nodes rather than "
                "changing them")
        for node in filenodes:
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node at '%s' is already marked as changed" % node.path)
            self.changed.append(node)

    def remove(self, *filenodes):
        """
        Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
        *removed* in next commit.

        :raises ``NodeAlreadyRemovedError``: if node has been already marked to
          be *removed*
        :raises ``NodeAlreadyChangedError``: if node has been already marked to
          be *changed*
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node is already marked to for removal at %s" % node.path)
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node is already marked to be changed at %s" % node.path)
            # We only mark node as *removed* - real removal is done by
            # commit method
            self.removed.append(node)

    def reset(self):
        """
        Resets this instance to initial state (cleans ``added``, ``changed``
        and ``removed`` lists).
        """
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def get_ipaths(self):
        """
        Returns generator of paths from nodes marked as added, changed or
        removed.
        """
        for node in itertools.chain(self.added, self.changed, self.removed):
            yield node.path

    def get_paths(self):
        """
        Returns list of paths from nodes marked as added, changed or removed.
        """
        return list(self.get_ipaths())

    def check_integrity(self, parents=None):
        """
        Checks in-memory commit's integrity. Also, sets parents if not
        already set.

        :raises CommitError: if any error occurs (i.e.
          ``NodeDoesNotExistError``).
        """
        if not self.parents:
            parents = parents or []
            if len(parents) == 0:
                try:
                    # default to the repository head as first parent
                    parents = [self.repository.get_commit(), None]
                except EmptyRepositoryError:
                    parents = [None, None]
            elif len(parents) == 1:
                parents += [None]
            self.parents = parents

        # Local parents, only if not None
        parents = [p for p in self.parents if p]

        # Check nodes marked as added
        for p in parents:
            for node in self.added:
                try:
                    p.get_node(node.path)
                except NodeDoesNotExistError:
                    pass
                else:
                    raise NodeAlreadyExistsError(
                        "Node `%s` already exists at %s" % (node.path, p))

        # Check nodes marked as changed
        missing = set(self.changed)
        not_changed = set(self.changed)
        if self.changed and not parents:
            raise NodeDoesNotExistError(str(self.changed[0].path))
        for p in parents:
            for node in self.changed:
                try:
                    old = p.get_node(node.path)
                    missing.remove(node)
                    # if content actually changed, remove node from not_changed
                    if old.content != node.content:
                        not_changed.remove(node)
                except NodeDoesNotExistError:
                    pass
        if self.changed and missing:
            # NOTE(review): ``node`` here is the last node iterated above,
            # not necessarily a member of ``missing`` -- confirm the message
            # reports the intended path.
            raise NodeDoesNotExistError(
                "Node `%s` marked as modified but missing in parents: %s"
                % (node.path, parents))

        if self.changed and not_changed:
            raise NodeNotChangedError(
                "Node `%s` wasn't actually changed (parents: %s)"
                % (not_changed.pop().path, parents))

        # Check nodes marked as removed
        if self.removed and not parents:
            raise NodeDoesNotExistError(
                "Cannot remove node at %s as there "
                "were no parents specified" % self.removed[0].path)
        really_removed = set()
        for p in parents:
            for node in self.removed:
                try:
                    p.get_node(node.path)
                    really_removed.add(node)
                except CommitError:
                    pass
        not_removed = set(self.removed) - really_removed
        if not_removed:
            # TODO: johbo: This code branch does not seem to be covered
            raise NodeDoesNotExistError(
                "Cannot remove node at %s from "
                "following parents: %s" % (not_removed, parents))

    def commit(
            self, message, author, parents=None, branch=None, date=None,
            **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created :class:`BaseCommit`. Updates repository's
        attribute `commits`.

        .. note::

            While overriding this method each backend's should call
            ``self.check_integrity(parents)`` in the first place.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
          would be derived
        :param date: ``datetime.datetime`` instance. Defaults to
          ``datetime.datetime.now()``.
        :param branch: branch name, as string. If none given, default backend's
          branch would be used.

        :raises ``CommitError``: if any error occurs while committing
        """
        raise NotImplementedError
1534 1537
class BaseInMemoryChangesetClass(type):
    # Metaclass making ``isinstance(x, BaseInMemoryChangeset)`` succeed for
    # any ``BaseInMemoryCommit``, so legacy isinstance checks keep working.

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseInMemoryCommit)
1539 1542
1540 1543
class BaseInMemoryChangeset(BaseInMemoryCommit):
    """
    Deprecated alias of :class:`BaseInMemoryCommit`, kept for backward
    compatibility; emits a ``DeprecationWarning`` on instantiation.
    """

    __metaclass__ = BaseInMemoryChangesetClass

    def __new__(cls, *args, **kwargs):
        # Bug fix: the old warning text named the wrong pair of classes
        # ("Use BaseCommit instead of BaseInMemoryCommit").
        warnings.warn(
            "Use BaseInMemoryCommit instead of BaseInMemoryChangeset",
            DeprecationWarning)
        return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1549 1552
1550 1553
class EmptyCommit(BaseCommit):
    """
    A dummy empty commit. It's possible to pass a hash when creating
    an EmptyCommit.
    """

    def __init__(
            self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        self._empty_commit_id = commit_id
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self.idx = idx
        self.message = message
        self.author = author
        self.date = date or datetime.datetime.fromtimestamp(0)
        self.repository = repo
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit, useful for web
        representation.
        """

        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        # default branch of the aliased backend, or None without an alias
        if self.alias:
            from rhodecode.lib.vcs.backends import get_backend
            return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        return self.raw_id

    def get_path_commit(self, path):
        # NOTE(review): narrower signature than the base class (no
        # ``pre_load``) -- confirm no caller passes it to empty commits.
        return self

    def get_file_content(self, path):
        return u''

    def get_file_size(self, path):
        return 0
1601 1604
1602 1605
class EmptyChangesetClass(type):
    # Metaclass making ``isinstance(x, EmptyChangeset)`` succeed for any
    # ``EmptyCommit``, so legacy isinstance checks keep working.

    def __instancecheck__(self, instance):
        return isinstance(instance, EmptyCommit)
1607 1610
1608 1611
class EmptyChangeset(EmptyCommit):
    """
    Deprecated alias of :class:`EmptyCommit`, kept for backward
    compatibility with the old changeset based API.
    """

    __metaclass__ = EmptyChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # Bug fix: ``super()`` was previously called with ``EmptyCommit`` as
        # the first argument, skipping it in the MRO; pass the defining class
        # itself, consistent with the other deprecated shims in this module.
        return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)

    def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        """
        Map the legacy constructor arguments onto :class:`EmptyCommit`.

        :param cs: commit id, legacy name for ``commit_id``.
        :param requested_revision: no longer supported; passing a value only
            triggers a ``DeprecationWarning``.
        :param revision: legacy name for ``idx``.
        """
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        super(EmptyChangeset, self).__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        """Deprecated alias of :attr:`idx`."""
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1637 1640
1638 1641
class EmptyRepository(BaseRepository):
    """Null-object repository used where a repository instance is required."""

    def __init__(self, repo_path=None, config=None, create=False, **kwargs):
        # Intentionally a no-op: there is nothing to initialize.
        pass

    def get_diff(self, *args, **kwargs):
        """Return an empty diff, whatever the arguments are."""
        from rhodecode.lib.vcs.backends.git.diff import GitDiff
        return GitDiff('')
1646 1649
1647 1650
class CollectionGenerator(object):
    """
    Lazy collection of commits: wraps a sequence of commit ids and creates
    commit objects on demand via ``repo.get_commit``.
    """

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
        self.repo = repo
        self.commit_ids = commit_ids
        # TODO: (oliver) this isn't currently hooked up
        # NOTE(review): the `collection_size` parameter is ignored and the
        # attribute is always None, so __len__ falls back to
        # len(commit_ids) -- confirm whether this is still intended.
        self.collection_size = None
        self.pre_load = pre_load
        self.translate_tag = translate_tag

    def __len__(self):
        if self.collection_size is not None:
            return self.collection_size
        return self.commit_ids.__len__()

    def __iter__(self):
        for commit_id in self.commit_ids:
            # TODO: johbo: Mercurial passes in commit indices or commit ids
            yield self._commit_factory(commit_id)

    def _commit_factory(self, commit_id):
        """
        Allows backends to override the way commits are generated.
        """
        return self.repo.get_commit(
            commit_id=commit_id, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __getslice__(self, i, j):
        """
        Returns an iterator of sliced repository
        """
        # NOTE: __getslice__ exists only on Python 2; slicing on Python 3
        # would require __getitem__ with a slice object instead.
        commit_ids = self.commit_ids[i:j]
        return self.__class__(
            self.repo, commit_ids, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __repr__(self):
        return '<CollectionGenerator[len:%s]>' % (self.__len__())
1687 1690
1688 1691
class Config(object):
    """
    In-memory configuration for a repository.

    Mirrors a small subset of the :class:`ConfigParser.ConfigParser` API
    from the standard library -- only what the vcs layer needs.
    """

    def __init__(self):
        # section name -> {option name -> value}
        self._values = {}

    def copy(self):
        """Return a copy whose sections can be mutated independently."""
        duplicate = Config()
        for section_name, section_data in self._values.items():
            duplicate._values[section_name] = section_data.copy()
        return duplicate

    def __repr__(self):
        return '<Config(%s sections) at %s>' % (
            len(self._values), hex(id(self)))

    def items(self, section):
        """Iterate over ``(option, value)`` pairs of *section*."""
        return self._values.get(section, {}).iteritems()

    def get(self, section, option):
        """Return the value stored under *section*/*option*, or ``None``."""
        return self._values.get(section, {}).get(option)

    def set(self, section, option, value):
        """Store *value* under *section*/*option*, creating the section."""
        self._values.setdefault(section, {})[option] = value

    def clear_section(self, section):
        """Drop every option stored under *section*."""
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        serialized = []
        for section_name in self._values:
            for option, value in self._values[section_name].items():
                serialized.append(
                    (safe_str(section_name), safe_str(option), safe_str(value)))
        return serialized
1734 1737
1735 1738
class Diff(object):
    """
    Represents a diff result from a repository backend.

    Subclasses have to provide a backend specific value for
    :attr:`_header_re` and :attr:`_meta_re`.
    """
    # Regex matching any leading per-diff metadata (backend specific).
    _meta_re = None
    # Regex matching the per-file chunk header (backend specific).
    _header_re = None

    def __init__(self, raw_diff):
        # The complete diff output as one string.
        self.raw = raw_diff

    def chunks(self):
        """
        split the diff in chunks of separate --git a/file b/file chunks
        to make diffs consistent we must prepend with \n, and make sure
        we can detect last chunk as this was also has special rule
        """

        diff_parts = ('\n' + self.raw).split('\ndiff --git')
        header = diff_parts[0]

        if self._meta_re:
            # NOTE(review): the match result is never used; this looks like
            # dead code or a leftover validation hook -- confirm intent.
            match = self._meta_re.match(header)

        chunks = diff_parts[1:]
        total_chunks = len(chunks)

        # Lazy generator; the last chunk is flagged so DiffChunk knows not
        # to re-append the newline lost by split().
        return (
            DiffChunk(chunk, self, cur_chunk == total_chunks)
            for cur_chunk, chunk in enumerate(chunks, start=1))
1768 1771
1769 1772
class DiffChunk(object):
    """A single per-file chunk of a :class:`Diff` result."""

    def __init__(self, chunk, diff, last_chunk):
        self._diff = diff

        # since we split by \ndiff --git that part is lost from original diff
        # we need to re-apply it at the end, EXCEPT ! if it's last chunk
        if not last_chunk:
            chunk += '\n'

        # `header`: named groups of the backend's header regex;
        # `diff`: the chunk body after the header; `raw`: the whole chunk.
        match = self._diff._header_re.match(chunk)
        self.header = match.groupdict()
        self.diff = chunk[match.end():]
        self.raw = chunk
1784 1787
1785 1788
class BasePathPermissionChecker(object):
    """
    Interface for path based permission checks.

    Use :meth:`create_from_patterns` to build the cheapest concrete
    checker for a given set of include/exclude glob patterns.
    """

    @staticmethod
    def create_from_patterns(includes, excludes):
        """
        Return a checker matching the given patterns.

        :param includes: iterable of glob patterns granting access
        :param excludes: iterable of glob patterns denying access
        """
        if includes and '*' in includes and not excludes:
            return AllPathPermissionChecker()
        elif excludes and '*' in excludes:
            return NonePathPermissionChecker()
        else:
            return PatternPathPermissionChecker(includes, excludes)

    @property
    def has_full_access(self):
        """Whether every path is accessible. Subclasses must override."""
        # BUG FIX: was `raise NotImplemented()` -- NotImplemented is a
        # comparison sentinel and not callable, so that line raised
        # TypeError instead of signalling an abstract member.
        raise NotImplementedError()

    def has_access(self, path):
        """Whether ``path`` is accessible. Subclasses must override."""
        # BUG FIX: same NotImplemented -> NotImplementedError correction.
        raise NotImplementedError()
1803 1806
1804 1807
class AllPathPermissionChecker(BasePathPermissionChecker):
    """Checker granting access to every path."""

    @property
    def has_full_access(self):
        # By definition, full access.
        return True

    def has_access(self, path):
        # Every path is permitted, regardless of its value.
        return True
1813 1816
1814 1817
class NonePathPermissionChecker(BasePathPermissionChecker):
    """Checker denying access to every path."""

    @property
    def has_full_access(self):
        # By definition, no access at all.
        return False

    def has_access(self, path):
        # Every path is denied, regardless of its value.
        return False
1823 1826
1824 1827
class PatternPathPermissionChecker(BasePathPermissionChecker):
    """Checker matching paths against include/exclude glob patterns."""

    def __init__(self, includes, excludes):
        self.includes = includes
        self.excludes = excludes
        # Pre-compile every glob into a regex; a falsy pattern list
        # compiles to an empty list, so has_access degrades to "deny".
        self.includes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (includes or [])]
        self.excludes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (excludes or [])]

    @property
    def has_full_access(self):
        return '*' in self.includes and not self.excludes

    def has_access(self, path):
        # Excludes win over includes; matching nothing means no access.
        if any(regex.match(path) for regex in self.excludes_re):
            return False
        return any(regex.match(path) for regex in self.includes_re)
@@ -1,932 +1,937 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG repository module
23 23 """
24 24 import os
25 25 import logging
26 26 import binascii
27 27 import urllib
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.vcs import connection, exceptions
36 36 from rhodecode.lib.vcs.backends.base import (
37 37 BaseRepository, CollectionGenerator, Config, MergeResponse,
38 38 MergeFailureReason, Reference, BasePathPermissionChecker)
39 39 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
40 40 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
41 41 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
42 42 from rhodecode.lib.vcs.exceptions import (
43 43 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
44 44 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
45 45 from rhodecode.lib.vcs.compat import configparser
46 46
47 47 hexlify = binascii.hexlify
48 48 nullid = "\0" * 20
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class MercurialRepository(BaseRepository):
54 54 """
55 55 Mercurial repository backend
56 56 """
57 57 DEFAULT_BRANCH_NAME = 'default'
58 58
    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        Raises RepositoryError if repository could not be found at the given
        ``repo_path``.

        :param repo_path: local path of the repository
        :param config: config object containing the repo configuration
        :param create=False: if set to True, would try to create repository if
           it does not exist rather than raising exception
        :param src_url=None: would try to clone repository from given location
        :param do_workspace_checkout=False: sets update of working copy after
           making a clone
        :param bare: not used, compatible with other VCS
        """

        self.path = safe_str(os.path.abspath(repo_path))
        # mercurial since 4.4.X requires certain configuration to be present
        # because sometimes we init the repos with config we need to meet
        # special requirements
        self.config = config if config else self.get_default_config(
            default=[('extensions', 'largefiles', '1')])
        self.with_wire = with_wire

        # Creates/clones/opens the repo; raises when it is missing.
        self._init_repo(create, src_url, do_workspace_checkout)

        # caches: commit_id -> index, filled lazily by _rebuild_cache()
        self._commit_ids = {}
87 87
    @LazyProperty
    def _remote(self):
        # RPC proxy to the vcsserver Mercurial backend; created once per
        # repository object and cached by LazyProperty.
        return connection.Hg(self.path, self.config, with_wire=self.with_wire)
91 91
92 92 @LazyProperty
93 93 def commit_ids(self):
94 94 """
95 95 Returns list of commit ids, in ascending order. Being lazy
96 96 attribute allows external tools to inject shas from cache.
97 97 """
98 98 commit_ids = self._get_all_commit_ids()
99 99 self._rebuild_cache(commit_ids)
100 100 return commit_ids
101 101
102 102 def _rebuild_cache(self, commit_ids):
103 103 self._commit_ids = dict((commit_id, index)
104 104 for index, commit_id in enumerate(commit_ids))
105 105
    @LazyProperty
    def branches(self):
        # Active (non-closed) branches only; see _get_branches defaults.
        return self._get_branches()
109 109
    @LazyProperty
    def branches_closed(self):
        # Closed branches only (active=False, closed=True).
        return self._get_branches(active=False, closed=True)
113 113
114 114 @LazyProperty
115 115 def branches_all(self):
116 116 all_branches = {}
117 117 all_branches.update(self.branches)
118 118 all_branches.update(self.branches_closed)
119 119 return all_branches
120 120
    def _get_branches(self, active=True, closed=False):
        """
        Gets branches for this repository.
        Returns only not closed active branches by default.

        :param active: return also active branches
        :param closed: return also closed branches
        """
        if self.is_empty():
            return {}

        def get_name(ctx):
            # Sort key: the branch name of a (name, hash) pair.
            return ctx[0]

        # Branch heads come back as binary nodes; hexlify them for API use.
        _branches = [(safe_unicode(n), hexlify(h),) for n, h in
                     self._remote.branches(active, closed).items()]

        return OrderedDict(sorted(_branches, key=get_name, reverse=False))
140 140
    @LazyProperty
    def tags(self):
        """
        Gets tags for this repository.

        Cached lazily; `tag`/`remove_tag` reassign it to refresh.
        """
        return self._get_tags()
147 147
148 148 def _get_tags(self):
149 149 if self.is_empty():
150 150 return {}
151 151
152 152 def get_name(ctx):
153 153 return ctx[0]
154 154
155 155 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
156 156 self._remote.tags().items()]
157 157
158 158 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
159 159
    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        # 'local' is forwarded to the remote's tag call; presumably it marks
        # a repo-local (untracked) tag -- confirm against vcsserver.
        local = kwargs.setdefault('local', False)

        if message is None:
            message = "Added tag %s for commit %s" % (name, commit.short_id)

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(
            name, commit.raw_id, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags
        self.tags = self._get_tags()
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)
192 192
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given `name`.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)
        if message is None:
            message = "Removed tag %s" % name
        local = False

        date, tz = date_to_timestamp_plus_offset(date)

        # Tagging the name to the null node (nullid) is how Mercurial
        # records a tag removal.
        self._remote.tag(name, nullid, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()
        # Refresh the cached tag map.
        self.tags = self._get_tags()
215 215
    @LazyProperty
    def bookmarks(self):
        """
        Gets bookmarks for this repository.
        """
        return self._get_bookmarks()
222 222
223 223 def _get_bookmarks(self):
224 224 if self.is_empty():
225 225 return {}
226 226
227 227 def get_name(ctx):
228 228 return ctx[0]
229 229
230 230 _bookmarks = [
231 231 (safe_unicode(n), hexlify(h)) for n, h in
232 232 self._remote.bookmarks().items()]
233 233
234 234 return OrderedDict(sorted(_bookmarks, key=get_name))
235 235
    def _get_all_commit_ids(self):
        # 'visible' filter: presumably excludes hidden/obsolete (evolve)
        # changesets -- matches get_commits' show_hidden handling; confirm
        # against the vcsserver implementation.
        return self._remote.get_all_commit_ids('visible')
238 238
    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: must equal `path` if given; diffing two different
          paths is not supported and raises ``ValueError``.
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            # The remote expects a (repo root, relative path) pair.
            file_filter = [self.path, path]
        else:
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_git=True, opt_ignorews=ignore_whitespace,
            context=context)
        return MercurialDiff(diff)
269 269
    def strip(self, commit_id, branch=None):
        """Strip ``commit_id`` from the repository (hg strip, no backup)."""
        # NOTE(review): the `branch` parameter is accepted but unused here.
        self._remote.strip(commit_id, update=False, backup="none")

        self._remote.invalidate_vcs_cache()
        # History was rewritten; rebuild the commit id caches.
        self.commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(self.commit_ids)
276 276
    def verify(self):
        """Run repository verification on the remote and return its result."""
        verify = self._remote.verify()

        self._remote.invalidate_vcs_cache()
        return verify
282 282
283 283 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
284 284 if commit_id1 == commit_id2:
285 285 return commit_id1
286 286
287 287 ancestors = self._remote.revs_from_revspec(
288 288 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
289 289 other_path=repo2.path)
290 290 return repo2[ancestors[0]].raw_id if ancestors else None
291 291
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Return the commits that ``commit_id2`` introduces on top of
        ``commit_id1``, resolved in ``repo2``.

        :param merge: when True, use an ancestry based revset (covers merge
            topologies); otherwise use a plain ``id(x)..id(y)`` range.
        """
        if commit_id1 == commit_id2:
            commits = []
        else:
            if merge:
                indexes = self._remote.revs_from_revspec(
                    "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
                    commit_id2, commit_id1, commit_id1, other_path=repo2.path)
            else:
                indexes = self._remote.revs_from_revspec(
                    "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
                    commit_id1, other_path=repo2.path)

            commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
                       for idx in indexes]

        return commits
309 309
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Sometimes it may happen that mercurial will issue a basic
        auth request that can cause the whole API to hang when used from
        python or other external calls.

        On failures it'll raise urllib2.HTTPError; an exception is also
        raised when the return code is not 200.
        """
        # check first if it's not a local url
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # Request the _remote to verify the url
        return connection.Hg.check_url(url, config.serialize())
327 327
328 328 @staticmethod
329 329 def is_valid_repository(path):
330 330 return os.path.isdir(os.path.join(path, '.hg'))
331 331
    def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
        """
        Function will check for mercurial repository in given path. If there
        is no repository in that path it will raise an exception unless
        `create` parameter is set to True - in that case repository would
        be created.

        If `src_url` is given, would try to clone repository from the
        location at given clone_point. Additionally it'll make update to
        working copy accordingly to `do_workspace_checkout` flag.
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if src_url:
            url = str(self._get_url(src_url))
            # Fail early on unreachable/invalid source before cloning.
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, do_workspace_checkout)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            os.makedirs(self.path, mode=0o755)

        # Opens the repo, or initializes a fresh one when create is True.
        self._remote.localrepository(create)
361 361
    @LazyProperty
    def in_memory_commit(self):
        # In-memory commit builder bound to this repository.
        return MercurialInMemoryCommit(self)
365 365
    @LazyProperty
    def description(self):
        # Read [web] description from the repo config; fall back to the
        # class level default when unset.
        description = self._remote.get_config_value(
            'web', 'description', untrusted=True)
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)
371 371
    @LazyProperty
    def contact(self):
        # Prefer [web] contact, then [ui] username, then the class default.
        contact = (
            self._remote.get_config_value("web", "contact") or
            self._remote.get_config_value("ui", "username"))
        return safe_unicode(contact or self.DEFAULT_CONTACT)
378 378
    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            # Empty repository: fall back to filesystem mtimes.
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
390 390
391 391 def _get_fs_mtime(self):
392 392 # fallback to filesystem
393 393 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
394 394 st_path = os.path.join(self.path, '.hg', "store")
395 395 if os.path.exists(cl_path):
396 396 return os.stat(cl_path).st_mtime
397 397 else:
398 398 return os.stat(st_path).st_mtime
399 399
    def _get_url(self, url):
        """
        Returns normalized url. If schema is not given, would fall
        to filesystem
        (``file:///``) schema.
        """
        # NOTE: Python 2 semantics -- encode() yields a byte string here,
        # comparable with the plain string literals below.
        url = url.encode('utf8')
        if url != 'default' and '://' not in url:
            url = "file:" + urllib.pathname2url(url)
        return url
410 410
    def get_hook_location(self):
        """
        returns absolute path to location where hooks are stored
        """
        # NOTE(review): this points at '.hg/.hgrc'; Mercurial's standard
        # per-repo config file is '.hg/hgrc' (no leading dot) -- confirm
        # this matches where RhodeCode actually writes its hooks.
        return os.path.join(self.path, '.hg', '.hgrc')
416 416
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.

        Resolution order: local cache first, then a remote ``lookup``;
        with neither argument given, resolves "tip".
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # Fast path: cached commit_id -> index mapping.
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                # Not cached; fall through to the remote lookup below.
                pass
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                id_ = self.commit_ids[commit_idx]
                # Normalize a negative index so the commit carries a
                # non-negative idx attribute.
                if commit_idx < 0:
                    commit_idx += len(self.commit_ids)
                return MercurialCommit(
                    self, id_, commit_idx, pre_load=pre_load)
            except IndexError:
                # Out of range locally; let the remote try to resolve it.
                commit_id = commit_idx
        else:
            commit_id = "tip"

        if isinstance(commit_id, unicode):
            # Python 2: the remote layer expects byte strings.
            commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            # Re-raise with a message that names this repository.
            msg = "Commit %s does not exist for %s" % (
                commit_id, self)
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
456 456
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
          ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
          ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
          branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
          ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        # NOTE(review): branch_ancestors is hard-coded False, so the
        # ancestors() branch of the revset below is currently unreachable.
        branch_ancestors = False
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        # Translate the boundary commit ids into positions in commit_ids.
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            # Make the end boundary inclusive for the slice below.
            end_pos += 1

        # Build a Mercurial revset from the requested filters.
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            # Filtered: ask the remote to evaluate the revset; results are
            # revision indexes, hence the index based generator.
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
539 539
    def pull(self, url, commit_ids=None):
        """
        Pull changes from external location.

        :param commit_ids: Optional. Can be set to a list of commit ids
           which shall be pulled from the other repository.
        """
        url = self._get_url(url)
        self._remote.pull(url, commit_ids=commit_ids)
        # New changesets may have arrived; drop the remote's caches.
        self._remote.invalidate_vcs_cache()
550 550
    def fetch(self, url, commit_ids=None):
        """
        Backward compatibility with GIT fetch==pull
        """
        return self.pull(url, commit_ids=commit_ids)
556 556
    def push(self, url):
        """Push to the repository at ``url`` via the remote's sync_push."""
        url = self._get_url(url)
        self._remote.sync_push(url)
560 560
    def _local_clone(self, clone_path):
        """
        Create a local clone of the current repo.
        """
        # hooks=False: the shadow clone must not trigger RhodeCode hooks.
        self._remote.clone(self.path, clone_path, update_after_clone=True,
                           hooks=False)
567 567
    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.

        :param clean: discard uncommitted changes (hg update --clean).
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)
574 574
    def _identify(self):
        """
        Return the current state of the working directory.
        """
        # Strip the trailing '+' that hg appends when the working
        # directory has uncommitted changes.
        return self._remote.identify().strip().rstrip('+')
580 580
    def _heads(self, branch=None):
        """
        Return the commit ids of the repository heads.
        """
        # NOTE(review): an empty remote response would yield [''] rather
        # than [] -- callers appear to rely on at least one head existing.
        return self._remote.heads(branch=branch).strip().split(' ')
586 586
    def _ancestor(self, revision1, revision2):
        """
        Return the common ancestor of the two revisions.
        """
        return self._remote.ancestor(revision1, revision2)
592 592
    def _local_push(
            self, revision, repository_path, push_branches=False,
            enable_hooks=False):
        """
        Push the given revision to the specified repository.

        :param push_branches: allow to create branches in the target repo.
        :param enable_hooks: hooks default to disabled so shadow-repo
            pushes don't fire them; callers opt in explicitly.
        """
        self._remote.push(
            [revision], repository_path, hooks=enable_hooks,
            push_branches=push_branches)
604 604
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        NOTE(review): the ``dry_run`` parameter is accepted but unused in
        this method -- confirm whether callers expect it to take effect.
        """
        self._update(target_ref.commit_id, clean=True)

        ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if ancestor == source_ref.commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref.commit_id, False

        elif ancestor == target_ref.commit_id and is_the_same_branch:
            # In this case we should force a commit message
            # (fast-forward: the source head becomes the merge result)
            return source_ref.commit_id, True

        if use_rebase:
            try:
                # A temporary bookmark tracks the rebased head so we can
                # update to it once the rebase finished.
                bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
                                                target_ref.commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref.commit_id, dest=target_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                raise
        else:
            try:
                self._remote.merge(source_ref.commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError:
                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                raise
660 660
def _local_close(self, target_ref, user_name, user_email,
                 source_ref, close_message=''):
    """
    Close the branch that *source_ref* points at.

    Checks out the source commit and records a close-branch commit
    authored by *user_name* / *user_email*.

    Returns a tuple of (commit id of the close commit, bool flag telling
    whether that commit still needs to be pushed).
    """
    self._update(source_ref.commit_id)
    if close_message:
        commit_message = close_message
    else:
        commit_message = "Closing branch: `{}`".format(source_ref.name)
    author = '%s <%s>' % (user_name, user_email)
    try:
        self._remote.commit(
            message=safe_str(commit_message),
            username=safe_str(author),
            close_branch=True)
        self._remote.invalidate_vcs_cache()
        return self._identify(), True
    except RepositoryError:
        # the close commit failed half-way; reset the working dir so the
        # shadow repo is clean for the next attempt
        self._remote.update(clean=True)
        raise
682 682
def _is_the_same_branch(self, target_ref, source_ref):
    """Return True when both refs resolve to the same Mercurial branch name."""
    target_branch = self._get_branch_name(target_ref)
    source_branch = self._get_branch_name(source_ref)
    return target_branch == source_branch
687 687
688 688 def _get_branch_name(self, ref):
689 689 if ref.type == 'branch':
690 690 return ref.name
691 691 return self._remote.ctx_branch(ref.commit_id)
692 692
def _maybe_prepare_merge_workspace(
        self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
    """
    Ensure the shadow repository for this merge workspace exists.

    Clones it lazily on first use; the two ``unused_*`` ref parameters are
    kept for interface compatibility with other backends.

    Returns the filesystem path of the shadow repository.
    """
    shadow_path = self._get_shadow_repository_path(repo_id, workspace_id)
    if not os.path.exists(shadow_path):
        self._local_clone(shadow_path)
        log.debug('Prepared shadow repository in %s', shadow_path)
    return shadow_path
703 703
def _merge_repo(self, repo_id, workspace_id, target_ref,
                source_repo, source_ref, merge_message,
                merger_name, merger_email, dry_run=False,
                use_rebase=False, close_branch=False):
    """
    Merge *source_ref* of *source_repo* into *target_ref* of this repo.

    All work happens inside a shadow repository; only on success (and not
    in dry_run mode) is the result pushed back into the real target.
    Returns a MergeResponse describing possibility, success, the merge
    ref and a failure reason plus metadata for the UI.
    """

    log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
              'rebase' if use_rebase else 'merge', dry_run)
    # refuse to merge into a commit that is not a repository head
    if target_ref.commit_id not in self._heads():
        return MergeResponse(
            False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
            metadata={'target_ref': target_ref})

    try:
        # a named branch with more than one head is ambiguous for merging
        if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
            # NOTE(review): the separator '\n,' places the comma after the
            # newline — ',\n' may have been intended; confirm against the
            # template that renders this metadata.
            heads = '\n,'.join(self._heads(target_ref.name))
            metadata = {
                'target_ref': target_ref,
                'source_ref': source_ref,
                'heads': heads
            }
            return MergeResponse(
                False, False, None,
                MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                metadata=metadata)
    except CommitDoesNotExistError:
        log.exception('Failure when looking up branch heads on hg target')
        return MergeResponse(
            False, False, None, MergeFailureReason.MISSING_TARGET_REF,
            metadata={'target_ref': target_ref})

    shadow_repository_path = self._maybe_prepare_merge_workspace(
        repo_id, workspace_id, target_ref, source_ref)
    shadow_repo = self._get_shadow_instance(shadow_repository_path)

    log.debug('Pulling in target reference %s', target_ref)
    self._validate_pull_reference(target_ref)
    shadow_repo._local_pull(self.path, target_ref)

    try:
        log.debug('Pulling in source reference %s', source_ref)
        source_repo._validate_pull_reference(source_ref)
        shadow_repo._local_pull(source_repo.path, source_ref)
    except CommitDoesNotExistError:
        log.exception('Failure when doing local pull on hg shadow repo')
        return MergeResponse(
            False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
            metadata={'source_ref': source_ref})

    merge_ref = None
    merge_commit_id = None
    close_commit_id = None
    merge_failure_reason = MergeFailureReason.NONE
    metadata = {}

    # enforce that close branch should be used only in case we source from
    # an actual Branch
    close_branch = close_branch and source_ref.type == 'branch'

    # don't allow to close branch if source and target are the same
    close_branch = close_branch and source_ref.name != target_ref.name

    needs_push_on_close = False
    if close_branch and not use_rebase and not dry_run:
        try:
            close_commit_id, needs_push_on_close = shadow_repo._local_close(
                target_ref, merger_name, merger_email, source_ref)
            merge_possible = True
        except RepositoryError:
            log.exception('Failure when doing close branch on '
                          'shadow repo: %s', shadow_repo)
            merge_possible = False
            merge_failure_reason = MergeFailureReason.MERGE_FAILED
    else:
        merge_possible = True

    needs_push = False
    if merge_possible:
        try:
            merge_commit_id, needs_push = shadow_repo._local_merge(
                target_ref, merge_message, merger_name, merger_email,
                source_ref, use_rebase=use_rebase, dry_run=dry_run)
            merge_possible = True

            # read the state of the close action, if it
            # maybe required a push
            needs_push = needs_push or needs_push_on_close

            # Set a bookmark pointing to the merge commit. This bookmark
            # may be used to easily identify the last successful merge
            # commit in the shadow repository.
            shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
            merge_ref = Reference('book', 'pr-merge', merge_commit_id)
        except SubrepoMergeError:
            log.exception(
                'Subrepo merge error during local merge on hg shadow repo.')
            merge_possible = False
            merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
            needs_push = False
        except RepositoryError:
            log.exception('Failure when doing local merge on hg shadow repo')
            merge_possible = False
            merge_failure_reason = MergeFailureReason.MERGE_FAILED
            needs_push = False

    if merge_possible and not dry_run:
        if needs_push:
            # In case the target is a bookmark, update it, so after pushing
            # the bookmarks is also updated in the target.
            if target_ref.type == 'book':
                shadow_repo.bookmark(
                    target_ref.name, revision=merge_commit_id)
            try:
                shadow_repo_with_hooks = self._get_shadow_instance(
                    shadow_repository_path,
                    enable_hooks=True)
                # This is the actual merge action, we push from shadow
                # into origin.
                # Note: the push_branches option will push any new branch
                # defined in the source repository to the target. This may
                # be dangerous as branches are permanent in Mercurial.
                # This feature was requested in issue #441.
                shadow_repo_with_hooks._local_push(
                    merge_commit_id, self.path, push_branches=True,
                    enable_hooks=True)

                # maybe we also need to push the close_commit_id
                if close_commit_id:
                    shadow_repo_with_hooks._local_push(
                        close_commit_id, self.path, push_branches=True,
                        enable_hooks=True)
                merge_succeeded = True
            except RepositoryError:
                log.exception(
                    'Failure when doing local push from the shadow '
                    'repository to the target repository at %s.', self.path)
                merge_succeeded = False
                merge_failure_reason = MergeFailureReason.PUSH_FAILED
                metadata['target'] = 'hg shadow repo'
                metadata['merge_commit'] = merge_commit_id
        else:
            merge_succeeded = True
    else:
        merge_succeeded = False

    return MergeResponse(
        merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
        metadata=metadata)
846 851
def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
    """
    Instantiate a MercurialRepository over the shadow repo path.

    Unless *enable_hooks* is set, the hooks section is stripped from the
    copied config so shadow operations do not fire real hooks.
    """
    shadow_config = self.config.copy()
    if not enable_hooks:
        shadow_config.clear_section('hooks')
    return MercurialRepository(shadow_repository_path, shadow_config)
852 857
def _validate_pull_reference(self, reference):
    """
    Ensure *reference* resolves to a known bookmark, branch or commit.

    Raises CommitDoesNotExistError when nothing matches. Checks are
    short-circuited in the original order: bookmarks, then branches,
    then a commit lookup (which may itself raise).
    """
    is_known = (reference.name in self.bookmarks or
                reference.name in self.branches or
                self.get_commit(reference.commit_id))
    if not is_known:
        raise CommitDoesNotExistError(
            'Unknown branch, bookmark or commit id')
859 864
def _local_pull(self, repository_path, reference):
    """
    Fetch a branch, bookmark or commit from a local repository.

    Raises ValueError when asked to pull from this repository itself.
    """
    repository_path = os.path.abspath(repository_path)
    if repository_path == self.path:
        raise ValueError('Cannot pull from the same repository')

    # map the reference type onto the matching `hg pull` option;
    # anything that is not a bookmark or branch is pulled by revision
    if reference.type == 'book':
        options = {'bookmark': [reference.name]}
    elif reference.type == 'branch':
        options = {'branch': [reference.name]}
    else:
        options = {'revision': [reference.commit_id]}

    self._remote.pull_cmd(repository_path, hooks=False, **options)
    self._remote.invalidate_vcs_cache()
883 888
def bookmark(self, bookmark, revision=None):
    """Set *bookmark* (optionally at *revision*) and invalidate the vcs cache."""
    # the remote layer expects byte strings (Python 2 era codebase)
    name = safe_str(bookmark) if isinstance(bookmark, unicode) else bookmark
    self._remote.bookmark(name, revision=revision)
    self._remote.invalidate_vcs_cache()
889 894
def get_path_permissions(self, username):
    """
    Build a path permission checker for *username* from the repository's
    ``.hg/hgacl`` file, or return None when no ACL file exists.

    Raises RepositoryRequirementError when the ACL file cannot be parsed.
    """
    hgacl_file = os.path.join(self.path, '.hg/hgacl')

    def read_patterns(suffix):
        # read '<username><suffix>' from [narrowhgacl], falling back to
        # 'default<suffix>'; both entries are optional
        svalue = None
        try:
            svalue = hgacl.get('narrowhgacl', username + suffix)
        except configparser.NoOptionError:
            try:
                svalue = hgacl.get('narrowhgacl', 'default' + suffix)
            except configparser.NoOptionError:
                pass
        if not svalue:
            return None
        result = ['/']
        for pattern in svalue.split():
            result.append(pattern)
            # a literal (non-glob) pattern also covers everything below it
            if '*' not in pattern and '?' not in pattern:
                result.append(pattern + '/*')
        return result

    if os.path.exists(hgacl_file):
        try:
            # NOTE: `hgacl` is closed over by read_patterns above
            hgacl = configparser.RawConfigParser()
            hgacl.read(hgacl_file)

            includes = read_patterns('.includes')
            excludes = read_patterns('.excludes')
            return BasePathPermissionChecker.create_from_patterns(
                includes, excludes)
        except BaseException as e:
            msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                hgacl_file, self.name, e)
            raise exceptions.RepositoryRequirementError(msg)
    else:
        return None
926 931
927 932
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """Commit collection generator that addresses commits by numeric index."""

    def _commit_factory(self, commit_id):
        # hg identifies commits by local revision number, so the id is
        # passed as commit_idx rather than commit_id
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,1731 +1,1739 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2012-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26 26
27 27 import json
28 28 import logging
29 29 import datetime
30 30 import urllib
31 31 import collections
32 32
33 33 from pyramid import compat
34 34 from pyramid.threadlocal import get_current_request
35 35
36 36 from rhodecode import events
37 37 from rhodecode.translation import lazy_ugettext
38 38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 39 from rhodecode.lib import audit_logger
40 40 from rhodecode.lib.compat import OrderedDict
41 41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 42 from rhodecode.lib.markup_renderer import (
43 43 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 44 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
45 45 from rhodecode.lib.vcs.backends.base import (
46 46 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
47 47 from rhodecode.lib.vcs.conf import settings as vcs_settings
48 48 from rhodecode.lib.vcs.exceptions import (
49 49 CommitDoesNotExistError, EmptyRepositoryError)
50 50 from rhodecode.model import BaseModel
51 51 from rhodecode.model.changeset_status import ChangesetStatusModel
52 52 from rhodecode.model.comment import CommentsModel
53 53 from rhodecode.model.db import (
54 54 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
55 55 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
56 56 from rhodecode.model.meta import Session
57 57 from rhodecode.model.notification import NotificationModel, \
58 58 EmailNotificationModel
59 59 from rhodecode.model.scm import ScmModel
60 60 from rhodecode.model.settings import VcsSettingsModel
61 61
62 62
63 63 log = logging.getLogger(__name__)
64 64
65 65
# Data structure to hold the response data when updating commits during a pull
# request update.
# Fields: executed (bool, whether the update ran), reason (an
# UpdateFailureReason code), new/old (pull request versions), changes
# (commit-range changes), source_changed/target_changed (bools marking
# which side of the PR moved).
UpdateResponse = collections.namedtuple('UpdateResponse', [
    'executed', 'reason', 'new', 'old', 'changes',
    'source_changed', 'target_changed'])
71 71
72 72
73 73 class PullRequestModel(BaseModel):
74 74
# model class handled by this BaseModel subclass
cls = PullRequest

# default number of context lines shown in PR diffs
DIFF_CONTEXT = diffs.DEFAULT_CONTEXT

# human-readable (lazily translated) messages per UpdateFailureReason code
UPDATE_STATUS_MESSAGES = {
    UpdateFailureReason.NONE: lazy_ugettext(
        'Pull request update successful.'),
    UpdateFailureReason.UNKNOWN: lazy_ugettext(
        'Pull request update failed because of an unknown error.'),
    UpdateFailureReason.NO_CHANGE: lazy_ugettext(
        'No update needed because the source and target have not changed.'),
    UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
        'Pull request cannot be updated because the reference type is '
        'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
    UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
        'This pull request cannot be updated because the target '
        'reference is missing.'),
    UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
        'This pull request cannot be updated because the source '
        'reference is missing.'),
}
# reference types accepted in PR source/target specifications
REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
# subset of REF_TYPES that may be used when updating an existing PR
UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
98 98
def __get_pull_request(self, pull_request):
    """Resolve *pull_request* (id or instance) to a PR / PR-version object."""
    accepted_types = (PullRequest, PullRequestVersion)
    return self._get_instance(accepted_types, pull_request)
102 102
def _check_perms(self, perms, pull_request, user, api=False):
    """
    Check whether *user* holds any of *perms* on the PR's target repo,
    using the web or the API permission checker depending on *api*.
    """
    checker_cls = h.HasRepoPermissionAnyApi if api else h.HasRepoPermissionAny
    return checker_cls(*perms)(
        user=user, repo_name=pull_request.target_repo.repo_name)
110 110
def check_user_read(self, pull_request, user, api=False):
    """Return True when *user* has at least read access on the target repo."""
    needed = ('repository.admin', 'repository.write', 'repository.read')
    return self._check_perms(needed, pull_request, user, api)
114 114
def check_user_merge(self, pull_request, user, api=False):
    """Return True when *user* may merge: write access or (hg) admin."""
    needed = ('repository.admin', 'repository.write', 'hg.admin')
    return self._check_perms(needed, pull_request, user, api)
118 118
def check_user_update(self, pull_request, user, api=False):
    """Users with merge rights, or the PR owner, may update the PR."""
    is_owner = user.user_id == pull_request.user_id
    if self.check_user_merge(pull_request, user, api):
        return True
    return is_owner
122 122
def check_user_delete(self, pull_request, user):
    """Repository admins, or the PR owner, may delete the PR."""
    is_owner = user.user_id == pull_request.user_id
    if self._check_perms(('repository.admin',), pull_request, user):
        return True
    return is_owner
127 127
def check_user_change_status(self, pull_request, user, api=False):
    """Registered reviewers, plus anyone allowed to update, may change status."""
    is_reviewer = user.user_id in [
        x.user_id for x in pull_request.reviewers]
    if self.check_user_update(pull_request, user, api):
        return True
    return is_reviewer
132 132
def check_user_comment(self, pull_request, user):
    """Anyone with read access, or the PR owner, may comment."""
    is_owner = user.user_id == pull_request.user_id
    if self.check_user_read(pull_request, user):
        return True
    return is_owner
136 136
def get(self, pull_request):
    """Public accessor: resolve *pull_request* (id or instance) to an object."""
    return self.__get_pull_request(pull_request)
139 139
def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
                           opened_by=None, order_by=None,
                           order_dir='desc', only_created=True):
    """
    Build the base SQLAlchemy query shared by count_all/get_all.

    *repo_name* filters by source or target repo (per *source*); *statuses*
    and *opened_by* narrow further; *only_created* restricts to PRs whose
    creation simulation finished; *order_by*/*order_dir* control ordering.
    """
    repo = self._get_repo(repo_name) if repo_name else None

    query = PullRequest.query()

    # filter on the source or the target side of the PR
    if repo and source:
        query = query.filter(PullRequest.source_repo == repo)
    elif repo:
        query = query.filter(PullRequest.target_repo == repo)

    # status filter (e.g. closed / opened)
    if statuses:
        query = query.filter(PullRequest.status.in_(statuses))

    # author filter
    if opened_by:
        query = query.filter(PullRequest.user_id.in_(opened_by))

    # hide PRs still stuck in the "creating" state
    if only_created:
        query = query.filter(
            PullRequest.pull_request_state == PullRequest.STATE_CREATED)

    if order_by:
        order_map = {
            'name_raw': PullRequest.pull_request_id,
            'id': PullRequest.pull_request_id,
            'title': PullRequest.title,
            'updated_on_raw': PullRequest.updated_on,
            'target_repo': PullRequest.target_repo_id
        }
        column = order_map[order_by]
        order_expr = column.asc() if order_dir == 'asc' else column.desc()
        query = query.order_by(order_expr)

    return query
181 181
def count_all(self, repo_name, source=False, statuses=None,
              opened_by=None):
    """
    Count pull requests for a specific repository.

    :param repo_name: target or source repo
    :param source: boolean flag to specify if repo_name refers to source
    :param statuses: list of pull request statuses
    :param opened_by: author user of the pull request
    :returns: int number of pull requests
    """
    query = self._prepare_get_all_query(
        repo_name, source=source, statuses=statuses, opened_by=opened_by)
    return query.count()
197 197
def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
            offset=0, length=None, order_by=None, order_dir='desc'):
    """
    Fetch pull requests for a specific repository.

    :param repo_name: target or source repo
    :param source: boolean flag to specify if repo_name refers to source
    :param statuses: list of pull request statuses
    :param opened_by: author user of the pull request
    :param offset: pagination offset
    :param length: length of returned list
    :param order_by: order of the returned list
    :param order_dir: 'asc' or 'desc' ordering direction
    :returns: list of pull requests
    """
    query = self._prepare_get_all_query(
        repo_name, source=source, statuses=statuses, opened_by=opened_by,
        order_by=order_by, order_dir=order_dir)

    if not length:
        return query.all()
    return query.limit(length).offset(offset).all()
223 223
def count_awaiting_review(self, repo_name, source=False, statuses=None,
                          opened_by=None):
    """
    Count pull requests of a repository that still await review.

    :param repo_name: target or source repo
    :param source: boolean flag to specify if repo_name refers to source
    :param statuses: list of pull request statuses
    :param opened_by: author user of the pull request
    :returns: int number of pull requests
    """
    awaiting = self.get_awaiting_review(
        repo_name, source=source, statuses=statuses, opened_by=opened_by)
    return len(awaiting)
240 240
def get_awaiting_review(self, repo_name, source=False, statuses=None,
                        opened_by=None, offset=0, length=None,
                        order_by=None, order_dir='desc'):
    """
    Fetch pull requests of a repository that still await review.

    :param repo_name: target or source repo
    :param source: boolean flag to specify if repo_name refers to source
    :param statuses: list of pull request statuses
    :param opened_by: author user of the pull request
    :param offset: pagination offset
    :param length: length of returned list
    :param order_by: order of the returned list
    :param order_dir: 'asc' or 'desc' ordering direction
    :returns: list of pull requests
    """
    candidates = self.get_all(
        repo_name, source=source, statuses=statuses, opened_by=opened_by,
        order_by=order_by, order_dir=order_dir)

    # a PR awaits review while its computed status is not-reviewed or
    # under-review
    awaiting_statuses = (ChangesetStatus.STATUS_NOT_REVIEWED,
                         ChangesetStatus.STATUS_UNDER_REVIEW)
    awaiting = [pr for pr in candidates
                if pr.calculated_review_status() in awaiting_statuses]

    if length:
        return awaiting[offset:offset + length]
    return awaiting
272 272
def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
                             opened_by=None, user_id=None):
    """
    Count pull requests of a repository awaiting review by a given user.

    :param repo_name: target or source repo
    :param source: boolean flag to specify if repo_name refers to source
    :param statuses: list of pull request statuses
    :param opened_by: author user of the pull request
    :param user_id: reviewer user of the pull request
    :returns: int number of pull requests
    """
    awaiting = self.get_awaiting_my_review(
        repo_name, source=source, statuses=statuses, opened_by=opened_by,
        user_id=user_id)
    return len(awaiting)
291 291
def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
                           opened_by=None, user_id=None, offset=0,
                           length=None, order_by=None, order_dir='desc'):
    """
    Fetch pull requests of a repository awaiting review by a given user.

    :param repo_name: target or source repo
    :param source: boolean flag to specify if repo_name refers to source
    :param statuses: list of pull request statuses
    :param opened_by: author user of the pull request
    :param user_id: reviewer user of the pull request
    :param offset: pagination offset
    :param length: length of returned list
    :param order_by: order of the returned list
    :param order_dir: 'asc' or 'desc' ordering direction
    :returns: list of pull requests
    """
    candidates = self.get_all(
        repo_name, source=source, statuses=statuses, opened_by=opened_by,
        order_by=order_by, order_dir=order_dir)

    # keep only PRs where user_id is registered as a (pending) reviewer
    not_reviewed = PullRequestModel().get_not_reviewed(user_id)
    my_participation = [pr for pr in candidates if pr in not_reviewed]

    if length:
        return my_participation[offset:offset + length]
    return my_participation
324 324
def get_not_reviewed(self, user_id):
    """Pull requests on which *user_id* is registered as a reviewer."""
    reviewer_rows = PullRequestReviewers.query().filter(
        PullRequestReviewers.user_id == user_id).all()
    return [row.pull_request for row in reviewer_rows]
330 330
def _prepare_participating_query(self, user_id=None, statuses=None,
                                 order_by=None, order_dir='desc'):
    """
    Build the query for pull requests the user authored or reviews,
    optionally filtered by *statuses* and ordered.
    """
    query = PullRequest.query()
    if user_id:
        # PRs where the user is author OR appears in the reviewers table
        reviewers_subquery = Session().query(
            PullRequestReviewers.pull_request_id).filter(
            PullRequestReviewers.user_id == user_id).subquery()
        user_filter = or_(
            PullRequest.user_id == user_id,
            PullRequest.pull_request_id.in_(reviewers_subquery)
        )
        query = PullRequest.query().filter(user_filter)

    # status filter (e.g. closed / opened)
    if statuses:
        query = query.filter(PullRequest.status.in_(statuses))

    if order_by:
        order_map = {
            'name_raw': PullRequest.pull_request_id,
            'title': PullRequest.title,
            'updated_on_raw': PullRequest.updated_on,
            'target_repo': PullRequest.target_repo_id
        }
        column = order_map[order_by]
        order_expr = column.asc() if order_dir == 'asc' else column.desc()
        query = query.order_by(order_expr)

    return query
361 361
def count_im_participating_in(self, user_id=None, statuses=None):
    """Count pull requests the user authored or reviews."""
    query = self._prepare_participating_query(user_id, statuses=statuses)
    return query.count()
365 365
def get_im_participating_in(
        self, user_id=None, statuses=None, offset=0,
        length=None, order_by=None, order_dir='desc'):
    """
    Get all Pull requests that i'm participating in, or i have opened
    """
    query = self._prepare_participating_query(
        user_id, statuses=statuses, order_by=order_by,
        order_dir=order_dir)

    if not length:
        return query.all()
    return query.limit(length).offset(offset).all()
383 383
def get_versions(self, pull_request):
    """
    returns versions of pull request sorted by version id ascending
    (the original docstring said "descending", but the query orders
    pull_request_version_id.asc())
    """
    return PullRequestVersion.query()\
        .filter(PullRequestVersion.pull_request == pull_request)\
        .order_by(PullRequestVersion.pull_request_version_id.asc())\
        .all()
392 392
def get_pr_version(self, pull_request_id, version=None):
    """
    Resolve a pull request together with an optional version.

    :param pull_request_id: id of the pull request
    :param version: a PullRequestVersion id, the string 'latest', or None
    :returns: tuple of (original PR, resolved PR/version object,
        display object, at_version marker)
    """
    at_version = None

    if version and version == 'latest':
        # 'latest' resolves to the live PR itself
        pull_request_ver = PullRequest.get(pull_request_id)
        pull_request_obj = pull_request_ver
        _org_pull_request_obj = pull_request_obj
        at_version = 'latest'
    elif version:
        # a concrete historical version; 404 when it does not exist
        pull_request_ver = PullRequestVersion.get_or_404(version)
        pull_request_obj = pull_request_ver
        _org_pull_request_obj = pull_request_ver.pull_request
        at_version = pull_request_ver.pull_request_version_id
    else:
        _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
            pull_request_id)

    pull_request_display_obj = PullRequest.get_pr_display_object(
        pull_request_obj, _org_pull_request_obj)

    return _org_pull_request_obj, pull_request_obj, \
        pull_request_display_obj, at_version
415 415
def create(self, created_by, source_repo, source_ref, target_repo,
           target_ref, revisions, reviewers, title, description=None,
           description_renderer=None,
           reviewer_data=None, translator=None, auth_user=None):
    """
    Create a new pull request with its reviewers, set initial commit
    statuses, run a merge simulation, and fire creation notifications.

    :param created_by: user (or id) creating the PR
    :param reviewers: iterable of (user_id, reasons, mandatory, rules)
    :returns: the persisted PullRequest in STATE_CREATED
    """
    translator = translator or get_current_request().translate

    created_by_user = self._get_user(created_by)
    auth_user = auth_user or created_by_user.AuthUser()
    source_repo = self._get_repo(source_repo)
    target_repo = self._get_repo(target_repo)

    pull_request = PullRequest()
    pull_request.source_repo = source_repo
    pull_request.source_ref = source_ref
    pull_request.target_repo = target_repo
    pull_request.target_ref = target_ref
    pull_request.revisions = revisions
    pull_request.title = title
    pull_request.description = description
    pull_request.description_renderer = description_renderer
    pull_request.author = created_by_user
    pull_request.reviewer_data = reviewer_data
    pull_request.pull_request_state = pull_request.STATE_CREATING
    Session().add(pull_request)
    Session().flush()

    reviewer_ids = set()
    # members / reviewers
    for reviewer_object in reviewers:
        user_id, reasons, mandatory, rules = reviewer_object
        user = self._get_user(user_id)

        # skip duplicates
        if user.user_id in reviewer_ids:
            continue

        reviewer_ids.add(user.user_id)

        reviewer = PullRequestReviewers()
        reviewer.user = user
        reviewer.pull_request = pull_request
        reviewer.reasons = reasons
        reviewer.mandatory = mandatory

        # NOTE(marcink): pick only first rule for now
        rule_id = list(rules)[0] if rules else None
        rule = RepoReviewRule.get(rule_id) if rule_id else None
        if rule:
            review_group = rule.user_group_vote_rule(user_id)
            # we check if this particular reviewer is member of a voting group
            if review_group:
                # NOTE(marcink):
                # can be that user is member of more but we pick the first same,
                # same as default reviewers algo
                review_group = review_group[0]

                rule_data = {
                    'rule_name':
                        rule.review_rule_name,
                    'rule_user_group_entry_id':
                        review_group.repo_review_rule_users_group_id,
                    'rule_user_group_name':
                        review_group.users_group.users_group_name,
                    'rule_user_group_members':
                        [x.user.username for x in review_group.users_group.members],
                    'rule_user_group_members_id':
                        [x.user.user_id for x in review_group.users_group.members],
                }
                # e.g {'vote_rule': -1, 'mandatory': True}
                rule_data.update(review_group.rule_data())

                reviewer.rule_data = rule_data

        Session().add(reviewer)
        Session().flush()

    # Set approval status to "Under Review" for all commits which are
    # part of this pull request.
    ChangesetStatusModel().set_status(
        repo=target_repo,
        status=ChangesetStatus.STATUS_UNDER_REVIEW,
        user=created_by_user,
        pull_request=pull_request
    )
    # we commit early at this point. This has to do with a fact
    # that before queries do some row-locking. And because of that
    # we need to commit and finish transaction before below validate call
    # that for large repos could be long resulting in long row locks
    Session().commit()

    # prepare workspace, and run initial merge simulation. Set state during that
    # operation
    pull_request = PullRequest.get(pull_request.pull_request_id)

    # set as merging, for simulation, and if finished to created so we mark
    # simulation is working fine
    with pull_request.set_state(PullRequest.STATE_MERGING,
                                final_state=PullRequest.STATE_CREATED):
        MergeCheck.validate(
            pull_request, auth_user=auth_user, translator=translator)

    self.notify_reviewers(pull_request, reviewer_ids)
    self.trigger_pull_request_hook(
        pull_request, created_by_user, 'create')

    creation_data = pull_request.get_api_data(with_merge_state=False)
    self._log_audit_action(
        'repo.pull_request.create', {'data': creation_data},
        auth_user, pull_request)

    return pull_request
527 527
528 528 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
529 529 pull_request = self.__get_pull_request(pull_request)
530 530 target_scm = pull_request.target_repo.scm_instance()
531 531 if action == 'create':
532 532 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
533 533 elif action == 'merge':
534 534 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
535 535 elif action == 'close':
536 536 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
537 537 elif action == 'review_status_change':
538 538 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
539 539 elif action == 'update':
540 540 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
541 541 elif action == 'comment':
542 542 # dummy hook ! for comment. We want this function to handle all cases
543 543 def trigger_hook(*args, **kwargs):
544 544 pass
545 545 comment = data['comment']
546 546 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
547 547 else:
548 548 return
549 549
550 550 trigger_hook(
551 551 username=user.username,
552 552 repo_name=pull_request.target_repo.repo_name,
553 553 repo_alias=target_scm.alias,
554 554 pull_request=pull_request,
555 555 data=data)
556 556
557 557 def _get_commit_ids(self, pull_request):
558 558 """
559 559 Return the commit ids of the merged pull request.
560 560
561 561 This method is not dealing correctly yet with the lack of autoupdates
562 562 nor with the implicit target updates.
563 563 For example: if a commit in the source repo is already in the target it
564 564 will be reported anyways.
565 565 """
566 566 merge_rev = pull_request.merge_rev
567 567 if merge_rev is None:
568 568 raise ValueError('This pull request was not merged yet')
569 569
570 570 commit_ids = list(pull_request.revisions)
571 571 if merge_rev not in commit_ids:
572 572 commit_ids.append(merge_rev)
573 573
574 574 return commit_ids
575 575
576 576 def merge_repo(self, pull_request, user, extras):
577 577 log.debug("Merging pull request %s", pull_request.pull_request_id)
578 578 extras['user_agent'] = 'internal-merge'
579 579 merge_state = self._merge_pull_request(pull_request, user, extras)
580 580 if merge_state.executed:
581 581 log.debug("Merge was successful, updating the pull request comments.")
582 582 self._comment_and_close_pr(pull_request, user, merge_state)
583 583
584 584 self._log_audit_action(
585 585 'repo.pull_request.merge',
586 586 {'merge_state': merge_state.__dict__},
587 587 user, pull_request)
588 588
589 589 else:
590 590 log.warn("Merge failed, not updating the pull request.")
591 591 return merge_state
592 592
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Run the actual merge of the pull request via the vcs backend.

        :param extras: hook environment dict, passed to the in-repo hooks
            through the callback daemon
        :param merge_msg: optional template overriding
            ``vcs_settings.MERGE_MESSAGE_TMPL`` for the merge commit message
        :return: merge state object returned by ``target_vcs.merge``
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        # render the merge commit message from the (possibly overridden) template
        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)

        # make sure the target reference points at the current state
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        # hooks fired during the merge talk back over this daemon
        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            user_name = user.short_contact
            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
633 633
    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        """
        After a successful merge: record the merge commit on the pull
        request, add a closing comment, invalidate caches and fire the
        'merge' hook.
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        # closing_pr=True makes this comment also close the pull request
        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self.trigger_pull_request_hook(pull_request, user, 'merge')
655 655
656 656 def has_valid_update_type(self, pull_request):
657 657 source_ref_type = pull_request.source_ref_parts.type
658 658 return source_ref_type in self.REF_TYPES
659 659
660 660 def update_commits(self, pull_request):
661 661 """
662 662 Get the updated list of commits for the pull request
663 663 and return the new pull request version and the list
664 664 of commits processed by this update action
665 665 """
666 666 pull_request = self.__get_pull_request(pull_request)
667 667 source_ref_type = pull_request.source_ref_parts.type
668 668 source_ref_name = pull_request.source_ref_parts.name
669 669 source_ref_id = pull_request.source_ref_parts.commit_id
670 670
671 671 target_ref_type = pull_request.target_ref_parts.type
672 672 target_ref_name = pull_request.target_ref_parts.name
673 673 target_ref_id = pull_request.target_ref_parts.commit_id
674 674
675 675 if not self.has_valid_update_type(pull_request):
676 676 log.debug("Skipping update of pull request %s due to ref type: %s",
677 677 pull_request, source_ref_type)
678 678 return UpdateResponse(
679 679 executed=False,
680 680 reason=UpdateFailureReason.WRONG_REF_TYPE,
681 681 old=pull_request, new=None, changes=None,
682 682 source_changed=False, target_changed=False)
683 683
684 684 # source repo
685 685 source_repo = pull_request.source_repo.scm_instance()
686 686 try:
687 687 source_commit = source_repo.get_commit(commit_id=source_ref_name)
688 688 except CommitDoesNotExistError:
689 689 return UpdateResponse(
690 690 executed=False,
691 691 reason=UpdateFailureReason.MISSING_SOURCE_REF,
692 692 old=pull_request, new=None, changes=None,
693 693 source_changed=False, target_changed=False)
694 694
695 695 source_changed = source_ref_id != source_commit.raw_id
696 696
697 697 # target repo
698 698 target_repo = pull_request.target_repo.scm_instance()
699 699 try:
700 700 target_commit = target_repo.get_commit(commit_id=target_ref_name)
701 701 except CommitDoesNotExistError:
702 702 return UpdateResponse(
703 703 executed=False,
704 704 reason=UpdateFailureReason.MISSING_TARGET_REF,
705 705 old=pull_request, new=None, changes=None,
706 706 source_changed=False, target_changed=False)
707 707 target_changed = target_ref_id != target_commit.raw_id
708 708
709 709 if not (source_changed or target_changed):
710 710 log.debug("Nothing changed in pull request %s", pull_request)
711 711 return UpdateResponse(
712 712 executed=False,
713 713 reason=UpdateFailureReason.NO_CHANGE,
714 714 old=pull_request, new=None, changes=None,
715 715 source_changed=target_changed, target_changed=source_changed)
716 716
717 717 change_in_found = 'target repo' if target_changed else 'source repo'
718 718 log.debug('Updating pull request because of change in %s detected',
719 719 change_in_found)
720 720
721 721 # Finally there is a need for an update, in case of source change
722 722 # we create a new version, else just an update
723 723 if source_changed:
724 724 pull_request_version = self._create_version_from_snapshot(pull_request)
725 725 self._link_comments_to_version(pull_request_version)
726 726 else:
727 727 try:
728 728 ver = pull_request.versions[-1]
729 729 except IndexError:
730 730 ver = None
731 731
732 732 pull_request.pull_request_version_id = \
733 733 ver.pull_request_version_id if ver else None
734 734 pull_request_version = pull_request
735 735
736 736 try:
737 737 if target_ref_type in self.REF_TYPES:
738 738 target_commit = target_repo.get_commit(target_ref_name)
739 739 else:
740 740 target_commit = target_repo.get_commit(target_ref_id)
741 741 except CommitDoesNotExistError:
742 742 return UpdateResponse(
743 743 executed=False,
744 744 reason=UpdateFailureReason.MISSING_TARGET_REF,
745 745 old=pull_request, new=None, changes=None,
746 746 source_changed=source_changed, target_changed=target_changed)
747 747
748 748 # re-compute commit ids
749 749 old_commit_ids = pull_request.revisions
750 750 pre_load = ["author", "branch", "date", "message"]
751 751 commit_ranges = target_repo.compare(
752 752 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
753 753 pre_load=pre_load)
754 754
755 755 ancestor = target_repo.get_common_ancestor(
756 756 target_commit.raw_id, source_commit.raw_id, source_repo)
757 757
758 758 pull_request.source_ref = '%s:%s:%s' % (
759 759 source_ref_type, source_ref_name, source_commit.raw_id)
760 760 pull_request.target_ref = '%s:%s:%s' % (
761 761 target_ref_type, target_ref_name, ancestor)
762 762
763 763 pull_request.revisions = [
764 764 commit.raw_id for commit in reversed(commit_ranges)]
765 765 pull_request.updated_on = datetime.datetime.now()
766 766 Session().add(pull_request)
767 767 new_commit_ids = pull_request.revisions
768 768
769 769 old_diff_data, new_diff_data = self._generate_update_diffs(
770 770 pull_request, pull_request_version)
771 771
772 772 # calculate commit and file changes
773 773 changes = self._calculate_commit_id_changes(
774 774 old_commit_ids, new_commit_ids)
775 775 file_changes = self._calculate_file_changes(
776 776 old_diff_data, new_diff_data)
777 777
778 778 # set comments as outdated if DIFFS changed
779 779 CommentsModel().outdate_comments(
780 780 pull_request, old_diff_data=old_diff_data,
781 781 new_diff_data=new_diff_data)
782 782
783 783 commit_changes = (changes.added or changes.removed)
784 784 file_node_changes = (
785 785 file_changes.added or file_changes.modified or file_changes.removed)
786 786 pr_has_changes = commit_changes or file_node_changes
787 787
788 788 # Add an automatic comment to the pull request, in case
789 789 # anything has changed
790 790 if pr_has_changes:
791 791 update_comment = CommentsModel().create(
792 792 text=self._render_update_message(changes, file_changes),
793 793 repo=pull_request.target_repo,
794 794 user=pull_request.author,
795 795 pull_request=pull_request,
796 796 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
797 797
798 798 # Update status to "Under Review" for added commits
799 799 for commit_id in changes.added:
800 800 ChangesetStatusModel().set_status(
801 801 repo=pull_request.source_repo,
802 802 status=ChangesetStatus.STATUS_UNDER_REVIEW,
803 803 comment=update_comment,
804 804 user=pull_request.author,
805 805 pull_request=pull_request,
806 806 revision=commit_id)
807 807
808 808 log.debug(
809 809 'Updated pull request %s, added_ids: %s, common_ids: %s, '
810 810 'removed_ids: %s', pull_request.pull_request_id,
811 811 changes.added, changes.common, changes.removed)
812 812 log.debug(
813 813 'Updated pull request with the following file changes: %s',
814 814 file_changes)
815 815
816 816 log.info(
817 817 "Updated pull request %s from commit %s to commit %s, "
818 818 "stored new version %s of this pull request.",
819 819 pull_request.pull_request_id, source_ref_id,
820 820 pull_request.source_ref_parts.commit_id,
821 821 pull_request_version.pull_request_version_id)
822 822 Session().commit()
823 823 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
824 824
825 825 return UpdateResponse(
826 826 executed=True, reason=UpdateFailureReason.NONE,
827 827 old=pull_request, new=pull_request_version, changes=changes,
828 828 source_changed=source_changed, target_changed=target_changed)
829 829
830 830 def _create_version_from_snapshot(self, pull_request):
831 831 version = PullRequestVersion()
832 832 version.title = pull_request.title
833 833 version.description = pull_request.description
834 834 version.status = pull_request.status
835 835 version.pull_request_state = pull_request.pull_request_state
836 836 version.created_on = datetime.datetime.now()
837 837 version.updated_on = pull_request.updated_on
838 838 version.user_id = pull_request.user_id
839 839 version.source_repo = pull_request.source_repo
840 840 version.source_ref = pull_request.source_ref
841 841 version.target_repo = pull_request.target_repo
842 842 version.target_ref = pull_request.target_ref
843 843
844 844 version._last_merge_source_rev = pull_request._last_merge_source_rev
845 845 version._last_merge_target_rev = pull_request._last_merge_target_rev
846 846 version.last_merge_status = pull_request.last_merge_status
847 847 version.shadow_merge_ref = pull_request.shadow_merge_ref
848 848 version.merge_rev = pull_request.merge_rev
849 849 version.reviewer_data = pull_request.reviewer_data
850 850
851 851 version.revisions = pull_request.revisions
852 852 version.pull_request = pull_request
853 853 Session().add(version)
854 854 Session().flush()
855 855
856 856 return version
857 857
    def _generate_update_diffs(self, pull_request, pull_request_version):
        """
        Compute the diff of the stored version and of the current pull
        request state.

        :return: tuple of prepared ``diffs.DiffProcessor`` instances
            ``(old_diff_data, new_diff_data)``
        """

        # extra context lines needed so existing inline comments stay visible
        diff_context = (
            self.DIFF_CONTEXT +
            CommentsModel.needed_extra_diff_context())
        hide_whitespace_changes = False
        # diff of the previously stored (old) version
        source_repo = pull_request_version.source_repo
        source_ref_id = pull_request_version.source_ref_parts.commit_id
        target_ref_id = pull_request_version.target_ref_parts.commit_id
        old_diff = self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)

        # diff of the current pull request state
        source_repo = pull_request.source_repo
        source_ref_id = pull_request.source_ref_parts.commit_id
        target_ref_id = pull_request.target_ref_parts.commit_id

        new_diff = self._get_diff_from_pr_or_version(
            source_repo, source_ref_id, target_ref_id,
            hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)

        old_diff_data = diffs.DiffProcessor(old_diff)
        old_diff_data.prepare()
        new_diff_data = diffs.DiffProcessor(new_diff)
        new_diff_data.prepare()

        return old_diff_data, new_diff_data
885 885
    def _link_comments_to_version(self, pull_request_version):
        """
        Link all unlinked comments of this pull request to the given version.

        :param pull_request_version: The `PullRequestVersion` to which
            the comments shall be linked.

        """
        pull_request = pull_request_version.pull_request
        # `pull_request_version == None` selects comments not yet attached
        # to any version
        comments = ChangesetComment.query()\
            .filter(
                # TODO: johbo: Should we query for the repo at all here?
                # Pending decision on how comments of PRs are to be related
                # to either the source repo, the target repo or no repo at all.
                ChangesetComment.repo_id == pull_request.target_repo.repo_id,
                ChangesetComment.pull_request == pull_request,
                ChangesetComment.pull_request_version == None)\
            .order_by(ChangesetComment.comment_id.asc())

        # TODO: johbo: Find out why this breaks if it is done in a bulk
        # operation.
        for comment in comments:
            comment.pull_request_version_id = (
                pull_request_version.pull_request_version_id)
            Session().add(comment)
911 911
912 912 def _calculate_commit_id_changes(self, old_ids, new_ids):
913 913 added = [x for x in new_ids if x not in old_ids]
914 914 common = [x for x in new_ids if x in old_ids]
915 915 removed = [x for x in old_ids if x not in new_ids]
916 916 total = new_ids
917 917 return ChangeTuple(added, common, removed, total)
918 918
919 919 def _calculate_file_changes(self, old_diff_data, new_diff_data):
920 920
921 921 old_files = OrderedDict()
922 922 for diff_data in old_diff_data.parsed_diff:
923 923 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
924 924
925 925 added_files = []
926 926 modified_files = []
927 927 removed_files = []
928 928 for diff_data in new_diff_data.parsed_diff:
929 929 new_filename = diff_data['filename']
930 930 new_hash = md5_safe(diff_data['raw_diff'])
931 931
932 932 old_hash = old_files.get(new_filename)
933 933 if not old_hash:
934 934 # file is not present in old diff, means it's added
935 935 added_files.append(new_filename)
936 936 else:
937 937 if new_hash != old_hash:
938 938 modified_files.append(new_filename)
939 939 # now remove a file from old, since we have seen it already
940 940 del old_files[new_filename]
941 941
942 942 # removed files is when there are present in old, but not in NEW,
943 943 # since we remove old files that are present in new diff, left-overs
944 944 # if any should be the removed files
945 945 removed_files.extend(old_files.keys())
946 946
947 947 return FileChangeTuple(added_files, modified_files, removed_files)
948 948
949 949 def _render_update_message(self, changes, file_changes):
950 950 """
951 951 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
952 952 so it's always looking the same disregarding on which default
953 953 renderer system is using.
954 954
955 955 :param changes: changes named tuple
956 956 :param file_changes: file changes named tuple
957 957
958 958 """
959 959 new_status = ChangesetStatus.get_status_lbl(
960 960 ChangesetStatus.STATUS_UNDER_REVIEW)
961 961
962 962 changed_files = (
963 963 file_changes.added + file_changes.modified + file_changes.removed)
964 964
965 965 params = {
966 966 'under_review_label': new_status,
967 967 'added_commits': changes.added,
968 968 'removed_commits': changes.removed,
969 969 'changed_files': changed_files,
970 970 'added_files': file_changes.added,
971 971 'modified_files': file_changes.modified,
972 972 'removed_files': file_changes.removed,
973 973 }
974 974 renderer = RstTemplateRenderer()
975 975 return renderer.render('pull_request_update.mako', **params)
976 976
    def edit(self, pull_request, title, description, description_renderer, user):
        """
        Update title/description of an open pull request and write an
        audit log entry.

        :raises ValueError: if the pull request is already closed
        """
        pull_request = self.__get_pull_request(pull_request)
        # snapshot the old state for the audit log before mutating
        old_data = pull_request.get_api_data(with_merge_state=False)
        if pull_request.is_closed():
            raise ValueError('This pull request is closed')
        # NOTE(review): title only changes when non-empty, while description
        # and renderer are always overwritten — confirm the asymmetry is intended
        if title:
            pull_request.title = title
        pull_request.description = description
        pull_request.updated_on = datetime.datetime.now()
        pull_request.description_renderer = description_renderer
        Session().add(pull_request)
        self._log_audit_action(
            'repo.pull_request.edit', {'old_data': old_data},
            user, pull_request)
991 991
992 992 def update_reviewers(self, pull_request, reviewer_data, user):
993 993 """
994 994 Update the reviewers in the pull request
995 995
996 996 :param pull_request: the pr to update
997 997 :param reviewer_data: list of tuples
998 998 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
999 999 """
1000 1000 pull_request = self.__get_pull_request(pull_request)
1001 1001 if pull_request.is_closed():
1002 1002 raise ValueError('This pull request is closed')
1003 1003
1004 1004 reviewers = {}
1005 1005 for user_id, reasons, mandatory, rules in reviewer_data:
1006 1006 if isinstance(user_id, (int, compat.string_types)):
1007 1007 user_id = self._get_user(user_id).user_id
1008 1008 reviewers[user_id] = {
1009 1009 'reasons': reasons, 'mandatory': mandatory}
1010 1010
1011 1011 reviewers_ids = set(reviewers.keys())
1012 1012 current_reviewers = PullRequestReviewers.query()\
1013 1013 .filter(PullRequestReviewers.pull_request ==
1014 1014 pull_request).all()
1015 1015 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1016 1016
1017 1017 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1018 1018 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1019 1019
1020 1020 log.debug("Adding %s reviewers", ids_to_add)
1021 1021 log.debug("Removing %s reviewers", ids_to_remove)
1022 1022 changed = False
1023 1023 added_audit_reviewers = []
1024 1024 removed_audit_reviewers = []
1025 1025
1026 1026 for uid in ids_to_add:
1027 1027 changed = True
1028 1028 _usr = self._get_user(uid)
1029 1029 reviewer = PullRequestReviewers()
1030 1030 reviewer.user = _usr
1031 1031 reviewer.pull_request = pull_request
1032 1032 reviewer.reasons = reviewers[uid]['reasons']
1033 1033 # NOTE(marcink): mandatory shouldn't be changed now
1034 1034 # reviewer.mandatory = reviewers[uid]['reasons']
1035 1035 Session().add(reviewer)
1036 1036 added_audit_reviewers.append(reviewer.get_dict())
1037 1037
1038 1038 for uid in ids_to_remove:
1039 1039 changed = True
1040 1040 # NOTE(marcink): we fetch "ALL" reviewers using .all(). This is an edge case
1041 1041 # that prevents and fixes cases that we added the same reviewer twice.
1042 1042 # this CAN happen due to the lack of DB checks
1043 1043 reviewers = PullRequestReviewers.query()\
1044 1044 .filter(PullRequestReviewers.user_id == uid,
1045 1045 PullRequestReviewers.pull_request == pull_request)\
1046 1046 .all()
1047 1047
1048 1048 for obj in reviewers:
1049 1049 added_audit_reviewers.append(obj.get_dict())
1050 1050 Session().delete(obj)
1051 1051
1052 1052 if changed:
1053 1053 Session().expire_all()
1054 1054 pull_request.updated_on = datetime.datetime.now()
1055 1055 Session().add(pull_request)
1056 1056
1057 1057 # finally store audit logs
1058 1058 for user_data in added_audit_reviewers:
1059 1059 self._log_audit_action(
1060 1060 'repo.pull_request.reviewer.add', {'data': user_data},
1061 1061 user, pull_request)
1062 1062 for user_data in removed_audit_reviewers:
1063 1063 self._log_audit_action(
1064 1064 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1065 1065 user, pull_request)
1066 1066
1067 1067 self.notify_reviewers(pull_request, ids_to_add)
1068 1068 return ids_to_add, ids_to_remove
1069 1069
1070 1070 def get_url(self, pull_request, request=None, permalink=False):
1071 1071 if not request:
1072 1072 request = get_current_request()
1073 1073
1074 1074 if permalink:
1075 1075 return request.route_url(
1076 1076 'pull_requests_global',
1077 1077 pull_request_id=pull_request.pull_request_id,)
1078 1078 else:
1079 1079 return request.route_url('pullrequest_show',
1080 1080 repo_name=safe_str(pull_request.target_repo.repo_name),
1081 1081 pull_request_id=pull_request.pull_request_id,)
1082 1082
1083 1083 def get_shadow_clone_url(self, pull_request, request=None):
1084 1084 """
1085 1085 Returns qualified url pointing to the shadow repository. If this pull
1086 1086 request is closed there is no shadow repository and ``None`` will be
1087 1087 returned.
1088 1088 """
1089 1089 if pull_request.is_closed():
1090 1090 return None
1091 1091 else:
1092 1092 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1093 1093 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1094 1094
    def notify_reviewers(self, pull_request, reviewers_ids):
        """
        Create in-app notifications (and emails) for the given reviewer ids.

        :param reviewers_ids: user ids to notify; a no-op when empty
        """
        # notification to reviewers
        if not reviewers_ids:
            return

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        # template context shared by the notification and the email
        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
        }

        # pre-generate the subject for notification itself
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
1153 1153
    def delete(self, pull_request, user):
        """
        Permanently delete the pull request, cleaning up its merge
        workspace and writing an audit entry first.
        """
        pull_request = self.__get_pull_request(pull_request)
        # snapshot the data before the row is gone, for the audit log
        old_data = pull_request.get_api_data(with_merge_state=False)
        self._cleanup_merge_workspace(pull_request)
        self._log_audit_action(
            'repo.pull_request.delete', {'old_data': old_data},
            user, pull_request)
        Session().delete(pull_request)
1162 1162
    def close_pull_request(self, pull_request, user):
        """
        Close the pull request without merging it; fires the 'close' hook
        and records an audit entry.
        """
        pull_request = self.__get_pull_request(pull_request)
        # drop the shadow repository used for merge simulation
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self.trigger_pull_request_hook(
            pull_request, pull_request.author, 'close')

        pr_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1175 1175
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """
        Close the pull request with a status-changing comment.

        The status becomes APPROVED only when the calculated review status
        already is approved, otherwise REJECTED.

        :return: tuple ``(comment, status)`` of the created closing comment
            and the applied changeset status
        """

        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        Session().flush()
        events.trigger(events.PullRequestCommentEvent(pull_request, comment))
        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(
                pull_request, user, 'review_status_change',
                data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)

        return comment, status
1232 1232
    def merge_status(self, pull_request, translator=None,
                     force_shadow_repo_refresh=False):
        """
        Return a ``(merge_possible, message)`` tuple describing whether the
        pull request can be merged server-side.

        :param force_shadow_repo_refresh: re-run the merge simulation in the
            shadow repository instead of trusting the cached merge state
        """
        _ = translator or get_current_request().translate

        if not self._is_merge_enabled(pull_request):
            return False, _('Server-side pull request merging is disabled.')
        if pull_request.is_closed():
            return False, _('This pull request is closed.')
        merge_possible, msg = self._check_repo_requirements(
            target=pull_request.target_repo, source=pull_request.source_repo,
            translator=_)
        if not merge_possible:
            return merge_possible, msg

        try:
            resp = self._try_merge(
                pull_request,
                force_shadow_repo_refresh=force_shadow_repo_refresh)
            log.debug("Merge response: %s", resp)
            status = resp.possible, resp.merge_status_message
        except NotImplementedError:
            # backend does not support server-side merging
            status = False, _('Pull request merging is not supported.')

        return status
1257 1257
1258 1258 def _check_repo_requirements(self, target, source, translator):
1259 1259 """
1260 1260 Check if `target` and `source` have compatible requirements.
1261 1261
1262 1262 Currently this is just checking for largefiles.
1263 1263 """
1264 1264 _ = translator
1265 1265 target_has_largefiles = self._has_largefiles(target)
1266 1266 source_has_largefiles = self._has_largefiles(source)
1267 1267 merge_possible = True
1268 1268 message = u''
1269 1269
1270 1270 if target_has_largefiles != source_has_largefiles:
1271 1271 merge_possible = False
1272 1272 if source_has_largefiles:
1273 1273 message = _(
1274 1274 'Target repository large files support is disabled.')
1275 1275 else:
1276 1276 message = _(
1277 1277 'Source repository large files support is disabled.')
1278 1278
1279 1279 return merge_possible, message
1280 1280
    def _has_largefiles(self, repo):
        # largefiles is considered enabled when the 'extensions/largefiles'
        # ui setting exists and its first entry is active
        largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
            'extensions', 'largefiles')
        return largefiles_ui and largefiles_ui[0].active
1285 1285
1286 1286 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1287 1287 """
1288 1288 Try to merge the pull request and return the merge status.
1289 1289 """
1290 1290 log.debug(
1291 1291 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1292 1292 pull_request.pull_request_id, force_shadow_repo_refresh)
1293 1293 target_vcs = pull_request.target_repo.scm_instance()
1294 1294 # Refresh the target reference.
1295 1295 try:
1296 1296 target_ref = self._refresh_reference(
1297 1297 pull_request.target_ref_parts, target_vcs)
1298 1298 except CommitDoesNotExistError:
1299 1299 merge_state = MergeResponse(
1300 1300 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1301 1301 metadata={'target_ref': pull_request.target_ref_parts})
1302 1302 return merge_state
1303 1303
1304 1304 target_locked = pull_request.target_repo.locked
1305 1305 if target_locked and target_locked[0]:
1306 1306 locked_by = 'user:{}'.format(target_locked[0])
1307 1307 log.debug("The target repository is locked by %s.", locked_by)
1308 1308 merge_state = MergeResponse(
1309 1309 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1310 1310 metadata={'locked_by': locked_by})
1311 1311 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1312 1312 pull_request, target_ref):
1313 1313 log.debug("Refreshing the merge status of the repository.")
1314 1314 merge_state = self._refresh_merge_state(
1315 1315 pull_request, target_vcs, target_ref)
1316 1316 else:
1317 1317 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1318 1318 metadata = {
1319 1319 'target_ref': pull_request.target_ref_parts,
1320 'source_ref': pull_request.source_ref_parts
1320 'source_ref': pull_request.source_ref_parts,
1321 1321 }
1322 if not possible and target_ref.type == 'branch':
1323 # NOTE(marcink): case for mercurial multiple heads on branch
1324 heads = target_vcs._heads(target_ref.name)
1325 if len(heads) != 1:
1326 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1327 metadata.update({
1328 'heads': heads
1329 })
1322 1330 merge_state = MergeResponse(
1323 1331 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1324 1332
1325 1333 return merge_state
1326 1334
1327 1335 def _refresh_reference(self, reference, vcs_repository):
1328 1336 if reference.type in self.UPDATABLE_REF_TYPES:
1329 1337 name_or_id = reference.name
1330 1338 else:
1331 1339 name_or_id = reference.commit_id
1332 1340 refreshed_commit = vcs_repository.get_commit(name_or_id)
1333 1341 refreshed_reference = Reference(
1334 1342 reference.type, reference.name, refreshed_commit.raw_id)
1335 1343 return refreshed_reference
1336 1344
1337 1345 def _needs_merge_state_refresh(self, pull_request, target_reference):
1338 1346 return not(
1339 1347 pull_request.revisions and
1340 1348 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1341 1349 target_reference.commit_id == pull_request._last_merge_target_rev)
1342 1350
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run a dry-run merge simulation and cache its outcome.

        Performs the merge in the shadow workspace (``dry_run=True``) and,
        unless the failure reason was UNKNOWN, persists the result on the
        pull request so subsequent status checks can skip the simulation.
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        merge_state = target_vcs.merge(
            repo_id, workspace_id,
            target_reference, source_vcs, pull_request.source_ref_parts,
            dry_run=True, use_rebase=use_rebase,
            close_branch=close_branch)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            # Record which revisions the simulation covered so
            # _needs_merge_state_refresh() can detect staleness later.
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request.last_merge_status = merge_state.failure_reason
            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state
1366 1374
1367 1375 def _workspace_id(self, pull_request):
1368 1376 workspace_id = 'pr-%s' % pull_request.pull_request_id
1369 1377 return workspace_id
1370 1378
1371 1379 def generate_repo_data(self, repo, commit_id=None, branch=None,
1372 1380 bookmark=None, translator=None):
1373 1381 from rhodecode.model.repo import RepoModel
1374 1382
1375 1383 all_refs, selected_ref = \
1376 1384 self._get_repo_pullrequest_sources(
1377 1385 repo.scm_instance(), commit_id=commit_id,
1378 1386 branch=branch, bookmark=bookmark, translator=translator)
1379 1387
1380 1388 refs_select2 = []
1381 1389 for element in all_refs:
1382 1390 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1383 1391 refs_select2.append({'text': element[1], 'children': children})
1384 1392
1385 1393 return {
1386 1394 'user': {
1387 1395 'user_id': repo.user.user_id,
1388 1396 'username': repo.user.username,
1389 1397 'firstname': repo.user.first_name,
1390 1398 'lastname': repo.user.last_name,
1391 1399 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1392 1400 },
1393 1401 'name': repo.repo_name,
1394 1402 'link': RepoModel().get_url(repo),
1395 1403 'description': h.chop_at_smart(repo.description_safe, '\n'),
1396 1404 'refs': {
1397 1405 'all_refs': all_refs,
1398 1406 'selected_ref': selected_ref,
1399 1407 'select2_refs': refs_select2
1400 1408 }
1401 1409 }
1402 1410
1403 1411 def generate_pullrequest_title(self, source, source_ref, target):
1404 1412 return u'{source}#{at_ref} to {target}'.format(
1405 1413 source=source,
1406 1414 at_ref=source_ref,
1407 1415 target=target,
1408 1416 )
1409 1417
1410 1418 def _cleanup_merge_workspace(self, pull_request):
1411 1419 # Merging related cleanup
1412 1420 repo_id = pull_request.target_repo.repo_id
1413 1421 target_scm = pull_request.target_repo.scm_instance()
1414 1422 workspace_id = self._workspace_id(pull_request)
1415 1423
1416 1424 try:
1417 1425 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1418 1426 except NotImplementedError:
1419 1427 pass
1420 1428
    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None,
            translator=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        :param translator: optional translation callable; defaults to the
            current request's translator
        :return: tuple of (groups, selected) where groups is a list of
            ((ref_key, ref_name), group_label) pairs and selected is the
            ``type:name:id`` key of the preselected ref (or None)
        """
        _ = translator or get_current_request().translate

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_unicode(branch) if branch else None
        bookmark = safe_unicode(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []

        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                # key format is "<type>:<name>:<commit_id>"
                ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    # pick the first ref whose id or name matches the
                    # requested commit_id / branch / bookmark
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                # an explicit ref was requested but not found -> hard error
                raise CommitDoesNotExistError(
                    u'No commit refs could be found matching: {}'.format(ref))
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                # nothing requested: fall back to the default branch
                selected = u'branch:{}:{}'.format(
                    safe_unicode(repo.DEFAULT_BRANCH_NAME),
                    safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
                )
            elif repo.commit_ids:
                # make the user select in this case
                selected = None
            else:
                raise EmptyRepositoryError()
        return groups, selected
1486 1494
1487 1495 def get_diff(self, source_repo, source_ref_id, target_ref_id,
1488 1496 hide_whitespace_changes, diff_context):
1489 1497
1490 1498 return self._get_diff_from_pr_or_version(
1491 1499 source_repo, source_ref_id, target_ref_id,
1492 1500 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1493 1501
1494 1502 def _get_diff_from_pr_or_version(
1495 1503 self, source_repo, source_ref_id, target_ref_id,
1496 1504 hide_whitespace_changes, diff_context):
1497 1505
1498 1506 target_commit = source_repo.get_commit(
1499 1507 commit_id=safe_str(target_ref_id))
1500 1508 source_commit = source_repo.get_commit(
1501 1509 commit_id=safe_str(source_ref_id))
1502 1510 if isinstance(source_repo, Repository):
1503 1511 vcs_repo = source_repo.scm_instance()
1504 1512 else:
1505 1513 vcs_repo = source_repo
1506 1514
1507 1515 # TODO: johbo: In the context of an update, we cannot reach
1508 1516 # the old commit anymore with our normal mechanisms. It needs
1509 1517 # some sort of special support in the vcs layer to avoid this
1510 1518 # workaround.
1511 1519 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1512 1520 vcs_repo.alias == 'git'):
1513 1521 source_commit.raw_id = safe_str(source_ref_id)
1514 1522
1515 1523 log.debug('calculating diff between '
1516 1524 'source_ref:%s and target_ref:%s for repo `%s`',
1517 1525 target_ref_id, source_ref_id,
1518 1526 safe_unicode(vcs_repo.path))
1519 1527
1520 1528 vcs_diff = vcs_repo.get_diff(
1521 1529 commit1=target_commit, commit2=source_commit,
1522 1530 ignore_whitespace=hide_whitespace_changes, context=diff_context)
1523 1531 return vcs_diff
1524 1532
1525 1533 def _is_merge_enabled(self, pull_request):
1526 1534 return self._get_general_setting(
1527 1535 pull_request, 'rhodecode_pr_merge_enabled')
1528 1536
1529 1537 def _use_rebase_for_merging(self, pull_request):
1530 1538 repo_type = pull_request.target_repo.repo_type
1531 1539 if repo_type == 'hg':
1532 1540 return self._get_general_setting(
1533 1541 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1534 1542 elif repo_type == 'git':
1535 1543 return self._get_general_setting(
1536 1544 pull_request, 'rhodecode_git_use_rebase_for_merging')
1537 1545
1538 1546 return False
1539 1547
1540 1548 def _close_branch_before_merging(self, pull_request):
1541 1549 repo_type = pull_request.target_repo.repo_type
1542 1550 if repo_type == 'hg':
1543 1551 return self._get_general_setting(
1544 1552 pull_request, 'rhodecode_hg_close_branch_before_merging')
1545 1553 elif repo_type == 'git':
1546 1554 return self._get_general_setting(
1547 1555 pull_request, 'rhodecode_git_close_branch_before_merging')
1548 1556
1549 1557 return False
1550 1558
1551 1559 def _get_general_setting(self, pull_request, settings_key, default=False):
1552 1560 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1553 1561 settings = settings_model.get_general_settings()
1554 1562 return settings.get(settings_key, default)
1555 1563
1556 1564 def _log_audit_action(self, action, action_data, user, pull_request):
1557 1565 audit_logger.store(
1558 1566 action=action,
1559 1567 action_data=action_data,
1560 1568 user=user,
1561 1569 repo=pull_request.target_repo)
1562 1570
    def get_reviewer_functions(self):
        """
        Fetches functions for validation and fetching default reviewers.
        If available we use the EE package, else we fallback to CE
        package functions

        :return: tuple of (get_default_reviewers_data, validate_default_reviewers)
        """
        try:
            # EE-only package takes precedence when installed
            from rc_reviewers.utils import get_default_reviewers_data
            from rc_reviewers.utils import validate_default_reviewers
        except ImportError:
            # CE fallback implementations
            from rhodecode.apps.repository.utils import get_default_reviewers_data
            from rhodecode.apps.repository.utils import validate_default_reviewers

        return get_default_reviewers_data, validate_default_reviewers
1577 1585
1578 1586
class MergeCheck(object):
    """
    Perform merge checks and return a check object which stores information
    about merge errors, and merge conditions.

    The checks run in order: merge permission, target-branch rules, review
    status, unresolved TODOs, and finally the merge simulation itself.
    """
    # keys under which check failures are stored in `error_details`
    TODO_CHECK = 'todo'
    PERM_CHECK = 'perm'
    REVIEW_CHECK = 'review'
    MERGE_CHECK = 'merge'

    def __init__(self):
        # filled in by validate()
        self.review_status = None
        self.merge_possible = None
        self.merge_msg = ''
        self.failed = None
        self.errors = []
        # keeps insertion order of failed checks for display
        self.error_details = OrderedDict()

    def push_error(self, error_type, message, error_key, details):
        """Record one failed check and mark the whole check as failed.

        :param error_type: severity, e.g. 'error' or 'warning'
        :param message: user-facing (translated) message
        :param error_key: one of the *_CHECK class constants
        :param details: arbitrary payload describing the failure
        """
        self.failed = True
        self.errors.append([error_type, message])
        self.error_details[error_key] = dict(
            details=details,
            error_type=error_type,
            message=message
        )

    @classmethod
    def validate(cls, pull_request, auth_user, translator, fail_early=False,
                 force_shadow_repo_refresh=False):
        """Run all merge checks for `pull_request` as `auth_user`.

        :param fail_early: stop at the first failed check instead of
            collecting all failures
        :param force_shadow_repo_refresh: force a fresh merge simulation
        :return: a populated `MergeCheck` instance
        """
        _ = translator
        merge_check = cls()

        # permissions to merge
        user_allowed_to_merge = PullRequestModel().check_user_merge(
            pull_request, auth_user)
        if not user_allowed_to_merge:
            # NOTE: was a copy-pasted "approval is pending" message that did
            # not match this (permission) failure
            log.debug("MergeCheck: cannot merge, user not allowed to merge.")

            msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # permission to merge into the target branch
        target_commit_id = pull_request.target_ref_parts.commit_id
        if pull_request.target_ref_parts.type == 'branch':
            branch_name = pull_request.target_ref_parts.name
        else:
            # for mercurial we can always figure out the branch from the commit
            # in case of bookmark
            target_commit = pull_request.target_repo.get_commit(target_commit_id)
            branch_name = target_commit.branch

        rule, branch_perm = auth_user.get_rule_and_branch_permission(
            pull_request.target_repo.repo_name, branch_name)
        if branch_perm and branch_perm == 'branch.none':
            msg = _('Target branch `{}` changes rejected by rule {}.').format(
                branch_name, rule)
            merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
            if fail_early:
                return merge_check

        # review status, must be always present
        review_status = pull_request.calculated_review_status()
        merge_check.review_status = review_status

        status_approved = review_status == ChangesetStatus.STATUS_APPROVED
        if not status_approved:
            log.debug("MergeCheck: cannot merge, approval is pending.")

            msg = _('Pull request reviewer approval is pending.')

            merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)

            if fail_early:
                return merge_check

        # left over TODOs
        todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
        if todos:
            # lazy %-style args instead of eager .format() in the debug call
            log.debug("MergeCheck: cannot merge, %s "
                      "unresolved TODOs left.", len(todos))

            if len(todos) == 1:
                msg = _('Cannot merge, {} TODO still not resolved.').format(
                    len(todos))
            else:
                msg = _('Cannot merge, {} TODOs still not resolved.').format(
                    len(todos))

            merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)

            if fail_early:
                return merge_check

        # merge possible, here is the filesystem simulation + shadow repo
        merge_status, msg = PullRequestModel().merge_status(
            pull_request, translator=translator,
            force_shadow_repo_refresh=force_shadow_repo_refresh)
        merge_check.merge_possible = merge_status
        merge_check.merge_msg = msg
        if not merge_status:
            log.debug("MergeCheck: cannot merge, pull request merge not possible.")
            merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)

            if fail_early:
                return merge_check

        log.debug('MergeCheck: is failed: %s', merge_check.failed)
        return merge_check

    @classmethod
    def get_merge_conditions(cls, pull_request, translator):
        """Describe how the merge will be performed (strategy, branch
        closing) for display to the user."""
        _ = translator
        merge_details = {}

        model = PullRequestModel()
        use_rebase = model._use_rebase_for_merging(pull_request)

        if use_rebase:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: rebase')
            )
        else:
            merge_details['merge_strategy'] = dict(
                details={},
                message=_('Merge strategy: explicit merge commit')
            )

        close_branch = model._close_branch_before_merging(pull_request)
        if close_branch:
            repo_type = pull_request.target_repo.repo_type
            close_msg = ''
            if repo_type == 'hg':
                close_msg = _('Source branch will be closed after merge.')
            elif repo_type == 'git':
                close_msg = _('Source branch will be deleted after merge.')

            merge_details['close_branch'] = dict(
                details={},
                message=close_msg
            )

        return merge_details
1725 1733
1726 1734
# Summary of commit changes between two versions of a pull request
# (presumably collections of commit ids — confirm against callers).
ChangeTuple = collections.namedtuple(
    'ChangeTuple', ['added', 'common', 'removed', 'total'])

# Summary of file-level changes between two versions of a pull request.
FileChangeTuple = collections.namedtuple(
    'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,949 +1,949 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 import textwrap
24 24
25 25 import rhodecode
26 26 from rhodecode.lib.utils2 import safe_unicode
27 27 from rhodecode.lib.vcs.backends import get_backend
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 MergeResponse, MergeFailureReason, Reference)
30 30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 31 from rhodecode.lib.vcs.nodes import FileNode
32 32 from rhodecode.model.comment import CommentsModel
33 33 from rhodecode.model.db import PullRequest, Session
34 34 from rhodecode.model.pull_request import PullRequestModel
35 35 from rhodecode.model.user import UserModel
36 36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37 37
38 38
39 39 pytestmark = [
40 40 pytest.mark.backends("git", "hg"),
41 41 ]
42 42
43 43
44 44 @pytest.mark.usefixtures('config_stub')
45 45 class TestPullRequestModel(object):
46 46
    @pytest.fixture
    def pull_request(self, request, backend, pr_util):
        """
        A pull request combined with multiples patches.

        Patches the VCS backend merge/cleanup, comment, notification, url
        helper, PR hook and cache-invalidation entry points, creates one
        mergeable pull request, and exposes the mocks plus derived ids
        (workspace_id, repo_id, source/target commits) on ``self``.
        """
        BackendClass = get_backend(backend.alias)
        # default merge response: an unknown failure with exception metadata
        merge_resp = MergeResponse(
            False, False, None, MergeFailureReason.UNKNOWN,
            metadata={'exception': 'MockError'})
        self.merge_patcher = mock.patch.object(
            BackendClass, 'merge', return_value=merge_resp)
        self.workspace_remove_patcher = mock.patch.object(
            BackendClass, 'cleanup_merge_workspace')

        self.workspace_remove_mock = self.workspace_remove_patcher.start()
        self.merge_mock = self.merge_patcher.start()
        self.comment_patcher = mock.patch(
            'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
        self.comment_patcher.start()
        self.notification_patcher = mock.patch(
            'rhodecode.model.notification.NotificationModel.create')
        self.notification_patcher.start()
        self.helper_patcher = mock.patch(
            'rhodecode.lib.helpers.route_path')
        self.helper_patcher.start()

        self.hook_patcher = mock.patch.object(PullRequestModel,
                                              'trigger_pull_request_hook')
        self.hook_mock = self.hook_patcher.start()

        self.invalidation_patcher = mock.patch(
            'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
        self.invalidation_mock = self.invalidation_patcher.start()

        # PR with a non-ascii name suffix to exercise unicode handling
        self.pull_request = pr_util.create_pull_request(
            mergeable=True, name_suffix=u'Δ…Δ‡')
        self.source_commit = self.pull_request.source_ref_parts.commit_id
        self.target_commit = self.pull_request.target_ref_parts.commit_id
        self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
        self.repo_id = self.pull_request.target_repo.repo_id

        @request.addfinalizer
        def cleanup_pull_request():
            # every test implicitly asserts the 'create' hook fired once
            calls = [mock.call(
                self.pull_request, self.pull_request.author, 'create')]
            self.hook_mock.assert_has_calls(calls)

            self.workspace_remove_patcher.stop()
            self.merge_patcher.stop()
            self.comment_patcher.stop()
            self.notification_patcher.stop()
            self.helper_patcher.stop()
            self.hook_patcher.stop()
            self.invalidation_patcher.stop()

        return self.pull_request
103 103
    def test_get_all(self, pull_request):
        """get_all() returns the fixture's single PR as a list."""
        prs = PullRequestModel().get_all(pull_request.target_repo)
        assert isinstance(prs, list)
        assert len(prs) == 1
108 108
    def test_count_all(self, pull_request):
        """count_all() matches the single PR created by the fixture."""
        pr_count = PullRequestModel().count_all(pull_request.target_repo)
        assert pr_count == 1
112 112
    def test_get_awaiting_review(self, pull_request):
        """A freshly created PR counts as awaiting review."""
        prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
        assert isinstance(prs, list)
        assert len(prs) == 1
117 117
    def test_count_awaiting_review(self, pull_request):
        """count_awaiting_review() matches the single unreviewed PR."""
        pr_count = PullRequestModel().count_awaiting_review(
            pull_request.target_repo)
        assert pr_count == 1
122 122
    def test_get_awaiting_my_review(self, pull_request):
        """After adding the author as a reviewer, the PR shows up in the
        author's awaiting-my-review list."""
        PullRequestModel().update_reviewers(
            pull_request, [(pull_request.author, ['author'], False, [])],
            pull_request.author)
        prs = PullRequestModel().get_awaiting_my_review(
            pull_request.target_repo, user_id=pull_request.author.user_id)
        assert isinstance(prs, list)
        assert len(prs) == 1
131 131
    def test_count_awaiting_my_review(self, pull_request):
        """Counter variant of get_awaiting_my_review for the same setup."""
        PullRequestModel().update_reviewers(
            pull_request, [(pull_request.author, ['author'], False, [])],
            pull_request.author)
        pr_count = PullRequestModel().count_awaiting_my_review(
            pull_request.target_repo, user_id=pull_request.author.user_id)
        assert pr_count == 1
139 139
    def test_delete_calls_cleanup_merge(self, pull_request):
        """Deleting a PR must clean up its shadow merge workspace."""
        repo_id = pull_request.target_repo.repo_id
        PullRequestModel().delete(pull_request, pull_request.author)

        self.workspace_remove_mock.assert_called_once_with(
            repo_id, self.workspace_id)
146 146
    def test_close_calls_cleanup_and_hook(self, pull_request):
        """Closing a PR cleans the workspace and fires the 'close' hook."""
        PullRequestModel().close_pull_request(
            pull_request, pull_request.author)
        repo_id = pull_request.target_repo.repo_id

        self.workspace_remove_mock.assert_called_once_with(
            repo_id, self.workspace_id)
        self.hook_mock.assert_called_with(
            self.pull_request, self.pull_request.author, 'close')
156 156
    def test_merge_status(self, pull_request):
        """A successful dry-run merge result is cached on the PR; the
        second status check must not hit the backend again."""
        self.merge_mock.return_value = MergeResponse(
            True, False, None, MergeFailureReason.NONE)

        # no cached merge state before the first check
        assert pull_request._last_merge_source_rev is None
        assert pull_request._last_merge_target_rev is None
        assert pull_request.last_merge_status is None

        status, msg = PullRequestModel().merge_status(pull_request)
        assert status is True
        assert msg == 'This pull request can be automatically merged.'
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts, dry_run=True,
            use_rebase=False, close_branch=False)

        # the simulated state got persisted
        assert pull_request._last_merge_source_rev == self.source_commit
        assert pull_request._last_merge_target_rev == self.target_commit
        assert pull_request.last_merge_status is MergeFailureReason.NONE

        # second call is answered from the cache
        self.merge_mock.reset_mock()
        status, msg = PullRequestModel().merge_status(pull_request)
        assert status is True
        assert msg == 'This pull request can be automatically merged.'
        assert self.merge_mock.called is False
184 184
    def test_merge_status_known_failure(self, pull_request):
        """A known merge failure (conflicts) is also cached — the second
        status check is served without calling the backend."""
        self.merge_mock.return_value = MergeResponse(
            False, False, None, MergeFailureReason.MERGE_FAILED)

        assert pull_request._last_merge_source_rev is None
        assert pull_request._last_merge_target_rev is None
        assert pull_request.last_merge_status is None

        status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == 'This pull request cannot be merged because of merge conflicts.'
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts, dry_run=True,
            use_rebase=False, close_branch=False)

        # known failures are persisted just like successes
        assert pull_request._last_merge_source_rev == self.source_commit
        assert pull_request._last_merge_target_rev == self.target_commit
        assert (
            pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)

        self.merge_mock.reset_mock()
        status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == 'This pull request cannot be merged because of merge conflicts.'
        assert self.merge_mock.called is False
213 213
    def test_merge_status_unknown_failure(self, pull_request):
        """An UNKNOWN failure must NOT be cached — every status check
        re-runs the merge simulation."""
        self.merge_mock.return_value = MergeResponse(
            False, False, None, MergeFailureReason.UNKNOWN,
            metadata={'exception': 'MockError'})

        assert pull_request._last_merge_source_rev is None
        assert pull_request._last_merge_target_rev is None
        assert pull_request.last_merge_status is None

        status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == (
            'This pull request cannot be merged because of an unhandled exception. '
            'MockError')
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts, dry_run=True,
            use_rebase=False, close_branch=False)

        # nothing was persisted for the unknown failure
        assert pull_request._last_merge_source_rev is None
        assert pull_request._last_merge_target_rev is None
        assert pull_request.last_merge_status is None

        # the backend is consulted again on the next check
        self.merge_mock.reset_mock()
        status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == (
            'This pull request cannot be merged because of an unhandled exception. '
            'MockError')
        assert self.merge_mock.called is True
246 246
    def test_merge_status_when_target_is_locked(self, pull_request):
        """A locked target repo blocks merging with a lock message."""
        # locked tuple format: [user_id, lock_timestamp, lock_reason]
        pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
        status, msg = PullRequestModel().merge_status(pull_request)
        assert status is False
        assert msg == (
            'This pull request cannot be merged because the target repository '
            'is locked by user:1.')
254 254
    def test_merge_status_requirements_check_target(self, pull_request):
        """Largefiles enabled only on the source repo -> target message."""

        def has_largefiles(self, repo):
            # pretend only the source repo has largefiles enabled
            return repo == pull_request.source_repo

        patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
        with patcher:
            status, msg = PullRequestModel().merge_status(pull_request)

        assert status is False
        assert msg == 'Target repository large files support is disabled.'
266 266
    def test_merge_status_requirements_check_source(self, pull_request):
        """Largefiles enabled only on the target repo -> source message."""

        def has_largefiles(self, repo):
            # pretend only the target repo has largefiles enabled
            return repo == pull_request.target_repo

        patcher = mock.patch.object(PullRequestModel, '_has_largefiles', has_largefiles)
        with patcher:
            status, msg = PullRequestModel().merge_status(pull_request)

        assert status is False
        assert msg == 'Source repository large files support is disabled.'
278 278
    def test_merge(self, pull_request, merge_extras):
        """merge_repo() drives the backend merge with the generated commit
        message, invalidates caches, fires the hook and stores merge_rev."""
        user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        merge_ref = Reference(
            'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
        self.merge_mock.return_value = MergeResponse(
            True, True, merge_ref, MergeFailureReason.NONE)

        merge_extras['repository'] = pull_request.target_repo.repo_name
        PullRequestModel().merge_repo(
            pull_request, pull_request.author, extras=merge_extras)

        # expected auto-generated merge commit message
        message = (
            u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
            u'\n\n {pr_title}'.format(
                pr_id=pull_request.pull_request_id,
                source_repo=safe_unicode(
                    pull_request.source_repo.scm_instance().name),
                source_ref_name=pull_request.source_ref_parts.name,
                pr_title=safe_unicode(pull_request.title)
            )
        )
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts,
            user_name=user.short_contact, user_email=user.email, message=message,
            use_rebase=False, close_branch=False
        )
        self.invalidation_mock.assert_called_once_with(
            pull_request.target_repo.repo_name)

        self.hook_mock.assert_called_with(
            self.pull_request, self.pull_request.author, 'merge')

        # reload from DB and verify the merge revision was persisted
        pull_request = PullRequest.get(pull_request.pull_request_id)
        assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
316 316
    def test_merge_with_status_lock(self, pull_request, merge_extras):
        """Merging while the PR state is temporarily UPDATING still works
        and the state is restored after the context manager exits."""
        user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        merge_ref = Reference(
            'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
        self.merge_mock.return_value = MergeResponse(
            True, True, merge_ref, MergeFailureReason.NONE)

        merge_extras['repository'] = pull_request.target_repo.repo_name

        with pull_request.set_state(PullRequest.STATE_UPDATING):
            assert pull_request.pull_request_state == PullRequest.STATE_UPDATING
            PullRequestModel().merge_repo(
                pull_request, pull_request.author, extras=merge_extras)

        # state lock released after leaving the context manager
        assert pull_request.pull_request_state == PullRequest.STATE_CREATED

        message = (
            u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
            u'\n\n {pr_title}'.format(
                pr_id=pull_request.pull_request_id,
                source_repo=safe_unicode(
                    pull_request.source_repo.scm_instance().name),
                source_ref_name=pull_request.source_ref_parts.name,
                pr_title=safe_unicode(pull_request.title)
            )
        )
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts,
            user_name=user.short_contact, user_email=user.email, message=message,
            use_rebase=False, close_branch=False
        )
        self.invalidation_mock.assert_called_once_with(
            pull_request.target_repo.repo_name)

        self.hook_mock.assert_called_with(
            self.pull_request, self.pull_request.author, 'merge')

        pull_request = PullRequest.get(pull_request.pull_request_id)
        assert pull_request.merge_rev == '6126b7bfcc82ad2d3deaee22af926b082ce54cc6'
359 359
    def test_merge_failed(self, pull_request, merge_extras):
        """A failed merge must not invalidate caches nor set merge_rev."""
        user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
        merge_ref = Reference(
            'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
        # simulate the VCS layer reporting a merge conflict
        self.merge_mock.return_value = MergeResponse(
            False, False, merge_ref, MergeFailureReason.MERGE_FAILED)

        merge_extras['repository'] = pull_request.target_repo.repo_name
        PullRequestModel().merge_repo(
            pull_request, pull_request.author, extras=merge_extras)

        # the merge is still attempted with the regular arguments
        message = (
            u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
            u'\n\n {pr_title}'.format(
                pr_id=pull_request.pull_request_id,
                source_repo=safe_unicode(
                    pull_request.source_repo.scm_instance().name),
                source_ref_name=pull_request.source_ref_parts.name,
                pr_title=safe_unicode(pull_request.title)
            )
        )
        self.merge_mock.assert_called_with(
            self.repo_id, self.workspace_id,
            pull_request.target_ref_parts,
            pull_request.source_repo.scm_instance(),
            pull_request.source_ref_parts,
            user_name=user.short_contact, user_email=user.email, message=message,
            use_rebase=False, close_branch=False
        )

        # on failure: no cache invalidation, no merge revision recorded
        pull_request = PullRequest.get(pull_request.pull_request_id)
        assert self.invalidation_mock.called is False
        assert pull_request.merge_rev is None
393 393
    def test_get_commit_ids(self, pull_request):
        """_get_commit_ids returns the PR revisions, appending merge_rev
        only when it is not already among them."""
        # The PR has not been merged yet, so expect an exception
        with pytest.raises(ValueError):
            PullRequestModel()._get_commit_ids(pull_request)

        # Merge revision is in the revisions list
        pull_request.merge_rev = pull_request.revisions[0]
        commit_ids = PullRequestModel()._get_commit_ids(pull_request)
        assert commit_ids == pull_request.revisions

        # Merge revision is not in the revisions list
        pull_request.merge_rev = 'f000' * 10
        commit_ids = PullRequestModel()._get_commit_ids(pull_request)
        assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
408 408
409 409 def test_get_diff_from_pr_version(self, pull_request):
410 410 source_repo = pull_request.source_repo
411 411 source_ref_id = pull_request.source_ref_parts.commit_id
412 412 target_ref_id = pull_request.target_ref_parts.commit_id
413 413 diff = PullRequestModel()._get_diff_from_pr_or_version(
414 414 source_repo, source_ref_id, target_ref_id,
415 415 hide_whitespace_changes=False, diff_context=6)
416 416 assert 'file_1' in diff.raw
417 417
418 418 def test_generate_title_returns_unicode(self):
419 419 title = PullRequestModel().generate_pullrequest_title(
420 420 source='source-dummy',
421 421 source_ref='source-ref-dummy',
422 422 target='target-dummy',
423 423 )
424 424 assert type(title) == unicode
425 425
426 426
@pytest.mark.usefixtures('config_stub')
class TestIntegrationMerge(object):
    # Merge tests exercising the full hook pipeline (rcextensions).

    @pytest.mark.parametrize('extra_config', (
        {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
    ))
    def test_merge_triggers_push_hooks(
            self, pr_util, user_admin, capture_rcextensions, merge_extras,
            extra_config):
        """A successful merge must run the pre/post push rcextension hooks."""
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        merge_extras['repository'] = pull_request.target_repo.repo_name
        Session().commit()

        # run the merge with the parametrized hook configuration active
        with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
            merge_state = PullRequestModel().merge_repo(
                pull_request, user_admin, extras=merge_extras)

        assert merge_state.executed
        assert '_pre_push_hook' in capture_rcextensions
        assert '_push_hook' in capture_rcextensions

    def test_merge_can_be_rejected_by_pre_push_hook(
            self, pr_util, user_admin, capture_rcextensions, merge_extras):
        """A failing pre-push hook aborts the merge; push hooks never run."""
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        merge_extras['repository'] = pull_request.target_repo.repo_name
        Session().commit()

        # make the pre-push extension raise, which should veto the merge
        with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
            pre_pull.side_effect = RepositoryError("Disallow push!")
            merge_status = PullRequestModel().merge_repo(
                pull_request, user_admin, extras=merge_extras)

        assert not merge_status.executed
        assert 'pre_push' not in capture_rcextensions
        assert 'post_push' not in capture_rcextensions

    def test_merge_fails_if_target_is_locked(
            self, pr_util, user_regular, merge_extras):
        """Merging into a repo locked by another user must not execute."""
        pull_request = pr_util.create_pull_request(
            approved=True, mergeable=True)
        # lock is held by a different user id than the one merging
        locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
        pull_request.target_repo.locked = locked_by
        # TODO: johbo: Check if this can work based on the database, currently
        # all data is pre-computed, that's why just updating the DB is not
        # enough.
        merge_extras['locked_by'] = locked_by
        merge_extras['repository'] = pull_request.target_repo.repo_name
        # TODO: johbo: Needed for sqlite, try to find an automatic way for it
        Session().commit()
        merge_status = PullRequestModel().merge_repo(
            pull_request, user_regular, extras=merge_extras)
        assert not merge_status.executed
483 483
484 484
@pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
    (False, 1, 0),
    (True, 0, 1),
])
def test_outdated_comments(
        pr_util, use_outdated, inlines_count, outdated_count, config_stub):
    """A comment on a line that disappears from the diff is flagged as
    outdated only when the outdated-comments feature is enabled."""
    pull_request = pr_util.create_pull_request()
    pr_util.create_inline_comment(file_path='not_in_updated_diff')

    with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
        pr_util.add_one_commit()
        assert_inline_comments(
            pull_request, visible=inlines_count, outdated=outdated_count)
    outdated_comment_mock.assert_called_with(pull_request)
499 499
500 500
@pytest.mark.parametrize('mr_type, expected_msg', [
    (MergeFailureReason.NONE,
     'This pull request can be automatically merged.'),
    (MergeFailureReason.UNKNOWN,
     'This pull request cannot be merged because of an unhandled exception. CRASH'),
    (MergeFailureReason.MERGE_FAILED,
     'This pull request cannot be merged because of merge conflicts.'),
    (MergeFailureReason.PUSH_FAILED,
     'This pull request could not be merged because push to target:`some-repo@merge_commit` failed.'),
    (MergeFailureReason.TARGET_IS_NOT_HEAD,
     'This pull request cannot be merged because the target `ref_name` is not a head.'),
    (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES,
     'This pull request cannot be merged because the source contains more branches than the target.'),
    (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
     'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'),
    (MergeFailureReason.TARGET_IS_LOCKED,
     'This pull request cannot be merged because the target repository is locked by user:123.'),
    (MergeFailureReason.MISSING_TARGET_REF,
     'This pull request cannot be merged because the target reference `ref_name` is missing.'),
    (MergeFailureReason.MISSING_SOURCE_REF,
     'This pull request cannot be merged because the source reference `ref_name` is missing.'),
    (MergeFailureReason.SUBREPO_MERGE_FAILED,
     'This pull request cannot be merged because of conflicts related to sub repositories.'),

])
def test_merge_response_message(mr_type, expected_msg):
    """Every MergeFailureReason renders its human-readable status message.

    The metadata dict supplies a value for every placeholder any of the
    messages may reference (exception text, refs, heads list, lock owner).
    """
    merge_ref = Reference('type', 'ref_name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
    metadata = {
        'exception': "CRASH",
        'target': 'some-repo',
        'merge_commit': 'merge_commit',
        'target_ref': merge_ref,
        'source_ref': merge_ref,
        'heads': ','.join(['a', 'b', 'c']),
        'locked_by': 'user:123'}

    merge_response = MergeResponse(True, True, merge_ref, mr_type, metadata=metadata)
    assert merge_response.merge_status_message == expected_msg
539 539
540 540
@pytest.fixture
def merge_extras(user_regular):
    """
    Context for the vcs operation when running a merge.
    """
    # mirrors the shape produced by vcs_operation_context for a push
    return {
        'ip': '127.0.0.1',
        'username': user_regular.username,
        'user_id': user_regular.user_id,
        'action': 'push',
        'repository': 'fake_target_repo_name',
        'scm': 'git',
        'config': 'fake_config_ini_path',
        'repo_store': '',
        'make_lock': None,
        'locked_by': [None, None, None],
        'server_url': 'http://test.example.com:5000',
        'hooks': ['push', 'pull'],
        'is_shadow_repo': False,
    }
562 562
563 563
@pytest.mark.usefixtures('config_stub')
class TestUpdateCommentHandling(object):
    # Tests for how inline comments are re-positioned or flagged as
    # outdated when a pull request is updated with new commits.

    @pytest.fixture(autouse=True, scope='class')
    def enable_outdated_comments(self, request, baseapp):
        """Force-enable the outdated-comments feature for this class."""
        config_patch = mock.patch.dict(
            'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
        config_patch.start()

        @request.addfinalizer
        def cleanup():
            config_patch.stop()

    def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
        """A comment on a file untouched by the update stays visible."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
            {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)

    def test_comment_stays_unflagged_on_change_above(self, pr_util):
        """An insertion above the commented line shifts its line number."""
        original_content = ''.join(
            ['line {}\n'.format(x) for x in range(1, 11)])
        updated_content = 'new_line_at_top\n' + original_content
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', original_content)]},
            {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])

        with outdated_comments_patcher():
            comment = pr_util.create_inline_comment(
                line_no=u'n8', file_path='file_b')
            pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)
        # comment moved down one line to follow the insertion at the top
        assert comment.line_no == u'n9'

    def test_comment_stays_unflagged_on_change_below(self, pr_util):
        """A change below the commented line leaves the comment untouched."""
        original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
        updated_content = original_content + 'new_line_at_end\n'
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', original_content)]},
            {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        pr_util.add_one_commit(head='c')

        assert_inline_comments(pull_request, visible=1, outdated=0)

    @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
    def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
        """A change within the diff context around the comment outdates it."""
        base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
        change_lines = list(base_lines)
        change_lines.insert(6, 'line 6a added\n')

        # Changes on the last line of sight
        update_lines = list(change_lines)
        update_lines[0] = 'line 1 changed\n'
        update_lines[-1] = 'line 12 changed\n'

        def file_b(lines):
            return FileNode('file_b', ''.join(lines))

        commits = [
            {'message': 'a', 'added': [file_b(base_lines)]},
            {'message': 'b', 'changed': [file_b(change_lines)]},
            {'message': 'c', 'changed': [file_b(update_lines)]},
        ]

        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(line_no=line_no, file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
            assert_inline_comments(pull_request, visible=0, outdated=1)

    @pytest.mark.parametrize("change, content", [
        ('changed', 'changed\n'),
        ('removed', ''),
    ], ids=['changed', 'removed'])
    def test_comment_flagged_on_change(self, pr_util, change, content):
        """Changing or removing the commented file flags the comment."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
            {'message': 'c', change: [FileNode('file_b', content)]},
        ]
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
            assert_inline_comments(pull_request, visible=0, outdated=1)
670 670
671 671
@pytest.mark.usefixtures('config_stub')
class TestUpdateChangedFiles(object):
    # Tests of the added/modified/removed file summary computed when a
    # pull request is updated with new commits.

    def test_no_changes_on_unchanged_diff(self, pr_util):
        """A new file introduced by the update is reported only as added."""
        commits = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode('file_b', 'test_content b\n')]},
            {'message': 'c',
             'added': [FileNode('file_c', 'test_content c\n')]},
        ]
        # open a PR from a to b, adding file_b
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # modify PR adding new file file_c
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=['file_c'],
            modified=[],
            removed=[])

    def test_modify_and_undo_modification_diff(self, pr_util):
        """A modification later reverted must end up reported as no change."""
        commits = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode('file_b', 'test_content b\n')]},
            {'message': 'c',
             'changed': [FileNode('file_b', 'test_content b modified\n')]},
            {'message': 'd',
             'changed': [FileNode('file_b', 'test_content b\n')]},
        ]
        # open a PR from a to b, adding file_b
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # modify PR modifying file file_b
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=['file_b'],
            removed=[])

        # move the head again to d, which rollbacks change,
        # meaning we should indicate no changes
        pr_util.add_one_commit(head='d')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=[],
            removed=[])

    def test_updated_all_files_in_pr(self, pr_util):
        """Changing every file in the update reports them all as modified."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode('file_a', 'test_content a\n'),
                FileNode('file_b', 'test_content b\n'),
                FileNode('file_c', 'test_content c\n')]},
            {'message': 'c', 'changed': [
                FileNode('file_a', 'test_content a changed\n'),
                FileNode('file_b', 'test_content b changed\n'),
                FileNode('file_c', 'test_content c changed\n')]},
        ]
        # open a PR from a to b, changing 3 files
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=['file_a', 'file_b', 'file_c'],
            removed=[])

    def test_updated_and_removed_all_files_in_pr(self, pr_util):
        """Removing every file in the update reports them all as removed."""
        commits = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode('file_a', 'test_content a\n'),
                FileNode('file_b', 'test_content b\n'),
                FileNode('file_c', 'test_content c\n')]},
            {'message': 'c', 'removed': [
                FileNode('file_a', 'test_content a changed\n'),
                FileNode('file_b', 'test_content b changed\n'),
                FileNode('file_c', 'test_content c changed\n')]},
        ]
        # open a PR from a to b, removing 3 files
        pull_request = pr_util.create_pull_request(
            commits=commits, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pull_request,
            added=[],
            modified=[],
            removed=['file_a', 'file_b', 'file_c'])
780 780
781 781
def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
    """After a source update, update_commits records exactly one PR version."""
    pull_request = pr_util.create_pull_request()
    pr_util.update_source_repository()

    model = PullRequestModel()
    model.update_commits(pull_request)

    # a snapshot version entry must now exist
    versions = model.get_versions(pull_request)
    assert len(versions) == 1
791 791
792 792
def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
    """update_commits on an unchanged PR must not create a version entry."""
    pr = pr_util.create_pull_request()
    model = PullRequestModel()

    model.update_commits(pr)

    # nothing changed, so no snapshot version should have been written
    assert len(model.get_versions(pr)) == 0
800 800
801 801
def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
    """Existing comments get attached to the version created by an update."""
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    comment = pr_util.create_comment()
    pr_util.update_source_repository()

    model.update_commits(pull_request)

    # the pre-existing comment is now linked to the snapshot version
    first_version = model.get_versions(pull_request)[0]
    assert comment.pull_request_version == first_version
812 812
813 813
def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
    """Updating a PR leaves a status-change comment describing the update."""
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    pr_util.update_source_repository()
    pr_util.update_source_repository()

    model.update_commits(pull_request)

    # Expect to find a new comment about the change
    # NOTE: the expected text must match the model's RST template byte-for-byte
    expected_message = textwrap.dedent(
        """\
        Pull request updated. Auto status change to |under_review|

        .. role:: added
        .. role:: removed
        .. parsed-literal::

          Changed commits:
            * :added:`1 added`
            * :removed:`0 removed`

          Changed files:
            * `A file_2 <#a_c--92ed3b5f07b4>`_

        .. |under_review| replace:: *"Under Review"*"""
    )
    # the update comment is the most recently modified one
    pull_request_comments = sorted(
        pull_request.comments, key=lambda c: c.modified_at)
    update_comment = pull_request_comments[-1]
    assert update_comment.text == expected_message
844 844
845 845
def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
    """_create_version_from_snapshot copies PR attributes into the version,
    refreshing created_on but preserving updated_on."""
    pull_request = pr_util.create_pull_request()

    # Avoiding default values
    pull_request.status = PullRequest.STATUS_CLOSED
    pull_request._last_merge_source_rev = "0" * 40
    pull_request._last_merge_target_rev = "1" * 40
    pull_request.last_merge_status = 1
    pull_request.merge_rev = "2" * 40

    # Remember automatic values
    created_on = pull_request.created_on
    updated_on = pull_request.updated_on

    # Create a new version of the pull request
    version = PullRequestModel()._create_version_from_snapshot(pull_request)

    # Check attributes
    assert version.title == pr_util.create_parameters['title']
    assert version.description == pr_util.create_parameters['description']
    assert version.status == PullRequest.STATUS_CLOSED

    # versions get updated created_on
    assert version.created_on != created_on

    assert version.updated_on == updated_on
    assert version.user_id == pull_request.user_id
    assert version.revisions == pr_util.create_parameters['revisions']
    assert version.source_repo == pr_util.source_repository
    assert version.source_ref == pr_util.create_parameters['source_ref']
    assert version.target_repo == pr_util.target_repository
    assert version.target_ref == pr_util.create_parameters['target_ref']
    assert version._last_merge_source_rev == pull_request._last_merge_source_rev
    assert version._last_merge_target_rev == pull_request._last_merge_target_rev
    assert version.last_merge_status == pull_request.last_merge_status
    assert version.merge_rev == pull_request.merge_rev
    assert version.pull_request == pull_request
883 883
884 884
def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
    """Linking comments to a new version must not touch already-linked ones."""
    version1 = pr_util.create_version_of_pull_request()
    linked_comment = pr_util.create_comment(linked_to=version1)
    unlinked_comment = pr_util.create_comment()
    version2 = pr_util.create_version_of_pull_request()

    PullRequestModel()._link_comments_to_version(version2)

    # only the previously-unlinked comment moves to version2
    assert unlinked_comment.pull_request_version_id == version2.pull_request_version_id
    assert linked_comment.pull_request_version_id == version1.pull_request_version_id
    assert unlinked_comment.pull_request_version_id != linked_comment.pull_request_version_id
903 903
904 904
def test_calculate_commits():
    """_calculate_commit_id_changes classifies ids into added/common/removed."""
    previous_ids = [1, 2, 3]
    current_ids = [1, 3, 4, 5]
    change = PullRequestModel()._calculate_commit_id_changes(
        previous_ids, current_ids)
    assert change.added == [4, 5]
    assert change.common == [1, 3]
    assert change.removed == [2]
    assert change.total == [1, 3, 4, 5]
913 913
914 914
def assert_inline_comments(pull_request, visible=None, outdated=None):
    """Assert the counts of visible and/or outdated inline comments on a PR.

    Either check is skipped when its expected count is None.
    """
    repo_id = pull_request.target_repo.repo_id
    if visible is not None:
        comments_model = CommentsModel()
        inline_comments = comments_model.get_inline_comments(
            repo_id, pull_request=pull_request)
        inline_cnt = comments_model.get_inline_comments_count(inline_comments)
        assert inline_cnt == visible
    if outdated is not None:
        outdated_comments = CommentsModel().get_outdated_comments(
            repo_id, pull_request)
        assert len(outdated_comments) == outdated
926 926
927 927
def assert_pr_file_changes(
        pull_request, added=None, modified=None, removed=None):
    """Assert the added/modified/removed file lists computed for a PR update."""
    model = PullRequestModel()
    pr_versions = model.get_versions(pull_request)
    # always use first version, ie original PR to calculate changes
    first_version = pr_versions[0]
    old_diff_data, new_diff_data = model._generate_update_diffs(
        pull_request, first_version)
    file_changes = model._calculate_file_changes(old_diff_data, new_diff_data)

    checks = [
        ('added', added, file_changes.added),
        ('modified', modified, file_changes.modified),
        ('removed', removed, file_changes.removed),
    ]
    for label, expected, actual in checks:
        assert expected == actual, \
            'expected %s:%s vs value:%s' % (label, expected, actual)
944 944
945 945
def outdated_comments_patcher(use_outdated=True):
    """Return a patcher forcing CommentsModel.use_outdated_comments to a
    fixed value (a mock, so calls can also be asserted on)."""
    patcher = mock.patch.object(
        CommentsModel, 'use_outdated_comments',
        return_value=use_outdated)
    return patcher
General Comments 0
You need to be logged in to leave comments. Login now