##// END OF EJS Templates
chore(code-cleanups): small fixes for readability
super-admin -
r5197:4a692945 default
parent child Browse files
Show More
@@ -1,1984 +1,1983 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 Base module for all VCS systems
20 Base module for all VCS systems
21 """
21 """
22 import os
22 import os
23 import re
23 import re
24 import time
24 import time
25 import shutil
25 import shutil
26 import datetime
26 import datetime
27 import fnmatch
27 import fnmatch
28 import itertools
28 import itertools
29 import logging
29 import logging
30 import dataclasses
30 import dataclasses
31 import warnings
31 import warnings
32
32
33 from zope.cachedescriptors.property import Lazy as LazyProperty
33 from zope.cachedescriptors.property import Lazy as LazyProperty
34
34
35
35
36 import rhodecode
36 import rhodecode
37 from rhodecode.translation import lazy_ugettext
37 from rhodecode.translation import lazy_ugettext
38 from rhodecode.lib.utils2 import safe_str, CachedProperty
38 from rhodecode.lib.utils2 import safe_str, CachedProperty
39 from rhodecode.lib.vcs.utils import author_name, author_email
39 from rhodecode.lib.vcs.utils import author_name, author_email
40 from rhodecode.lib.vcs.conf import settings
40 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.exceptions import (
41 from rhodecode.lib.vcs.exceptions import (
42 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
42 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
43 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
43 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
44 NodeDoesNotExistError, NodeNotChangedError, VCSError,
44 NodeDoesNotExistError, NodeNotChangedError, VCSError,
45 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
45 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
46 RepositoryError)
46 RepositoryError)
47
47
48
48
49 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
50
50
51
51
52 FILEMODE_DEFAULT = 0o100644
52 FILEMODE_DEFAULT = 0o100644
53 FILEMODE_EXECUTABLE = 0o100755
53 FILEMODE_EXECUTABLE = 0o100755
54 EMPTY_COMMIT_ID = '0' * 40
54 EMPTY_COMMIT_ID = '0' * 40
55
55
56
56
@dataclasses.dataclass
class Reference:
    """
    A VCS reference as a (type, name, commit_id) triple.

    Instances unpack like a 3-tuple, so ``type_, name, commit_id = ref``
    works anywhere the serialized form is needed.
    """

    type: str
    name: str
    commit_id: str

    def __iter__(self):
        # Support tuple-style unpacking and ':'.join(ref).
        yield from (self.type, self.name, self.commit_id)

    @property
    def branch(self):
        """Branch name when this reference points at a branch, else None."""
        return self.name if self.type == 'branch' else None

    @property
    def bookmark(self):
        """Bookmark name when this reference is a bookmark, else None."""
        return self.name if self.type == 'book' else None

    @property
    def to_str(self):
        """Serialized ``type:name:commit_id`` form of this reference."""
        return reference_to_unicode(self)

    def asdict(self):
        """Return a plain dict representation of this reference."""
        return dict(
            type=self.type,
            name=self.name,
            commit_id=self.commit_id,
        )
88
88
89
89
def unicode_to_reference(raw: str):
    """
    Parse a ``type:name:commit_id`` string into a :class:`Reference`.

    Returns None when the input evaluates to False (None or empty string).
    """
    if not raw:
        return None
    return Reference(*raw.split(':'))
100
100
101
101
def reference_to_unicode(ref: Reference):
    """
    Serialize a :class:`Reference` into its ``type:name:commit_id`` form.

    Returns None when the reference evaluates to False (e.g. None).
    """
    return ':'.join(ref) if ref else None
111
111
112
112
class MergeFailureReason(object):
    """
    Enumeration of every reason a server-side merge can fail.

    The numeric values may be persisted in the database, so members must
    never be renumbered. Renaming a member is fine and is the preferred
    way to deprecate an old reason.
    """

    # Merge finished without problems.
    NONE = 0

    # An unexpected exception was raised; consult the logs for details.
    UNKNOWN = 1

    # The merge itself failed because of conflicts.
    MERGE_FAILED = 2

    # Merge succeeded locally but pushing it to the target repo failed.
    PUSH_FAILED = 3

    # The chosen target reference is not a head of the target repository.
    TARGET_IS_NOT_HEAD = 4

    # The source has branches the target lacks; pushing the merge would
    # create additional branches in the target.
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # The target reference resolves to multiple heads, making the target
    # location ambiguous. Only possible for Mercurial branches.
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # The target repository is locked.
    TARGET_IS_LOCKED = 7

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # One of the involved commits could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    # The target repo reference is missing.
    MISSING_TARGET_REF = 9

    # The source repo reference is missing.
    MISSING_SOURCE_REF = 10

    # The merge failed because of conflicts related to sub repositories.
    SUBREPO_MERGE_FAILED = 11
164
164
165
165
class UpdateFailureReason(object):
    """
    Enumeration of every reason a pull request update can fail.

    The numeric values may be persisted in the database, so members must
    never be renumbered. Renaming a member is fine and is the preferred
    way to deprecate an old reason.
    """

    # Update finished without problems.
    NONE = 0

    # An unexpected exception was raised; consult the logs for details.
    UNKNOWN = 1

    # The pull request is already up to date.
    NO_CHANGE = 2

    # The pull request uses a reference type that update does not support.
    WRONG_REF_TYPE = 3

    # Update failed because the target reference is missing.
    MISSING_TARGET_REF = 4

    # Update failed because the source reference is missing.
    MISSING_SOURCE_REF = 5
194
194
195
195
class MergeResponse(object):
    """
    Outcome of a (simulated or executed) server-side merge.

    Carries whether the merge is possible, whether it was actually
    executed, the resulting merge :class:`Reference`, a failure reason
    code from :class:`MergeFailureReason`, and free-form ``metadata``
    used to format the user-facing status message.
    """

    # uses .format(**metadata) for variables
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            'This pull request cannot be merged because of an unhandled exception. '
            '{exception}'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            'This pull request could not be merged because push to '
            'target:`{target}@{merge_commit}` failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            'This pull request cannot be merged because the target '
            '`{target_ref.name}` is not a head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            'This pull request cannot be merged because the source contains '
            'more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            'This pull request cannot be merged because the target `{target_ref.name}` '
            'has multiple heads: `{heads}`.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            'This pull request cannot be merged because the target repository is '
            'locked by {locked_by}.'),

        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be merged because the target '
            'reference `{target_ref.name}` is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be merged because the source '
            'reference `{source_ref.name}` is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of conflicts related '
            'to sub repositories.'),

        # Deprecations
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            'This pull request cannot be merged because the target or the '
            'source reference is missing.'),

    }

    def __init__(self, possible, executed, merge_ref: Reference, failure_reason, metadata=None):
        self.possible = possible
        self.executed = executed
        self.merge_ref = merge_ref
        self.failure_reason = failure_reason
        self.metadata = metadata or {}

    def __repr__(self):
        return f'<MergeResponse:{self.label} {self.failure_reason}>'

    def __eq__(self, other):
        # merge_ref is deliberately excluded from equality; two responses
        # with the same outcome compare equal regardless of the exact ref.
        same_instance = isinstance(other, self.__class__)
        return same_instance \
            and self.possible == other.possible \
            and self.executed == other.executed \
            and self.failure_reason == other.failure_reason

    @property
    def label(self):
        """Public attribute name of the failure reason, e.g. ``'MERGE_FAILED'``."""
        # Reverse map: numeric reason -> public member name (privates skipped).
        label_dict = {v: k for k, v in MergeFailureReason.__dict__.items()
                      if not k.startswith('_')}
        return label_dict.get(self.failure_reason)

    @property
    def merge_status_message(self):
        """
        Return a human friendly error message for the given merge status code.
        """
        msg = safe_str(self.MERGE_STATUS_MESSAGES[self.failure_reason])

        try:
            return msg.format(**self.metadata)
        except Exception:
            # Best-effort formatting: a missing metadata key must not break
            # the response, fall back to the raw template.
            log.exception('Failed to format %s message', self)
            return msg

    def asdict(self):
        """Serialize the response into a plain dict (e.g. for API output)."""
        data = {}
        for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
                  'merge_status_message']:
            data[k] = getattr(self, k)
        return data
282
282
283
283
class TargetRefMissing(ValueError):
    """Raised when the target reference of an operation cannot be resolved."""
286
286
287
287
class SourceRefMissing(ValueError):
    """Raised when the source reference of an operation cannot be resolved."""
290
290
291
291
292 class BaseRepository(object):
292 class BaseRepository(object):
293 """
293 """
294 Base Repository for final backends
294 Base Repository for final backends
295
295
296 .. attribute:: DEFAULT_BRANCH_NAME
296 .. attribute:: DEFAULT_BRANCH_NAME
297
297
298 name of default branch (i.e. "trunk" for svn, "master" for git etc.
298 name of default branch (i.e. "trunk" for svn, "master" for git etc.
299
299
300 .. attribute:: commit_ids
300 .. attribute:: commit_ids
301
301
302 list of all available commit ids, in ascending order
302 list of all available commit ids, in ascending order
303
303
304 .. attribute:: path
304 .. attribute:: path
305
305
306 absolute path to the repository
306 absolute path to the repository
307
307
308 .. attribute:: bookmarks
308 .. attribute:: bookmarks
309
309
310 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
310 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
311 there are no bookmarks or the backend implementation does not support
311 there are no bookmarks or the backend implementation does not support
312 bookmarks.
312 bookmarks.
313
313
314 .. attribute:: tags
314 .. attribute:: tags
315
315
316 Mapping from name to :term:`Commit ID` of the tag.
316 Mapping from name to :term:`Commit ID` of the tag.
317
317
318 """
318 """
319
319
320 DEFAULT_BRANCH_NAME = None
320 DEFAULT_BRANCH_NAME = None
321 DEFAULT_CONTACT = "Unknown"
321 DEFAULT_CONTACT = "Unknown"
322 DEFAULT_DESCRIPTION = "unknown"
322 DEFAULT_DESCRIPTION = "unknown"
323 EMPTY_COMMIT_ID = '0' * 40
323 EMPTY_COMMIT_ID = '0' * 40
324 COMMIT_ID_PAT = re.compile(r'[0-9a-fA-F]{40}')
324 COMMIT_ID_PAT = re.compile(r'[0-9a-fA-F]{40}')
325
325
326 path = None
326 path = None
327
327
328 _is_empty = None
328 _is_empty = None
329 _commit_ids = {}
329 _commit_ids = {}
330
330
331 def __init__(self, repo_path, config=None, create=False, **kwargs):
331 def __init__(self, repo_path, config=None, create=False, **kwargs):
332 """
332 """
333 Initializes repository. Raises RepositoryError if repository could
333 Initializes repository. Raises RepositoryError if repository could
334 not be find at the given ``repo_path`` or directory at ``repo_path``
334 not be find at the given ``repo_path`` or directory at ``repo_path``
335 exists and ``create`` is set to True.
335 exists and ``create`` is set to True.
336
336
337 :param repo_path: local path of the repository
337 :param repo_path: local path of the repository
338 :param config: repository configuration
338 :param config: repository configuration
339 :param create=False: if set to True, would try to create repository.
339 :param create=False: if set to True, would try to create repository.
340 :param src_url=None: if set, should be proper url from which repository
340 :param src_url=None: if set, should be proper url from which repository
341 would be cloned; requires ``create`` parameter to be set to True -
341 would be cloned; requires ``create`` parameter to be set to True -
342 raises RepositoryError if src_url is set and create evaluates to
342 raises RepositoryError if src_url is set and create evaluates to
343 False
343 False
344 """
344 """
345 raise NotImplementedError
345 raise NotImplementedError
346
346
347 def __repr__(self):
347 def __repr__(self):
348 return f'<{self.__class__.__name__} at {self.path}>'
348 return f'<{self.__class__.__name__} at {self.path}>'
349
349
350 def __len__(self):
350 def __len__(self):
351 return self.count()
351 return self.count()
352
352
353 def __eq__(self, other):
353 def __eq__(self, other):
354 same_instance = isinstance(other, self.__class__)
354 same_instance = isinstance(other, self.__class__)
355 return same_instance and other.path == self.path
355 return same_instance and other.path == self.path
356
356
357 def __ne__(self, other):
357 def __ne__(self, other):
358 return not self.__eq__(other)
358 return not self.__eq__(other)
359
359
360 def get_create_shadow_cache_pr_path(self, db_repo):
360 def get_create_shadow_cache_pr_path(self, db_repo):
361 path = db_repo.cached_diffs_dir
361 path = db_repo.cached_diffs_dir
362 if not os.path.exists(path):
362 if not os.path.exists(path):
363 os.makedirs(path, 0o755)
363 os.makedirs(path, 0o755)
364 return path
364 return path
365
365
366 @classmethod
366 @classmethod
367 def get_default_config(cls, default=None):
367 def get_default_config(cls, default=None):
368 config = Config()
368 config = Config()
369 if default and isinstance(default, list):
369 if default and isinstance(default, list):
370 for section, key, val in default:
370 for section, key, val in default:
371 config.set(section, key, val)
371 config.set(section, key, val)
372 return config
372 return config
373
373
374 @LazyProperty
374 @LazyProperty
375 def _remote(self):
375 def _remote(self):
376 raise NotImplementedError
376 raise NotImplementedError
377
377
378 def _heads(self, branch=None):
378 def _heads(self, branch=None):
379 return []
379 return []
380
380
381 @LazyProperty
381 @LazyProperty
382 def EMPTY_COMMIT(self):
382 def EMPTY_COMMIT(self):
383 return EmptyCommit(self.EMPTY_COMMIT_ID)
383 return EmptyCommit(self.EMPTY_COMMIT_ID)
384
384
385 @LazyProperty
385 @LazyProperty
386 def alias(self):
386 def alias(self):
387 for k, v in settings.BACKENDS.items():
387 for k, v in settings.BACKENDS.items():
388 if v.split('.')[-1] == str(self.__class__.__name__):
388 if v.split('.')[-1] == str(self.__class__.__name__):
389 return k
389 return k
390
390
391 @LazyProperty
391 @LazyProperty
392 def name(self):
392 def name(self):
393 return safe_str(os.path.basename(self.path))
393 return safe_str(os.path.basename(self.path))
394
394
395 @LazyProperty
395 @LazyProperty
396 def description(self):
396 def description(self):
397 raise NotImplementedError
397 raise NotImplementedError
398
398
399 def refs(self):
399 def refs(self):
400 """
400 """
401 returns a `dict` with branches, bookmarks, tags, and closed_branches
401 returns a `dict` with branches, bookmarks, tags, and closed_branches
402 for this repository
402 for this repository
403 """
403 """
404 return dict(
404 return dict(
405 branches=self.branches,
405 branches=self.branches,
406 branches_closed=self.branches_closed,
406 branches_closed=self.branches_closed,
407 tags=self.tags,
407 tags=self.tags,
408 bookmarks=self.bookmarks
408 bookmarks=self.bookmarks
409 )
409 )
410
410
411 @LazyProperty
411 @LazyProperty
412 def branches(self):
412 def branches(self):
413 """
413 """
414 A `dict` which maps branch names to commit ids.
414 A `dict` which maps branch names to commit ids.
415 """
415 """
416 raise NotImplementedError
416 raise NotImplementedError
417
417
418 @LazyProperty
418 @LazyProperty
419 def branches_closed(self):
419 def branches_closed(self):
420 """
420 """
421 A `dict` which maps tags names to commit ids.
421 A `dict` which maps tags names to commit ids.
422 """
422 """
423 raise NotImplementedError
423 raise NotImplementedError
424
424
425 @LazyProperty
425 @LazyProperty
426 def bookmarks(self):
426 def bookmarks(self):
427 """
427 """
428 A `dict` which maps tags names to commit ids.
428 A `dict` which maps tags names to commit ids.
429 """
429 """
430 raise NotImplementedError
430 raise NotImplementedError
431
431
432 @LazyProperty
432 @LazyProperty
433 def tags(self):
433 def tags(self):
434 """
434 """
435 A `dict` which maps tags names to commit ids.
435 A `dict` which maps tags names to commit ids.
436 """
436 """
437 raise NotImplementedError
437 raise NotImplementedError
438
438
439 @LazyProperty
439 @LazyProperty
440 def size(self):
440 def size(self):
441 """
441 """
442 Returns combined size in bytes for all repository files
442 Returns combined size in bytes for all repository files
443 """
443 """
444 tip = self.get_commit()
444 tip = self.get_commit()
445 return tip.size
445 return tip.size
446
446
447 def size_at_commit(self, commit_id):
447 def size_at_commit(self, commit_id):
448 commit = self.get_commit(commit_id)
448 commit = self.get_commit(commit_id)
449 return commit.size
449 return commit.size
450
450
451 def _check_for_empty(self):
451 def _check_for_empty(self):
452 no_commits = len(self._commit_ids) == 0
452 no_commits = len(self._commit_ids) == 0
453 if no_commits:
453 if no_commits:
454 # check on remote to be sure
454 # check on remote to be sure
455 return self._remote.is_empty()
455 return self._remote.is_empty()
456 else:
456 else:
457 return False
457 return False
458
458
459 def is_empty(self):
459 def is_empty(self):
460 if rhodecode.is_test:
460 if rhodecode.is_test:
461 return self._check_for_empty()
461 return self._check_for_empty()
462
462
463 if self._is_empty is None:
463 if self._is_empty is None:
464 # cache empty for production, but not tests
464 # cache empty for production, but not tests
465 self._is_empty = self._check_for_empty()
465 self._is_empty = self._check_for_empty()
466
466
467 return self._is_empty
467 return self._is_empty
468
468
469 @staticmethod
469 @staticmethod
470 def check_url(url, config):
470 def check_url(url, config):
471 """
471 """
472 Function will check given url and try to verify if it's a valid
472 Function will check given url and try to verify if it's a valid
473 link.
473 link.
474 """
474 """
475 raise NotImplementedError
475 raise NotImplementedError
476
476
477 @staticmethod
477 @staticmethod
478 def is_valid_repository(path):
478 def is_valid_repository(path):
479 """
479 """
480 Check if given `path` contains a valid repository of this backend
480 Check if given `path` contains a valid repository of this backend
481 """
481 """
482 raise NotImplementedError
482 raise NotImplementedError
483
483
484 # ==========================================================================
484 # ==========================================================================
485 # COMMITS
485 # COMMITS
486 # ==========================================================================
486 # ==========================================================================
487
487
488 @CachedProperty
488 @CachedProperty
489 def commit_ids(self):
489 def commit_ids(self):
490 raise NotImplementedError
490 raise NotImplementedError
491
491
492 def append_commit_id(self, commit_id):
492 def append_commit_id(self, commit_id):
493 if commit_id not in self.commit_ids:
493 if commit_id not in self.commit_ids:
494 self._rebuild_cache(self.commit_ids + [commit_id])
494 self._rebuild_cache(self.commit_ids + [commit_id])
495
495
496 # clear cache
496 # clear cache
497 self._invalidate_prop_cache('commit_ids')
497 self._invalidate_prop_cache('commit_ids')
498 self._is_empty = False
498 self._is_empty = False
499
499
500 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
500 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
501 translate_tag=None, maybe_unreachable=False, reference_obj=None):
501 translate_tag=None, maybe_unreachable=False, reference_obj=None):
502 """
502 """
503 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
503 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
504 are both None, most recent commit is returned.
504 are both None, most recent commit is returned.
505
505
506 :param pre_load: Optional. List of commit attributes to load.
506 :param pre_load: Optional. List of commit attributes to load.
507
507
508 :raises ``EmptyRepositoryError``: if there are no commits
508 :raises ``EmptyRepositoryError``: if there are no commits
509 """
509 """
510 raise NotImplementedError
510 raise NotImplementedError
511
511
512 def __iter__(self):
512 def __iter__(self):
513 for commit_id in self.commit_ids:
513 for commit_id in self.commit_ids:
514 yield self.get_commit(commit_id=commit_id)
514 yield self.get_commit(commit_id=commit_id)
515
515
516 def get_commits(
516 def get_commits(
517 self, start_id=None, end_id=None, start_date=None, end_date=None,
517 self, start_id=None, end_id=None, start_date=None, end_date=None,
518 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
518 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
519 """
519 """
520 Returns iterator of `BaseCommit` objects from start to end
520 Returns iterator of `BaseCommit` objects from start to end
521 not inclusive. This should behave just like a list, ie. end is not
521 not inclusive. This should behave just like a list, ie. end is not
522 inclusive.
522 inclusive.
523
523
524 :param start_id: None or str, must be a valid commit id
524 :param start_id: None or str, must be a valid commit id
525 :param end_id: None or str, must be a valid commit id
525 :param end_id: None or str, must be a valid commit id
526 :param start_date:
526 :param start_date:
527 :param end_date:
527 :param end_date:
528 :param branch_name:
528 :param branch_name:
529 :param show_hidden:
529 :param show_hidden:
530 :param pre_load:
530 :param pre_load:
531 :param translate_tags:
531 :param translate_tags:
532 """
532 """
533 raise NotImplementedError
533 raise NotImplementedError
534
534
535 def __getitem__(self, key):
535 def __getitem__(self, key):
536 """
536 """
537 Allows index based access to the commit objects of this repository.
537 Allows index based access to the commit objects of this repository.
538 """
538 """
539 pre_load = ["author", "branch", "date", "message", "parents"]
539 pre_load = ["author", "branch", "date", "message", "parents"]
540 if isinstance(key, slice):
540 if isinstance(key, slice):
541 return self._get_range(key, pre_load)
541 return self._get_range(key, pre_load)
542 return self.get_commit(commit_idx=key, pre_load=pre_load)
542 return self.get_commit(commit_idx=key, pre_load=pre_load)
543
543
544 def _get_range(self, slice_obj, pre_load):
544 def _get_range(self, slice_obj, pre_load):
545 for commit_id in self.commit_ids.__getitem__(slice_obj):
545 for commit_id in self.commit_ids.__getitem__(slice_obj):
546 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
546 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
547
547
548 def count(self):
548 def count(self):
549 return len(self.commit_ids)
549 return len(self.commit_ids)
550
550
551 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
551 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
552 """
552 """
553 Creates and returns a tag for the given ``commit_id``.
553 Creates and returns a tag for the given ``commit_id``.
554
554
555 :param name: name for new tag
555 :param name: name for new tag
556 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
556 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
557 :param commit_id: commit id for which new tag would be created
557 :param commit_id: commit id for which new tag would be created
558 :param message: message of the tag's commit
558 :param message: message of the tag's commit
559 :param date: date of tag's commit
559 :param date: date of tag's commit
560
560
561 :raises TagAlreadyExistError: if tag with same name already exists
561 :raises TagAlreadyExistError: if tag with same name already exists
562 """
562 """
563 raise NotImplementedError
563 raise NotImplementedError
564
564
565 def remove_tag(self, name, user, message=None, date=None):
565 def remove_tag(self, name, user, message=None, date=None):
566 """
566 """
567 Removes tag with the given ``name``.
567 Removes tag with the given ``name``.
568
568
569 :param name: name of the tag to be removed
569 :param name: name of the tag to be removed
570 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
570 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
571 :param message: message of the tag's removal commit
571 :param message: message of the tag's removal commit
572 :param date: date of tag's removal commit
572 :param date: date of tag's removal commit
573
573
574 :raises TagDoesNotExistError: if tag with given name does not exists
574 :raises TagDoesNotExistError: if tag with given name does not exists
575 """
575 """
576 raise NotImplementedError
576 raise NotImplementedError
577
577
578 def get_diff(
578 def get_diff(
579 self, commit1, commit2, path=None, ignore_whitespace=False,
579 self, commit1, commit2, path=None, ignore_whitespace=False,
580 context=3, path1=None):
580 context=3, path1=None):
581 """
581 """
582 Returns (git like) *diff*, as plain text. Shows changes introduced by
582 Returns (git like) *diff*, as plain text. Shows changes introduced by
583 `commit2` since `commit1`.
583 `commit2` since `commit1`.
584
584
585 :param commit1: Entry point from which diff is shown. Can be
585 :param commit1: Entry point from which diff is shown. Can be
586 ``self.EMPTY_COMMIT`` - in this case, patch showing all
586 ``self.EMPTY_COMMIT`` - in this case, patch showing all
587 the changes since empty state of the repository until `commit2`
587 the changes since empty state of the repository until `commit2`
588 :param commit2: Until which commit changes should be shown.
588 :param commit2: Until which commit changes should be shown.
589 :param path: Can be set to a path of a file to create a diff of that
589 :param path: Can be set to a path of a file to create a diff of that
590 file. If `path1` is also set, this value is only associated to
590 file. If `path1` is also set, this value is only associated to
591 `commit2`.
591 `commit2`.
592 :param ignore_whitespace: If set to ``True``, would not show whitespace
592 :param ignore_whitespace: If set to ``True``, would not show whitespace
593 changes. Defaults to ``False``.
593 changes. Defaults to ``False``.
594 :param context: How many lines before/after changed lines should be
594 :param context: How many lines before/after changed lines should be
595 shown. Defaults to ``3``.
595 shown. Defaults to ``3``.
596 :param path1: Can be set to a path to associate with `commit1`. This
596 :param path1: Can be set to a path to associate with `commit1`. This
597 parameter works only for backends which support diff generation for
597 parameter works only for backends which support diff generation for
598 different paths. Other backends will raise a `ValueError` if `path1`
598 different paths. Other backends will raise a `ValueError` if `path1`
599 is set and has a different value than `path`.
599 is set and has a different value than `path`.
600 :param file_path: filter this diff by given path pattern
600 :param file_path: filter this diff by given path pattern
601 """
601 """
602 raise NotImplementedError
602 raise NotImplementedError
603
603
604 def strip(self, commit_id, branch=None):
604 def strip(self, commit_id, branch=None):
605 """
605 """
606 Strip given commit_id from the repository
606 Strip given commit_id from the repository
607 """
607 """
608 raise NotImplementedError
608 raise NotImplementedError
609
609
610 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
610 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
611 """
611 """
612 Return a latest common ancestor commit if one exists for this repo
612 Return a latest common ancestor commit if one exists for this repo
613 `commit_id1` vs `commit_id2` from `repo2`.
613 `commit_id1` vs `commit_id2` from `repo2`.
614
614
615 :param commit_id1: Commit it from this repository to use as a
615 :param commit_id1: Commit it from this repository to use as a
616 target for the comparison.
616 target for the comparison.
617 :param commit_id2: Source commit id to use for comparison.
617 :param commit_id2: Source commit id to use for comparison.
618 :param repo2: Source repository to use for comparison.
618 :param repo2: Source repository to use for comparison.
619 """
619 """
620 raise NotImplementedError
620 raise NotImplementedError
621
621
622 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
622 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
623 """
623 """
624 Compare this repository's revision `commit_id1` with `commit_id2`.
624 Compare this repository's revision `commit_id1` with `commit_id2`.
625
625
626 Returns a tuple(commits, ancestor) that would be merged from
626 Returns a tuple(commits, ancestor) that would be merged from
627 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
627 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
628 will be returned as ancestor.
628 will be returned as ancestor.
629
629
630 :param commit_id1: Commit it from this repository to use as a
630 :param commit_id1: Commit it from this repository to use as a
631 target for the comparison.
631 target for the comparison.
632 :param commit_id2: Source commit id to use for comparison.
632 :param commit_id2: Source commit id to use for comparison.
633 :param repo2: Source repository to use for comparison.
633 :param repo2: Source repository to use for comparison.
634 :param merge: If set to ``True`` will do a merge compare which also
634 :param merge: If set to ``True`` will do a merge compare which also
635 returns the common ancestor.
635 returns the common ancestor.
636 :param pre_load: Optional. List of commit attributes to load.
636 :param pre_load: Optional. List of commit attributes to load.
637 """
637 """
638 raise NotImplementedError
638 raise NotImplementedError
639
639
640 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
640 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
641 user_name='', user_email='', message='', dry_run=False,
641 user_name='', user_email='', message='', dry_run=False,
642 use_rebase=False, close_branch=False):
642 use_rebase=False, close_branch=False):
643 """
643 """
644 Merge the revisions specified in `source_ref` from `source_repo`
644 Merge the revisions specified in `source_ref` from `source_repo`
645 onto the `target_ref` of this repository.
645 onto the `target_ref` of this repository.
646
646
647 `source_ref` and `target_ref` are named tupls with the following
647 `source_ref` and `target_ref` are named tupls with the following
648 fields `type`, `name` and `commit_id`.
648 fields `type`, `name` and `commit_id`.
649
649
650 Returns a MergeResponse named tuple with the following fields
650 Returns a MergeResponse named tuple with the following fields
651 'possible', 'executed', 'source_commit', 'target_commit',
651 'possible', 'executed', 'source_commit', 'target_commit',
652 'merge_commit'.
652 'merge_commit'.
653
653
654 :param repo_id: `repo_id` target repo id.
654 :param repo_id: `repo_id` target repo id.
655 :param workspace_id: `workspace_id` unique identifier.
655 :param workspace_id: `workspace_id` unique identifier.
656 :param target_ref: `target_ref` points to the commit on top of which
656 :param target_ref: `target_ref` points to the commit on top of which
657 the `source_ref` should be merged.
657 the `source_ref` should be merged.
658 :param source_repo: The repository that contains the commits to be
658 :param source_repo: The repository that contains the commits to be
659 merged.
659 merged.
660 :param source_ref: `source_ref` points to the topmost commit from
660 :param source_ref: `source_ref` points to the topmost commit from
661 the `source_repo` which should be merged.
661 the `source_repo` which should be merged.
662 :param user_name: Merge commit `user_name`.
662 :param user_name: Merge commit `user_name`.
663 :param user_email: Merge commit `user_email`.
663 :param user_email: Merge commit `user_email`.
664 :param message: Merge commit `message`.
664 :param message: Merge commit `message`.
665 :param dry_run: If `True` the merge will not take place.
665 :param dry_run: If `True` the merge will not take place.
666 :param use_rebase: If `True` commits from the source will be rebased
666 :param use_rebase: If `True` commits from the source will be rebased
667 on top of the target instead of being merged.
667 on top of the target instead of being merged.
668 :param close_branch: If `True` branch will be close before merging it
668 :param close_branch: If `True` branch will be close before merging it
669 """
669 """
670 if dry_run:
670 if dry_run:
671 message = message or settings.MERGE_DRY_RUN_MESSAGE
671 message = message or settings.MERGE_DRY_RUN_MESSAGE
672 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
672 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
673 user_name = user_name or settings.MERGE_DRY_RUN_USER
673 user_name = user_name or settings.MERGE_DRY_RUN_USER
674 else:
674 else:
675 if not user_name:
675 if not user_name:
676 raise ValueError('user_name cannot be empty')
676 raise ValueError('user_name cannot be empty')
677 if not user_email:
677 if not user_email:
678 raise ValueError('user_email cannot be empty')
678 raise ValueError('user_email cannot be empty')
679 if not message:
679 if not message:
680 raise ValueError('message cannot be empty')
680 raise ValueError('message cannot be empty')
681
681
682 try:
682 try:
683 return self._merge_repo(
683 return self._merge_repo(
684 repo_id, workspace_id, target_ref, source_repo,
684 repo_id, workspace_id, target_ref, source_repo,
685 source_ref, message, user_name, user_email, dry_run=dry_run,
685 source_ref, message, user_name, user_email, dry_run=dry_run,
686 use_rebase=use_rebase, close_branch=close_branch)
686 use_rebase=use_rebase, close_branch=close_branch)
687 except RepositoryError as exc:
687 except RepositoryError as exc:
688 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
688 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
689 return MergeResponse(
689 return MergeResponse(
690 False, False, None, MergeFailureReason.UNKNOWN,
690 False, False, None, MergeFailureReason.UNKNOWN,
691 metadata={'exception': str(exc)})
691 metadata={'exception': str(exc)})
692
692
693 def _merge_repo(self, repo_id, workspace_id, target_ref,
693 def _merge_repo(self, repo_id, workspace_id, target_ref,
694 source_repo, source_ref, merge_message,
694 source_repo, source_ref, merge_message,
695 merger_name, merger_email, dry_run=False,
695 merger_name, merger_email, dry_run=False,
696 use_rebase=False, close_branch=False):
696 use_rebase=False, close_branch=False):
697 """Internal implementation of merge."""
697 """Internal implementation of merge."""
698 raise NotImplementedError
698 raise NotImplementedError
699
699
700 def _maybe_prepare_merge_workspace(
700 def _maybe_prepare_merge_workspace(
701 self, repo_id, workspace_id, target_ref, source_ref):
701 self, repo_id, workspace_id, target_ref, source_ref):
702 """
702 """
703 Create the merge workspace.
703 Create the merge workspace.
704
704
705 :param workspace_id: `workspace_id` unique identifier.
705 :param workspace_id: `workspace_id` unique identifier.
706 """
706 """
707 raise NotImplementedError
707 raise NotImplementedError
708
708
709 @classmethod
709 @classmethod
710 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
710 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
711 """
711 """
712 Legacy version that was used before. We still need it for
712 Legacy version that was used before. We still need it for
713 backward compat
713 backward compat
714 """
714 """
715 return os.path.join(
715 return os.path.join(
716 os.path.dirname(repo_path),
716 os.path.dirname(repo_path),
717 f'.__shadow_{os.path.basename(repo_path)}_{workspace_id}')
717 f'.__shadow_{os.path.basename(repo_path)}_{workspace_id}')
718
718
719 @classmethod
719 @classmethod
720 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
720 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
721 # The name of the shadow repository must start with '.', so it is
721 # The name of the shadow repository must start with '.', so it is
722 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
722 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
723 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
723 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
724 if os.path.exists(legacy_repository_path):
724 if os.path.exists(legacy_repository_path):
725 return legacy_repository_path
725 return legacy_repository_path
726 else:
726 else:
727 return os.path.join(
727 return os.path.join(
728 os.path.dirname(repo_path),
728 os.path.dirname(repo_path),
729 f'.__shadow_repo_{repo_id}_{workspace_id}')
729 f'.__shadow_repo_{repo_id}_{workspace_id}')
730
730
731 def cleanup_merge_workspace(self, repo_id, workspace_id):
731 def cleanup_merge_workspace(self, repo_id, workspace_id):
732 """
732 """
733 Remove merge workspace.
733 Remove merge workspace.
734
734
735 This function MUST not fail in case there is no workspace associated to
735 This function MUST not fail in case there is no workspace associated to
736 the given `workspace_id`.
736 the given `workspace_id`.
737
737
738 :param workspace_id: `workspace_id` unique identifier.
738 :param workspace_id: `workspace_id` unique identifier.
739 """
739 """
740 shadow_repository_path = self._get_shadow_repository_path(
740 shadow_repository_path = self._get_shadow_repository_path(
741 self.path, repo_id, workspace_id)
741 self.path, repo_id, workspace_id)
742 shadow_repository_path_del = '{}.{}.delete'.format(
742 shadow_repository_path_del = '{}.{}.delete'.format(
743 shadow_repository_path, time.time())
743 shadow_repository_path, time.time())
744
744
745 # move the shadow repo, so it never conflicts with the one used.
745 # move the shadow repo, so it never conflicts with the one used.
746 # we use this method because shutil.rmtree had some edge case problems
746 # we use this method because shutil.rmtree had some edge case problems
747 # removing symlinked repositories
747 # removing symlinked repositories
748 if not os.path.isdir(shadow_repository_path):
748 if not os.path.isdir(shadow_repository_path):
749 return
749 return
750
750
751 shutil.move(shadow_repository_path, shadow_repository_path_del)
751 shutil.move(shadow_repository_path, shadow_repository_path_del)
752 try:
752 try:
753 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
753 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
754 except Exception:
754 except Exception:
755 log.exception('Failed to gracefully remove shadow repo under %s',
755 log.exception('Failed to gracefully remove shadow repo under %s',
756 shadow_repository_path_del)
756 shadow_repository_path_del)
757 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
757 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
758
758
759 # ========== #
759 # ========== #
760 # COMMIT API #
760 # COMMIT API #
761 # ========== #
761 # ========== #
762
762
763 @LazyProperty
763 @LazyProperty
764 def in_memory_commit(self):
764 def in_memory_commit(self):
765 """
765 """
766 Returns :class:`InMemoryCommit` object for this repository.
766 Returns :class:`InMemoryCommit` object for this repository.
767 """
767 """
768 raise NotImplementedError
768 raise NotImplementedError
769
769
770 # ======================== #
770 # ======================== #
771 # UTILITIES FOR SUBCLASSES #
771 # UTILITIES FOR SUBCLASSES #
772 # ======================== #
772 # ======================== #
773
773
774 def _validate_diff_commits(self, commit1, commit2):
774 def _validate_diff_commits(self, commit1, commit2):
775 """
775 """
776 Validates that the given commits are related to this repository.
776 Validates that the given commits are related to this repository.
777
777
778 Intended as a utility for sub classes to have a consistent validation
778 Intended as a utility for sub classes to have a consistent validation
779 of input parameters in methods like :meth:`get_diff`.
779 of input parameters in methods like :meth:`get_diff`.
780 """
780 """
781 self._validate_commit(commit1)
781 self._validate_commit(commit1)
782 self._validate_commit(commit2)
782 self._validate_commit(commit2)
783 if (isinstance(commit1, EmptyCommit) and
783 if (isinstance(commit1, EmptyCommit) and
784 isinstance(commit2, EmptyCommit)):
784 isinstance(commit2, EmptyCommit)):
785 raise ValueError("Cannot compare two empty commits")
785 raise ValueError("Cannot compare two empty commits")
786
786
787 def _validate_commit(self, commit):
787 def _validate_commit(self, commit):
788 if not isinstance(commit, BaseCommit):
788 if not isinstance(commit, BaseCommit):
789 raise TypeError(
789 raise TypeError(
790 "%s is not of type BaseCommit" % repr(commit))
790 "%s is not of type BaseCommit" % repr(commit))
791 if commit.repository != self and not isinstance(commit, EmptyCommit):
791 if commit.repository != self and not isinstance(commit, EmptyCommit):
792 raise ValueError(
792 raise ValueError(
793 "Commit %s must be a valid commit from this repository %s, "
793 "Commit %s must be a valid commit from this repository %s, "
794 "related to this repository instead %s." %
794 "related to this repository instead %s." %
795 (commit, self, commit.repository))
795 (commit, self, commit.repository))
796
796
797 def _validate_commit_id(self, commit_id):
797 def _validate_commit_id(self, commit_id):
798 if not isinstance(commit_id, str):
798 if not isinstance(commit_id, str):
799 raise TypeError(f"commit_id must be a string value got {type(commit_id)} instead")
799 raise TypeError(f"commit_id must be a string value got {type(commit_id)} instead")
800
800
801 def _validate_commit_idx(self, commit_idx):
801 def _validate_commit_idx(self, commit_idx):
802 if not isinstance(commit_idx, int):
802 if not isinstance(commit_idx, int):
803 raise TypeError(f"commit_idx must be a numeric value, got {type(commit_idx)}")
803 raise TypeError(f"commit_idx must be a numeric value, got {type(commit_idx)}")
804
804
805 def _validate_branch_name(self, branch_name):
805 def _validate_branch_name(self, branch_name):
806 if branch_name and branch_name not in self.branches_all:
806 if branch_name and branch_name not in self.branches_all:
807 msg = (f"Branch {branch_name} not found in {self}")
807 msg = (f"Branch {branch_name} not found in {self}")
808 raise BranchDoesNotExistError(msg)
808 raise BranchDoesNotExistError(msg)
809
809
810 #
810 #
811 # Supporting deprecated API parts
811 # Supporting deprecated API parts
812 # TODO: johbo: consider to move this into a mixin
812 # TODO: johbo: consider to move this into a mixin
813 #
813 #
814
814
815 @property
815 @property
816 def EMPTY_CHANGESET(self):
816 def EMPTY_CHANGESET(self):
817 warnings.warn(
817 warnings.warn(
818 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
818 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
819 return self.EMPTY_COMMIT_ID
819 return self.EMPTY_COMMIT_ID
820
820
821 @property
821 @property
822 def revisions(self):
822 def revisions(self):
823 warnings.warn("Use commits attribute instead", DeprecationWarning)
823 warnings.warn("Use commits attribute instead", DeprecationWarning)
824 return self.commit_ids
824 return self.commit_ids
825
825
826 @revisions.setter
826 @revisions.setter
827 def revisions(self, value):
827 def revisions(self, value):
828 warnings.warn("Use commits attribute instead", DeprecationWarning)
828 warnings.warn("Use commits attribute instead", DeprecationWarning)
829 self.commit_ids = value
829 self.commit_ids = value
830
830
831 def get_changeset(self, revision=None, pre_load=None):
831 def get_changeset(self, revision=None, pre_load=None):
832 warnings.warn("Use get_commit instead", DeprecationWarning)
832 warnings.warn("Use get_commit instead", DeprecationWarning)
833 commit_id = None
833 commit_id = None
834 commit_idx = None
834 commit_idx = None
835 if isinstance(revision, str):
835 if isinstance(revision, str):
836 commit_id = revision
836 commit_id = revision
837 else:
837 else:
838 commit_idx = revision
838 commit_idx = revision
839 return self.get_commit(
839 return self.get_commit(
840 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
840 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
841
841
842 def get_changesets(
842 def get_changesets(
843 self, start=None, end=None, start_date=None, end_date=None,
843 self, start=None, end=None, start_date=None, end_date=None,
844 branch_name=None, pre_load=None):
844 branch_name=None, pre_load=None):
845 warnings.warn("Use get_commits instead", DeprecationWarning)
845 warnings.warn("Use get_commits instead", DeprecationWarning)
846 start_id = self._revision_to_commit(start)
846 start_id = self._revision_to_commit(start)
847 end_id = self._revision_to_commit(end)
847 end_id = self._revision_to_commit(end)
848 return self.get_commits(
848 return self.get_commits(
849 start_id=start_id, end_id=end_id, start_date=start_date,
849 start_id=start_id, end_id=end_id, start_date=start_date,
850 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
850 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
851
851
852 def _revision_to_commit(self, revision):
852 def _revision_to_commit(self, revision):
853 """
853 """
854 Translates a revision to a commit_id
854 Translates a revision to a commit_id
855
855
856 Helps to support the old changeset based API which allows to use
856 Helps to support the old changeset based API which allows to use
857 commit ids and commit indices interchangeable.
857 commit ids and commit indices interchangeable.
858 """
858 """
859 if revision is None:
859 if revision is None:
860 return revision
860 return revision
861
861
862 if isinstance(revision, str):
862 if isinstance(revision, str):
863 commit_id = revision
863 commit_id = revision
864 else:
864 else:
865 commit_id = self.commit_ids[revision]
865 commit_id = self.commit_ids[revision]
866 return commit_id
866 return commit_id
867
867
868 @property
868 @property
869 def in_memory_changeset(self):
869 def in_memory_changeset(self):
870 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
870 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
871 return self.in_memory_commit
871 return self.in_memory_commit
872
872
873 def get_path_permissions(self, username):
873 def get_path_permissions(self, username):
874 """
874 """
875 Returns a path permission checker or None if not supported
875 Returns a path permission checker or None if not supported
876
876
877 :param username: session user name
877 :param username: session user name
878 :return: an instance of BasePathPermissionChecker or None
878 :return: an instance of BasePathPermissionChecker or None
879 """
879 """
880 return None
880 return None
881
881
882 def install_hooks(self, force=False):
882 def install_hooks(self, force=False):
883 return self._remote.install_hooks(force)
883 return self._remote.install_hooks(force)
884
884
885 def get_hooks_info(self):
885 def get_hooks_info(self):
886 return self._remote.get_hooks_info()
886 return self._remote.get_hooks_info()
887
887
888 def vcsserver_invalidate_cache(self, delete=False):
888 def vcsserver_invalidate_cache(self, delete=False):
889 return self._remote.vcsserver_invalidate_cache(delete)
889 return self._remote.vcsserver_invalidate_cache(delete)
890
890
891
891
892 class BaseCommit(object):
892 class BaseCommit(object):
893 """
893 """
894 Each backend should implement it's commit representation.
894 Each backend should implement it's commit representation.
895
895
896 **Attributes**
896 **Attributes**
897
897
898 ``repository``
898 ``repository``
899 repository object within which commit exists
899 repository object within which commit exists
900
900
901 ``id``
901 ``id``
902 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
902 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
903 just ``tip``.
903 just ``tip``.
904
904
905 ``raw_id``
905 ``raw_id``
906 raw commit representation (i.e. full 40 length sha for git
906 raw commit representation (i.e. full 40 length sha for git
907 backend)
907 backend)
908
908
909 ``short_id``
909 ``short_id``
910 shortened (if apply) version of ``raw_id``; it would be simple
910 shortened (if apply) version of ``raw_id``; it would be simple
911 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
911 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
912 as ``raw_id`` for subversion
912 as ``raw_id`` for subversion
913
913
914 ``idx``
914 ``idx``
915 commit index
915 commit index
916
916
917 ``files``
917 ``files``
918 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
918 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
919
919
920 ``dirs``
920 ``dirs``
921 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
921 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
922
922
923 ``nodes``
923 ``nodes``
924 combined list of ``Node`` objects
924 combined list of ``Node`` objects
925
925
926 ``author``
926 ``author``
927 author of the commit, as unicode
927 author of the commit, as unicode
928
928
929 ``message``
929 ``message``
930 message of the commit, as unicode
930 message of the commit, as unicode
931
931
932 ``parents``
932 ``parents``
933 list of parent commits
933 list of parent commits
934
934
935 """
935 """
936 repository = None
936 repository = None
937 branch = None
937 branch = None
938
938
939 """
939 """
940 Depending on the backend this should be set to the branch name of the
940 Depending on the backend this should be set to the branch name of the
941 commit. Backends not supporting branches on commits should leave this
941 commit. Backends not supporting branches on commits should leave this
942 value as ``None``.
942 value as ``None``.
943 """
943 """
944
944
945 _ARCHIVE_PREFIX_TEMPLATE = '{repo_name}-{short_id}'
945 _ARCHIVE_PREFIX_TEMPLATE = '{repo_name}-{short_id}'
946 """
946 """
947 This template is used to generate a default prefix for repository archives
947 This template is used to generate a default prefix for repository archives
948 if no prefix has been specified.
948 if no prefix has been specified.
949 """
949 """
950
950
951 def __repr__(self):
951 def __repr__(self):
952 return self.__str__()
952 return self.__str__()
953
953
954 def __str__(self):
954 def __str__(self):
955 return f'<{self.__class__.__name__} at {self.idx}:{self.short_id}>'
955 return f'<{self.__class__.__name__} at {self.idx}:{self.short_id}>'
956
956
957 def __eq__(self, other):
957 def __eq__(self, other):
958 same_instance = isinstance(other, self.__class__)
958 same_instance = isinstance(other, self.__class__)
959 return same_instance and self.raw_id == other.raw_id
959 return same_instance and self.raw_id == other.raw_id
960
960
961 def __json__(self):
961 def __json__(self):
962 parents = []
962 parents = []
963 try:
963 try:
964 for parent in self.parents:
964 for parent in self.parents:
965 parents.append({'raw_id': parent.raw_id})
965 parents.append({'raw_id': parent.raw_id})
966 except NotImplementedError:
966 except NotImplementedError:
967 # empty commit doesn't have parents implemented
967 # empty commit doesn't have parents implemented
968 pass
968 pass
969
969
970 return {
970 return {
971 'short_id': self.short_id,
971 'short_id': self.short_id,
972 'raw_id': self.raw_id,
972 'raw_id': self.raw_id,
973 'revision': self.idx,
973 'revision': self.idx,
974 'message': self.message,
974 'message': self.message,
975 'date': self.date,
975 'date': self.date,
976 'author': self.author,
976 'author': self.author,
977 'parents': parents,
977 'parents': parents,
978 'branch': self.branch
978 'branch': self.branch
979 }
979 }
980
980
981 def __getstate__(self):
981 def __getstate__(self):
982 d = self.__dict__.copy()
982 d = self.__dict__.copy()
983 d.pop('_remote', None)
983 d.pop('_remote', None)
984 d.pop('repository', None)
984 d.pop('repository', None)
985 return d
985 return d
986
986
987 def get_remote(self):
987 def get_remote(self):
988 return self._remote
988 return self._remote
989
989
990 def serialize(self):
990 def serialize(self):
991 return self.__json__()
991 return self.__json__()
992
992
993 def _get_refs(self):
993 def _get_refs(self):
994 return {
994 return {
995 'branches': [self.branch] if self.branch else [],
995 'branches': [self.branch] if self.branch else [],
996 'bookmarks': getattr(self, 'bookmarks', []),
996 'bookmarks': getattr(self, 'bookmarks', []),
997 'tags': self.tags
997 'tags': self.tags
998 }
998 }
999
999
1000 @LazyProperty
1000 @LazyProperty
1001 def last(self):
1001 def last(self):
1002 """
1002 """
1003 ``True`` if this is last commit in repository, ``False``
1003 ``True`` if this is last commit in repository, ``False``
1004 otherwise; trying to access this attribute while there is no
1004 otherwise; trying to access this attribute while there is no
1005 commits would raise `EmptyRepositoryError`
1005 commits would raise `EmptyRepositoryError`
1006 """
1006 """
1007 if self.repository is None:
1007 if self.repository is None:
1008 raise CommitError("Cannot check if it's most recent commit")
1008 raise CommitError("Cannot check if it's most recent commit")
1009 return self.raw_id == self.repository.commit_ids[-1]
1009 return self.raw_id == self.repository.commit_ids[-1]
1010
1010
1011 @LazyProperty
1011 @LazyProperty
1012 def parents(self):
1012 def parents(self):
1013 """
1013 """
1014 Returns list of parent commits.
1014 Returns list of parent commits.
1015 """
1015 """
1016 raise NotImplementedError
1016 raise NotImplementedError
1017
1017
1018 @LazyProperty
1018 @LazyProperty
1019 def first_parent(self):
1019 def first_parent(self):
1020 """
1020 """
1021 Returns list of parent commits.
1021 Returns list of parent commits.
1022 """
1022 """
1023 return self.parents[0] if self.parents else EmptyCommit()
1023 return self.parents[0] if self.parents else EmptyCommit()
1024
1024
1025 @property
1025 @property
1026 def merge(self):
1026 def merge(self):
1027 """
1027 """
1028 Returns boolean if commit is a merge.
1028 Returns boolean if commit is a merge.
1029 """
1029 """
1030 return len(self.parents) > 1
1030 return len(self.parents) > 1
1031
1031
1032 @LazyProperty
1032 @LazyProperty
1033 def children(self):
1033 def children(self):
1034 """
1034 """
1035 Returns list of child commits.
1035 Returns list of child commits.
1036 """
1036 """
1037 raise NotImplementedError
1037 raise NotImplementedError
1038
1038
1039 @LazyProperty
1039 @LazyProperty
1040 def id(self):
1040 def id(self):
1041 """
1041 """
1042 Returns string identifying this commit.
1042 Returns string identifying this commit.
1043 """
1043 """
1044 raise NotImplementedError
1044 raise NotImplementedError
1045
1045
1046 @LazyProperty
1046 @LazyProperty
1047 def raw_id(self):
1047 def raw_id(self):
1048 """
1048 """
1049 Returns raw string identifying this commit.
1049 Returns raw string identifying this commit.
1050 """
1050 """
1051 raise NotImplementedError
1051 raise NotImplementedError
1052
1052
1053 @LazyProperty
1053 @LazyProperty
1054 def short_id(self):
1054 def short_id(self):
1055 """
1055 """
1056 Returns shortened version of ``raw_id`` attribute, as string,
1056 Returns shortened version of ``raw_id`` attribute, as string,
1057 identifying this commit, useful for presentation to users.
1057 identifying this commit, useful for presentation to users.
1058 """
1058 """
1059 raise NotImplementedError
1059 raise NotImplementedError
1060
1060
1061 @LazyProperty
1061 @LazyProperty
1062 def idx(self):
1062 def idx(self):
1063 """
1063 """
1064 Returns integer identifying this commit.
1064 Returns integer identifying this commit.
1065 """
1065 """
1066 raise NotImplementedError
1066 raise NotImplementedError
1067
1067
1068 @LazyProperty
1068 @LazyProperty
1069 def committer(self):
1069 def committer(self):
1070 """
1070 """
1071 Returns committer for this commit
1071 Returns committer for this commit
1072 """
1072 """
1073 raise NotImplementedError
1073 raise NotImplementedError
1074
1074
1075 @LazyProperty
1075 @LazyProperty
1076 def committer_name(self):
1076 def committer_name(self):
1077 """
1077 """
1078 Returns committer name for this commit
1078 Returns committer name for this commit
1079 """
1079 """
1080
1080
1081 return author_name(self.committer)
1081 return author_name(self.committer)
1082
1082
1083 @LazyProperty
1083 @LazyProperty
1084 def committer_email(self):
1084 def committer_email(self):
1085 """
1085 """
1086 Returns committer email address for this commit
1086 Returns committer email address for this commit
1087 """
1087 """
1088
1088
1089 return author_email(self.committer)
1089 return author_email(self.committer)
1090
1090
1091 @LazyProperty
1091 @LazyProperty
1092 def author(self):
1092 def author(self):
1093 """
1093 """
1094 Returns author for this commit
1094 Returns author for this commit
1095 """
1095 """
1096
1096
1097 raise NotImplementedError
1097 raise NotImplementedError
1098
1098
1099 @LazyProperty
1099 @LazyProperty
1100 def author_name(self):
1100 def author_name(self):
1101 """
1101 """
1102 Returns author name for this commit
1102 Returns author name for this commit
1103 """
1103 """
1104
1104
1105 return author_name(self.author)
1105 return author_name(self.author)
1106
1106
1107 @LazyProperty
1107 @LazyProperty
1108 def author_email(self):
1108 def author_email(self):
1109 """
1109 """
1110 Returns author email address for this commit
1110 Returns author email address for this commit
1111 """
1111 """
1112
1112
1113 return author_email(self.author)
1113 return author_email(self.author)
1114
1114
1115 def get_file_mode(self, path: bytes):
1115 def get_file_mode(self, path: bytes):
1116 """
1116 """
1117 Returns stat mode of the file at `path`.
1117 Returns stat mode of the file at `path`.
1118 """
1118 """
1119 raise NotImplementedError
1119 raise NotImplementedError
1120
1120
1121 def is_link(self, path):
1121 def is_link(self, path):
1122 """
1122 """
1123 Returns ``True`` if given `path` is a symlink
1123 Returns ``True`` if given `path` is a symlink
1124 """
1124 """
1125 raise NotImplementedError
1125 raise NotImplementedError
1126
1126
1127 def is_node_binary(self, path):
1127 def is_node_binary(self, path):
1128 """
1128 """
1129 Returns ``True`` is given path is a binary file
1129 Returns ``True`` is given path is a binary file
1130 """
1130 """
1131 raise NotImplementedError
1131 raise NotImplementedError
1132
1132
1133 def node_md5_hash(self, path):
1133 def node_md5_hash(self, path):
1134 """
1134 """
1135 Returns md5 hash of a node data
1135 Returns md5 hash of a node data
1136 """
1136 """
1137 raise NotImplementedError
1137 raise NotImplementedError
1138
1138
1139 def get_file_content(self, path) -> bytes:
1139 def get_file_content(self, path) -> bytes:
1140 """
1140 """
1141 Returns content of the file at the given `path`.
1141 Returns content of the file at the given `path`.
1142 """
1142 """
1143 raise NotImplementedError
1143 raise NotImplementedError
1144
1144
1145 def get_file_content_streamed(self, path):
1145 def get_file_content_streamed(self, path):
1146 """
1146 """
1147 returns a streaming response from vcsserver with file content
1147 returns a streaming response from vcsserver with file content
1148 """
1148 """
1149 raise NotImplementedError
1149 raise NotImplementedError
1150
1150
1151 def get_file_size(self, path):
1151 def get_file_size(self, path):
1152 """
1152 """
1153 Returns size of the file at the given `path`.
1153 Returns size of the file at the given `path`.
1154 """
1154 """
1155 raise NotImplementedError
1155 raise NotImplementedError
1156
1156
1157 def get_path_commit(self, path, pre_load=None):
1157 def get_path_commit(self, path, pre_load=None):
1158 """
1158 """
1159 Returns last commit of the file at the given `path`.
1159 Returns last commit of the file at the given `path`.
1160
1160
1161 :param pre_load: Optional. List of commit attributes to load.
1161 :param pre_load: Optional. List of commit attributes to load.
1162 """
1162 """
1163 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1163 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1164 if not commits:
1164 if not commits:
1165 raise RepositoryError(
1165 raise RepositoryError(
1166 'Failed to fetch history for path {}. '
1166 'Failed to fetch history for path {}. '
1167 'Please check if such path exists in your repository'.format(
1167 'Please check if such path exists in your repository'.format(
1168 path))
1168 path))
1169 return commits[0]
1169 return commits[0]
1170
1170
1171 def get_path_history(self, path, limit=None, pre_load=None):
1171 def get_path_history(self, path, limit=None, pre_load=None):
1172 """
1172 """
1173 Returns history of file as reversed list of :class:`BaseCommit`
1173 Returns history of file as reversed list of :class:`BaseCommit`
1174 objects for which file at given `path` has been modified.
1174 objects for which file at given `path` has been modified.
1175
1175
1176 :param limit: Optional. Allows to limit the size of the returned
1176 :param limit: Optional. Allows to limit the size of the returned
1177 history. This is intended as a hint to the underlying backend, so
1177 history. This is intended as a hint to the underlying backend, so
1178 that it can apply optimizations depending on the limit.
1178 that it can apply optimizations depending on the limit.
1179 :param pre_load: Optional. List of commit attributes to load.
1179 :param pre_load: Optional. List of commit attributes to load.
1180 """
1180 """
1181 raise NotImplementedError
1181 raise NotImplementedError
1182
1182
1183 def get_file_annotate(self, path, pre_load=None):
1183 def get_file_annotate(self, path, pre_load=None):
1184 """
1184 """
1185 Returns a generator of four element tuples with
1185 Returns a generator of four element tuples with
1186 lineno, sha, commit lazy loader and line
1186 lineno, sha, commit lazy loader and line
1187
1187
1188 :param pre_load: Optional. List of commit attributes to load.
1188 :param pre_load: Optional. List of commit attributes to load.
1189 """
1189 """
1190 raise NotImplementedError
1190 raise NotImplementedError
1191
1191
1192 def get_nodes(self, path, pre_load=None):
1192 def get_nodes(self, path, pre_load=None):
1193 """
1193 """
1194 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1194 Returns combined ``DirNode`` and ``FileNode`` objects list representing
1195 state of commit at the given ``path``.
1195 state of commit at the given ``path``.
1196
1196
1197 :raises ``CommitError``: if node at the given ``path`` is not
1197 :raises ``CommitError``: if node at the given ``path`` is not
1198 instance of ``DirNode``
1198 instance of ``DirNode``
1199 """
1199 """
1200 raise NotImplementedError
1200 raise NotImplementedError
1201
1201
1202 def get_node(self, path):
1202 def get_node(self, path):
1203 """
1203 """
1204 Returns ``Node`` object from the given ``path``.
1204 Returns ``Node`` object from the given ``path``.
1205
1205
1206 :raises ``NodeDoesNotExistError``: if there is no node at the given
1206 :raises ``NodeDoesNotExistError``: if there is no node at the given
1207 ``path``
1207 ``path``
1208 """
1208 """
1209 raise NotImplementedError
1209 raise NotImplementedError
1210
1210
1211 def get_largefile_node(self, path):
1211 def get_largefile_node(self, path):
1212 """
1212 """
1213 Returns the path to largefile from Mercurial/Git-lfs storage.
1213 Returns the path to largefile from Mercurial/Git-lfs storage.
1214 or None if it's not a largefile node
1214 or None if it's not a largefile node
1215 """
1215 """
1216 return None
1216 return None
1217
1217
1218 def archive_repo(self, archive_name_key, kind='tgz', subrepos=None,
1218 def archive_repo(self, archive_name_key, kind='tgz', subrepos=None,
1219 archive_dir_name=None, write_metadata=False, mtime=None,
1219 archive_dir_name=None, write_metadata=False, mtime=None,
1220 archive_at_path='/', cache_config=None):
1220 archive_at_path='/', cache_config=None):
1221 """
1221 """
1222 Creates an archive containing the contents of the repository.
1222 Creates an archive containing the contents of the repository.
1223
1223
1224 :param archive_name_key: unique key under this archive should be generated
1224 :param archive_name_key: unique key under this archive should be generated
1225 :param kind: one of the following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1225 :param kind: one of the following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1226 :param archive_dir_name: name of root directory in archive.
1226 :param archive_dir_name: name of root directory in archive.
1227 Default is repository name and commit's short_id joined with dash:
1227 Default is repository name and commit's short_id joined with dash:
1228 ``"{repo_name}-{short_id}"``.
1228 ``"{repo_name}-{short_id}"``.
1229 :param write_metadata: write a metadata file into archive.
1229 :param write_metadata: write a metadata file into archive.
1230 :param mtime: custom modification time for archive creation, defaults
1230 :param mtime: custom modification time for archive creation, defaults
1231 to time.time() if not given.
1231 to time.time() if not given.
1232 :param archive_at_path: pack files at this path (default '/')
1232 :param archive_at_path: pack files at this path (default '/')
1233 :param cache_config: config spec to send to vcsserver to configure the backend to store files
1233 :param cache_config: config spec to send to vcsserver to configure the backend to store files
1234
1234
1235 :raise VCSError: If prefix has a problem.
1235 :raise VCSError: If prefix has a problem.
1236 """
1236 """
1237 cache_config = cache_config or {}
1237 cache_config = cache_config or {}
1238 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1238 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1239 if kind not in allowed_kinds:
1239 if kind not in allowed_kinds:
1240 raise ImproperArchiveTypeError(
1240 raise ImproperArchiveTypeError(
1241 f'Archive kind ({kind}) not supported use one of {allowed_kinds}')
1241 f'Archive kind ({kind}) not supported use one of {allowed_kinds}')
1242
1242
1243 archive_dir_name = self._validate_archive_prefix(archive_dir_name)
1243 archive_dir_name = self._validate_archive_prefix(archive_dir_name)
1244 mtime = mtime is not None or time.mktime(self.date.timetuple())
1244 mtime = mtime is not None or time.mktime(self.date.timetuple())
1245 commit_id = self.raw_id
1245 commit_id = self.raw_id
1246
1246
1247 return self.repository._remote.archive_repo(
1247 return self.repository._remote.archive_repo(
1248 archive_name_key, kind, mtime, archive_at_path,
1248 archive_name_key, kind, mtime, archive_at_path,
1249 archive_dir_name, commit_id, cache_config)
1249 archive_dir_name, commit_id, cache_config)
1250
1250
1251 def _validate_archive_prefix(self, archive_dir_name):
1251 def _validate_archive_prefix(self, archive_dir_name):
1252 if archive_dir_name is None:
1252 if archive_dir_name is None:
1253 archive_dir_name = self._ARCHIVE_PREFIX_TEMPLATE.format(
1253 archive_dir_name = self._ARCHIVE_PREFIX_TEMPLATE.format(
1254 repo_name=safe_str(self.repository.name),
1254 repo_name=safe_str(self.repository.name),
1255 short_id=self.short_id)
1255 short_id=self.short_id)
1256 elif not isinstance(archive_dir_name, str):
1256 elif not isinstance(archive_dir_name, str):
1257 raise ValueError(f"archive_dir_name is not str object but: {type(archive_dir_name)}")
1257 raise ValueError(f"archive_dir_name is not str object but: {type(archive_dir_name)}")
1258 elif archive_dir_name.startswith('/'):
1258 elif archive_dir_name.startswith('/'):
1259 raise VCSError("Prefix cannot start with leading slash")
1259 raise VCSError("Prefix cannot start with leading slash")
1260 elif archive_dir_name.strip() == '':
1260 elif archive_dir_name.strip() == '':
1261 raise VCSError("Prefix cannot be empty")
1261 raise VCSError("Prefix cannot be empty")
1262 elif not archive_dir_name.isascii():
1262 elif not archive_dir_name.isascii():
1263 raise VCSError("Prefix cannot contain non ascii characters")
1263 raise VCSError("Prefix cannot contain non ascii characters")
1264 return archive_dir_name
1264 return archive_dir_name
1265
1265
1266 @LazyProperty
1266 @LazyProperty
1267 def root(self):
1267 def root(self):
1268 """
1268 """
1269 Returns ``RootNode`` object for this commit.
1269 Returns ``RootNode`` object for this commit.
1270 """
1270 """
1271 return self.get_node('')
1271 return self.get_node('')
1272
1272
1273 def next(self, branch=None):
1273 def next(self, branch=None):
1274 """
1274 """
1275 Returns next commit from current, if branch is gives it will return
1275 Returns next commit from current, if branch is gives it will return
1276 next commit belonging to this branch
1276 next commit belonging to this branch
1277
1277
1278 :param branch: show commits within the given named branch
1278 :param branch: show commits within the given named branch
1279 """
1279 """
1280 indexes = range(self.idx + 1, self.repository.count())
1280 indexes = range(self.idx + 1, self.repository.count())
1281 return self._find_next(indexes, branch)
1281 return self._find_next(indexes, branch)
1282
1282
1283 def prev(self, branch=None):
1283 def prev(self, branch=None):
1284 """
1284 """
1285 Returns previous commit from current, if branch is gives it will
1285 Returns previous commit from current, if branch is gives it will
1286 return previous commit belonging to this branch
1286 return previous commit belonging to this branch
1287
1287
1288 :param branch: show commit within the given named branch
1288 :param branch: show commit within the given named branch
1289 """
1289 """
1290 indexes = range(self.idx - 1, -1, -1)
1290 indexes = range(self.idx - 1, -1, -1)
1291 return self._find_next(indexes, branch)
1291 return self._find_next(indexes, branch)
1292
1292
1293 def _find_next(self, indexes, branch=None):
1293 def _find_next(self, indexes, branch=None):
1294 if branch and self.branch != branch:
1294 if branch and self.branch != branch:
1295 raise VCSError('Branch option used on commit not belonging '
1295 raise VCSError('Branch option used on commit not belonging '
1296 'to that branch')
1296 'to that branch')
1297
1297
1298 for next_idx in indexes:
1298 for next_idx in indexes:
1299 commit = self.repository.get_commit(commit_idx=next_idx)
1299 commit = self.repository.get_commit(commit_idx=next_idx)
1300 if branch and branch != commit.branch:
1300 if branch and branch != commit.branch:
1301 continue
1301 continue
1302 return commit
1302 return commit
1303 raise CommitDoesNotExistError
1303 raise CommitDoesNotExistError
1304
1304
1305 def diff(self, ignore_whitespace=True, context=3):
1305 def diff(self, ignore_whitespace=True, context=3):
1306 """
1306 """
1307 Returns a `Diff` object representing the change made by this commit.
1307 Returns a `Diff` object representing the change made by this commit.
1308 """
1308 """
1309 parent = self.first_parent
1309 parent = self.first_parent
1310 diff = self.repository.get_diff(
1310 diff = self.repository.get_diff(
1311 parent, self,
1311 parent, self,
1312 ignore_whitespace=ignore_whitespace,
1312 ignore_whitespace=ignore_whitespace,
1313 context=context)
1313 context=context)
1314 return diff
1314 return diff
1315
1315
1316 @LazyProperty
1316 @LazyProperty
1317 def added(self):
1317 def added(self):
1318 """
1318 """
1319 Returns list of added ``FileNode`` objects.
1319 Returns list of added ``FileNode`` objects.
1320 """
1320 """
1321 raise NotImplementedError
1321 raise NotImplementedError
1322
1322
1323 @LazyProperty
1323 @LazyProperty
1324 def changed(self):
1324 def changed(self):
1325 """
1325 """
1326 Returns list of modified ``FileNode`` objects.
1326 Returns list of modified ``FileNode`` objects.
1327 """
1327 """
1328 raise NotImplementedError
1328 raise NotImplementedError
1329
1329
1330 @LazyProperty
1330 @LazyProperty
1331 def removed(self):
1331 def removed(self):
1332 """
1332 """
1333 Returns list of removed ``FileNode`` objects.
1333 Returns list of removed ``FileNode`` objects.
1334 """
1334 """
1335 raise NotImplementedError
1335 raise NotImplementedError
1336
1336
1337 @LazyProperty
1337 @LazyProperty
1338 def size(self):
1338 def size(self):
1339 """
1339 """
1340 Returns total number of bytes from contents of all filenodes.
1340 Returns total number of bytes from contents of all filenodes.
1341 """
1341 """
1342 return sum(node.size for node in self.get_filenodes_generator())
1342 return sum(node.size for node in self.get_filenodes_generator())
1343
1343
1344 def walk(self, topurl=''):
1344 def walk(self, topurl=''):
1345 """
1345 """
1346 Similar to os.walk method. Insted of filesystem it walks through
1346 Similar to os.walk method. Insted of filesystem it walks through
1347 commit starting at given ``topurl``. Returns generator of tuples
1347 commit starting at given ``topurl``. Returns generator of tuples
1348 (top_node, dirnodes, filenodes).
1348 (top_node, dirnodes, filenodes).
1349 """
1349 """
1350 from rhodecode.lib.vcs.nodes import DirNode
1350 from rhodecode.lib.vcs.nodes import DirNode
1351
1351
1352 if isinstance(topurl, DirNode):
1352 if isinstance(topurl, DirNode):
1353 top_node = topurl
1353 top_node = topurl
1354 else:
1354 else:
1355 top_node = self.get_node(topurl)
1355 top_node = self.get_node(topurl)
1356
1356
1357 has_default_pre_load = False
1357 has_default_pre_load = False
1358 if isinstance(top_node, DirNode):
1358 if isinstance(top_node, DirNode):
1359 # used to inject as we walk same defaults as given top_node
1359 # used to inject as we walk same defaults as given top_node
1360 default_pre_load = top_node.default_pre_load
1360 default_pre_load = top_node.default_pre_load
1361 has_default_pre_load = True
1361 has_default_pre_load = True
1362
1362
1363 if not top_node.is_dir():
1363 if not top_node.is_dir():
1364 return
1364 return
1365 yield top_node, top_node.dirs, top_node.files
1365 yield top_node, top_node.dirs, top_node.files
1366 for dir_node in top_node.dirs:
1366 for dir_node in top_node.dirs:
1367 if has_default_pre_load:
1367 if has_default_pre_load:
1368 dir_node.default_pre_load = default_pre_load
1368 dir_node.default_pre_load = default_pre_load
1369 yield from self.walk(dir_node)
1369 yield from self.walk(dir_node)
1370
1370
1371 def get_filenodes_generator(self):
1371 def get_filenodes_generator(self):
1372 """
1372 """
1373 Returns generator that yields *all* file nodes.
1373 Returns generator that yields *all* file nodes.
1374 """
1374 """
1375 for topnode, dirs, files in self.walk():
1375 for topnode, dirs, files in self.walk():
1376 yield from files
1376 yield from files
1377
1377
1378 #
1378 #
1379 # Utilities for sub classes to support consistent behavior
1379 # Utilities for sub classes to support consistent behavior
1380 #
1380 #
1381
1381
1382 def no_node_at_path(self, path):
1382 def no_node_at_path(self, path):
1383 return NodeDoesNotExistError(
1383 return NodeDoesNotExistError(
1384 f"There is no file nor directory at the given path: "
1384 f"There is no file nor directory at the given path: "
1385 f"`{safe_str(path)}` at commit {self.short_id}")
1385 f"`{safe_str(path)}` at commit {self.short_id}")
1386
1386
1387 def _fix_path(self, path: str) -> str:
1387 def _fix_path(self, path: str) -> str:
1388 """
1388 """
1389 Paths are stored without trailing slash so we need to get rid off it if
1389 Paths are stored without trailing slash so we need to get rid off it if
1390 needed.
1390 needed.
1391 """
1391 """
1392 return safe_str(path).rstrip('/')
1392 return safe_str(path).rstrip('/')
1393
1393
1394 #
1394 #
1395 # Deprecated API based on changesets
1395 # Deprecated API based on changesets
1396 #
1396 #
1397
1397
1398 @property
1398 @property
1399 def revision(self):
1399 def revision(self):
1400 warnings.warn("Use idx instead", DeprecationWarning)
1400 warnings.warn("Use idx instead", DeprecationWarning)
1401 return self.idx
1401 return self.idx
1402
1402
1403 @revision.setter
1403 @revision.setter
1404 def revision(self, value):
1404 def revision(self, value):
1405 warnings.warn("Use idx instead", DeprecationWarning)
1405 warnings.warn("Use idx instead", DeprecationWarning)
1406 self.idx = value
1406 self.idx = value
1407
1407
1408 def get_file_changeset(self, path):
1408 def get_file_changeset(self, path):
1409 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1409 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1410 return self.get_path_commit(path)
1410 return self.get_path_commit(path)
1411
1411
1412
1412
class BaseChangesetClass(type):
    """
    Metaclass making the deprecated ``BaseChangeset`` alias satisfy
    ``isinstance`` checks for any ``BaseCommit`` instance.
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1417
1417
1418
1418
class BaseChangeset(BaseCommit, metaclass=BaseChangesetClass):
    """
    Deprecated alias kept for backward compatibility; use ``BaseCommit``.
    Instantiating it emits a ``DeprecationWarning``.
    """

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super().__new__(cls, *args, **kwargs)
1425
1425
1426
1426
1427 class BaseInMemoryCommit(object):
1427 class BaseInMemoryCommit(object):
1428 """
1428 """
1429 Represents differences between repository's state (most recent head) and
1429 Represents differences between repository's state (most recent head) and
1430 changes made *in place*.
1430 changes made *in place*.
1431
1431
1432 **Attributes**
1432 **Attributes**
1433
1433
1434 ``repository``
1434 ``repository``
1435 repository object for this in-memory-commit
1435 repository object for this in-memory-commit
1436
1436
1437 ``added``
1437 ``added``
1438 list of ``FileNode`` objects marked as *added*
1438 list of ``FileNode`` objects marked as *added*
1439
1439
1440 ``changed``
1440 ``changed``
1441 list of ``FileNode`` objects marked as *changed*
1441 list of ``FileNode`` objects marked as *changed*
1442
1442
1443 ``removed``
1443 ``removed``
1444 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1444 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1445 *removed*
1445 *removed*
1446
1446
1447 ``parents``
1447 ``parents``
1448 list of :class:`BaseCommit` instances representing parents of
1448 list of :class:`BaseCommit` instances representing parents of
1449 in-memory commit. Should always be 2-element sequence.
1449 in-memory commit. Should always be 2-element sequence.
1450
1450
1451 """
1451 """
1452
1452
1453 def __init__(self, repository):
1453 def __init__(self, repository):
1454 self.repository = repository
1454 self.repository = repository
1455 self.added = []
1455 self.added = []
1456 self.changed = []
1456 self.changed = []
1457 self.removed = []
1457 self.removed = []
1458 self.parents = []
1458 self.parents = []
1459
1459
1460 def add(self, *filenodes):
1460 def add(self, *filenodes):
1461 """
1461 """
1462 Marks given ``FileNode`` objects as *to be committed*.
1462 Marks given ``FileNode`` objects as *to be committed*.
1463
1463
1464 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1464 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1465 latest commit
1465 latest commit
1466 :raises ``NodeAlreadyAddedError``: if node with same path is already
1466 :raises ``NodeAlreadyAddedError``: if node with same path is already
1467 marked as *added*
1467 marked as *added*
1468 """
1468 """
1469 # Check if not already marked as *added* first
1469 # Check if not already marked as *added* first
1470 for node in filenodes:
1470 for node in filenodes:
1471 if node.path in (n.path for n in self.added):
1471 if node.path in (n.path for n in self.added):
1472 raise NodeAlreadyAddedError(
1472 raise NodeAlreadyAddedError(
1473 "Such FileNode %s is already marked for addition"
1473 "Such FileNode %s is already marked for addition"
1474 % node.path)
1474 % node.path)
1475 for node in filenodes:
1475 for node in filenodes:
1476 self.added.append(node)
1476 self.added.append(node)
1477
1477
1478 def change(self, *filenodes):
1478 def change(self, *filenodes):
1479 """
1479 """
1480 Marks given ``FileNode`` objects to be *changed* in next commit.
1480 Marks given ``FileNode`` objects to be *changed* in next commit.
1481
1481
1482 :raises ``EmptyRepositoryError``: if there are no commits yet
1482 :raises ``EmptyRepositoryError``: if there are no commits yet
1483 :raises ``NodeAlreadyExistsError``: if node with same path is already
1483 :raises ``NodeAlreadyExistsError``: if node with same path is already
1484 marked to be *changed*
1484 marked to be *changed*
1485 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1485 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1486 marked to be *removed*
1486 marked to be *removed*
1487 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1487 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1488 commit
1488 commit
1489 :raises ``NodeNotChangedError``: if node hasn't really be changed
1489 :raises ``NodeNotChangedError``: if node hasn't really be changed
1490 """
1490 """
1491 for node in filenodes:
1491 for node in filenodes:
1492 if node.path in (n.path for n in self.removed):
1492 if node.path in (n.path for n in self.removed):
1493 raise NodeAlreadyRemovedError(
1493 raise NodeAlreadyRemovedError(
1494 "Node at %s is already marked as removed" % node.path)
1494 "Node at %s is already marked as removed" % node.path)
1495 try:
1495 try:
1496 self.repository.get_commit()
1496 self.repository.get_commit()
1497 except EmptyRepositoryError:
1497 except EmptyRepositoryError:
1498 raise EmptyRepositoryError(
1498 raise EmptyRepositoryError(
1499 "Nothing to change - try to *add* new nodes rather than "
1499 "Nothing to change - try to *add* new nodes rather than "
1500 "changing them")
1500 "changing them")
1501 for node in filenodes:
1501 for node in filenodes:
1502 if node.path in (n.path for n in self.changed):
1502 if node.path in (n.path for n in self.changed):
1503 raise NodeAlreadyChangedError(
1503 raise NodeAlreadyChangedError(
1504 "Node at '%s' is already marked as changed" % node.path)
1504 "Node at '%s' is already marked as changed" % node.path)
1505 self.changed.append(node)
1505 self.changed.append(node)
1506
1506
1507 def remove(self, *filenodes):
1507 def remove(self, *filenodes):
1508 """
1508 """
1509 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1509 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1510 *removed* in next commit.
1510 *removed* in next commit.
1511
1511
1512 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1512 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1513 be *removed*
1513 be *removed*
1514 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1514 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1515 be *changed*
1515 be *changed*
1516 """
1516 """
1517 for node in filenodes:
1517 for node in filenodes:
1518 if node.path in (n.path for n in self.removed):
1518 if node.path in (n.path for n in self.removed):
1519 raise NodeAlreadyRemovedError(
1519 raise NodeAlreadyRemovedError(
1520 "Node is already marked to for removal at %s" % node.path)
1520 "Node is already marked to for removal at %s" % node.path)
1521 if node.path in (n.path for n in self.changed):
1521 if node.path in (n.path for n in self.changed):
1522 raise NodeAlreadyChangedError(
1522 raise NodeAlreadyChangedError(
1523 "Node is already marked to be changed at %s" % node.path)
1523 "Node is already marked to be changed at %s" % node.path)
1524 # We only mark node as *removed* - real removal is done by
1524 # We only mark node as *removed* - real removal is done by
1525 # commit method
1525 # commit method
1526 self.removed.append(node)
1526 self.removed.append(node)
1527
1527
1528 def reset(self):
1528 def reset(self):
1529 """
1529 """
1530 Resets this instance to initial state (cleans ``added``, ``changed``
1530 Resets this instance to initial state (cleans ``added``, ``changed``
1531 and ``removed`` lists).
1531 and ``removed`` lists).
1532 """
1532 """
1533 self.added = []
1533 self.added = []
1534 self.changed = []
1534 self.changed = []
1535 self.removed = []
1535 self.removed = []
1536 self.parents = []
1536 self.parents = []
1537
1537
1538 def get_ipaths(self):
1538 def get_ipaths(self):
1539 """
1539 """
1540 Returns generator of paths from nodes marked as added, changed or
1540 Returns generator of paths from nodes marked as added, changed or
1541 removed.
1541 removed.
1542 """
1542 """
1543 for node in itertools.chain(self.added, self.changed, self.removed):
1543 for node in itertools.chain(self.added, self.changed, self.removed):
1544 yield node.path
1544 yield node.path
1545
1545
1546 def get_paths(self):
1546 def get_paths(self):
1547 """
1547 """
1548 Returns list of paths from nodes marked as added, changed or removed.
1548 Returns list of paths from nodes marked as added, changed or removed.
1549 """
1549 """
1550 return list(self.get_ipaths())
1550 return list(self.get_ipaths())
1551
1551
1552 def check_integrity(self, parents=None):
1552 def check_integrity(self, parents=None):
1553 """
1553 """
1554 Checks in-memory commit's integrity. Also, sets parents if not
1554 Checks in-memory commit's integrity. Also, sets parents if not
1555 already set.
1555 already set.
1556
1556
1557 :raises CommitError: if any error occurs (i.e.
1557 :raises CommitError: if any error occurs (i.e.
1558 ``NodeDoesNotExistError``).
1558 ``NodeDoesNotExistError``).
1559 """
1559 """
1560 if not self.parents:
1560 if not self.parents:
1561 parents = parents or []
1561 parents = parents or []
1562 if len(parents) == 0:
1562 if len(parents) == 0:
1563 try:
1563 try:
1564 parents = [self.repository.get_commit(), None]
1564 parents = [self.repository.get_commit(), None]
1565 except EmptyRepositoryError:
1565 except EmptyRepositoryError:
1566 parents = [None, None]
1566 parents = [None, None]
1567 elif len(parents) == 1:
1567 elif len(parents) == 1:
1568 parents += [None]
1568 parents += [None]
1569 self.parents = parents
1569 self.parents = parents
1570
1570
1571 # Local parents, only if not None
1571 # Local parents, only if not None
1572 parents = [p for p in self.parents if p]
1572 parents = [p for p in self.parents if p]
1573
1573
1574 # Check nodes marked as added
1574 # Check nodes marked as added
1575 for p in parents:
1575 for p in parents:
1576 for node in self.added:
1576 for node in self.added:
1577 try:
1577 try:
1578 p.get_node(node.path)
1578 p.get_node(node.path)
1579 except NodeDoesNotExistError:
1579 except NodeDoesNotExistError:
1580 pass
1580 pass
1581 else:
1581 else:
1582 raise NodeAlreadyExistsError(
1582 raise NodeAlreadyExistsError(
1583 f"Node `{node.path}` already exists at {p}")
1583 f"Node `{node.path}` already exists at {p}")
1584
1584
1585 # Check nodes marked as changed
1585 # Check nodes marked as changed
1586 missing = set(self.changed)
1586 missing = set(self.changed)
1587 not_changed = set(self.changed)
1587 not_changed = set(self.changed)
1588 if self.changed and not parents:
1588 if self.changed and not parents:
1589 raise NodeDoesNotExistError(str(self.changed[0].path))
1589 raise NodeDoesNotExistError(str(self.changed[0].path))
1590 for p in parents:
1590 for p in parents:
1591 for node in self.changed:
1591 for node in self.changed:
1592 try:
1592 try:
1593 old = p.get_node(node.path)
1593 old = p.get_node(node.path)
1594 missing.remove(node)
1594 missing.remove(node)
1595 # if content actually changed, remove node from not_changed
1595 # if content actually changed, remove node from not_changed
1596 if old.content != node.content:
1596 if old.content != node.content:
1597 not_changed.remove(node)
1597 not_changed.remove(node)
1598 except NodeDoesNotExistError:
1598 except NodeDoesNotExistError:
1599 pass
1599 pass
1600 if self.changed and missing:
1600 if self.changed and missing:
1601 raise NodeDoesNotExistError(
1601 raise NodeDoesNotExistError(
1602 "Node `%s` marked as modified but missing in parents: %s"
1602 f"Node `{node.path}` marked as modified but missing in parents: {parents}")
1603 % (node.path, parents))
1604
1603
1605 if self.changed and not_changed:
1604 if self.changed and not_changed:
1606 raise NodeNotChangedError(
1605 raise NodeNotChangedError(
1607 "Node `%s` wasn't actually changed (parents: %s)"
1606 "Node `%s` wasn't actually changed (parents: %s)"
1608 % (not_changed.pop().path, parents))
1607 % (not_changed.pop().path, parents))
1609
1608
1610 # Check nodes marked as removed
1609 # Check nodes marked as removed
1611 if self.removed and not parents:
1610 if self.removed and not parents:
1612 raise NodeDoesNotExistError(
1611 raise NodeDoesNotExistError(
1613 "Cannot remove node at %s as there "
1612 "Cannot remove node at %s as there "
1614 "were no parents specified" % self.removed[0].path)
1613 "were no parents specified" % self.removed[0].path)
1615 really_removed = set()
1614 really_removed = set()
1616 for p in parents:
1615 for p in parents:
1617 for node in self.removed:
1616 for node in self.removed:
1618 try:
1617 try:
1619 p.get_node(node.path)
1618 p.get_node(node.path)
1620 really_removed.add(node)
1619 really_removed.add(node)
1621 except CommitError:
1620 except CommitError:
1622 pass
1621 pass
1623 not_removed = set(self.removed) - really_removed
1622 not_removed = set(self.removed) - really_removed
1624 if not_removed:
1623 if not_removed:
1625 # TODO: johbo: This code branch does not seem to be covered
1624 # TODO: johbo: This code branch does not seem to be covered
1626 raise NodeDoesNotExistError(
1625 raise NodeDoesNotExistError(
1627 "Cannot remove node at %s from "
1626 "Cannot remove node at %s from "
1628 "following parents: %s" % (not_removed, parents))
1627 "following parents: %s" % (not_removed, parents))
1629
1628
1630 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1629 def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
1631 """
1630 """
1632 Performs in-memory commit (doesn't check workdir in any way) and
1631 Performs in-memory commit (doesn't check workdir in any way) and
1633 returns newly created :class:`BaseCommit`. Updates repository's
1632 returns newly created :class:`BaseCommit`. Updates repository's
1634 attribute `commits`.
1633 attribute `commits`.
1635
1634
1636 .. note::
1635 .. note::
1637
1636
1638 While overriding this method each backend's should call
1637 While overriding this method each backend's should call
1639 ``self.check_integrity(parents)`` in the first place.
1638 ``self.check_integrity(parents)`` in the first place.
1640
1639
1641 :param message: message of the commit
1640 :param message: message of the commit
1642 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1641 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1643 :param parents: single parent or sequence of parents from which commit
1642 :param parents: single parent or sequence of parents from which commit
1644 would be derived
1643 would be derived
1645 :param date: ``datetime.datetime`` instance. Defaults to
1644 :param date: ``datetime.datetime`` instance. Defaults to
1646 ``datetime.datetime.now()``.
1645 ``datetime.datetime.now()``.
1647 :param branch: branch name, as string. If none given, default backend's
1646 :param branch: branch name, as string. If none given, default backend's
1648 branch would be used.
1647 branch would be used.
1649
1648
1650 :raises ``CommitError``: if any error occurs while committing
1649 :raises ``CommitError``: if any error occurs while committing
1651 """
1650 """
1652 raise NotImplementedError
1651 raise NotImplementedError
1653
1652
1654
1653
1655 class BaseInMemoryChangesetClass(type):
1654 class BaseInMemoryChangesetClass(type):
1656
1655
1657 def __instancecheck__(self, instance):
1656 def __instancecheck__(self, instance):
1658 return isinstance(instance, BaseInMemoryCommit)
1657 return isinstance(instance, BaseInMemoryCommit)
1659
1658
1660
1659
1661 class BaseInMemoryChangeset(BaseInMemoryCommit, metaclass=BaseInMemoryChangesetClass):
1660 class BaseInMemoryChangeset(BaseInMemoryCommit, metaclass=BaseInMemoryChangesetClass):
1662
1661
1663 def __new__(cls, *args, **kwargs):
1662 def __new__(cls, *args, **kwargs):
1664 warnings.warn(
1663 warnings.warn(
1665 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1664 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1666 return super().__new__(cls, *args, **kwargs)
1665 return super().__new__(cls, *args, **kwargs)
1667
1666
1668
1667
1669 class EmptyCommit(BaseCommit):
1668 class EmptyCommit(BaseCommit):
1670 """
1669 """
1671 An dummy empty commit. It's possible to pass hash when creating
1670 An dummy empty commit. It's possible to pass hash when creating
1672 an EmptyCommit
1671 an EmptyCommit
1673 """
1672 """
1674
1673
1675 def __init__(
1674 def __init__(
1676 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1675 self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
1677 message='', author='', date=None):
1676 message='', author='', date=None):
1678 self._empty_commit_id = commit_id
1677 self._empty_commit_id = commit_id
1679 # TODO: johbo: Solve idx parameter, default value does not make
1678 # TODO: johbo: Solve idx parameter, default value does not make
1680 # too much sense
1679 # too much sense
1681 self.idx = idx
1680 self.idx = idx
1682 self.message = message
1681 self.message = message
1683 self.author = author
1682 self.author = author
1684 self.date = date or datetime.datetime.fromtimestamp(0)
1683 self.date = date or datetime.datetime.fromtimestamp(0)
1685 self.repository = repo
1684 self.repository = repo
1686 self.alias = alias
1685 self.alias = alias
1687
1686
1688 @LazyProperty
1687 @LazyProperty
1689 def raw_id(self):
1688 def raw_id(self):
1690 """
1689 """
1691 Returns raw string identifying this commit, useful for web
1690 Returns raw string identifying this commit, useful for web
1692 representation.
1691 representation.
1693 """
1692 """
1694
1693
1695 return self._empty_commit_id
1694 return self._empty_commit_id
1696
1695
1697 @LazyProperty
1696 @LazyProperty
1698 def branch(self):
1697 def branch(self):
1699 if self.alias:
1698 if self.alias:
1700 from rhodecode.lib.vcs.backends import get_backend
1699 from rhodecode.lib.vcs.backends import get_backend
1701 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1700 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1702
1701
1703 @LazyProperty
1702 @LazyProperty
1704 def short_id(self):
1703 def short_id(self):
1705 return self.raw_id[:12]
1704 return self.raw_id[:12]
1706
1705
1707 @LazyProperty
1706 @LazyProperty
1708 def id(self):
1707 def id(self):
1709 return self.raw_id
1708 return self.raw_id
1710
1709
1711 def get_path_commit(self, path, pre_load=None):
1710 def get_path_commit(self, path, pre_load=None):
1712 return self
1711 return self
1713
1712
1714 def get_file_content(self, path) -> bytes:
1713 def get_file_content(self, path) -> bytes:
1715 return b''
1714 return b''
1716
1715
1717 def get_file_content_streamed(self, path):
1716 def get_file_content_streamed(self, path):
1718 yield self.get_file_content(path)
1717 yield self.get_file_content(path)
1719
1718
1720 def get_file_size(self, path):
1719 def get_file_size(self, path):
1721 return 0
1720 return 0
1722
1721
1723
1722
1724 class EmptyChangesetClass(type):
1723 class EmptyChangesetClass(type):
1725
1724
1726 def __instancecheck__(self, instance):
1725 def __instancecheck__(self, instance):
1727 return isinstance(instance, EmptyCommit)
1726 return isinstance(instance, EmptyCommit)
1728
1727
1729
1728
1730 class EmptyChangeset(EmptyCommit, metaclass=EmptyChangesetClass):
1729 class EmptyChangeset(EmptyCommit, metaclass=EmptyChangesetClass):
1731
1730
1732 def __new__(cls, *args, **kwargs):
1731 def __new__(cls, *args, **kwargs):
1733 warnings.warn(
1732 warnings.warn(
1734 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1733 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1735 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1734 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1736
1735
1737 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1736 def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
1738 alias=None, revision=-1, message='', author='', date=None):
1737 alias=None, revision=-1, message='', author='', date=None):
1739 if requested_revision is not None:
1738 if requested_revision is not None:
1740 warnings.warn(
1739 warnings.warn(
1741 "Parameter requested_revision not supported anymore",
1740 "Parameter requested_revision not supported anymore",
1742 DeprecationWarning)
1741 DeprecationWarning)
1743 super().__init__(
1742 super().__init__(
1744 commit_id=cs, repo=repo, alias=alias, idx=revision,
1743 commit_id=cs, repo=repo, alias=alias, idx=revision,
1745 message=message, author=author, date=date)
1744 message=message, author=author, date=date)
1746
1745
1747 @property
1746 @property
1748 def revision(self):
1747 def revision(self):
1749 warnings.warn("Use idx instead", DeprecationWarning)
1748 warnings.warn("Use idx instead", DeprecationWarning)
1750 return self.idx
1749 return self.idx
1751
1750
1752 @revision.setter
1751 @revision.setter
1753 def revision(self, value):
1752 def revision(self, value):
1754 warnings.warn("Use idx instead", DeprecationWarning)
1753 warnings.warn("Use idx instead", DeprecationWarning)
1755 self.idx = value
1754 self.idx = value
1756
1755
1757
1756
1758 class EmptyRepository(BaseRepository):
1757 class EmptyRepository(BaseRepository):
1759 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1758 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1760 pass
1759 pass
1761
1760
1762 def get_diff(self, *args, **kwargs):
1761 def get_diff(self, *args, **kwargs):
1763 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1762 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1764 return GitDiff(b'')
1763 return GitDiff(b'')
1765
1764
1766
1765
1767 class CollectionGenerator(object):
1766 class CollectionGenerator(object):
1768
1767
1769 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1768 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
1770 self.repo = repo
1769 self.repo = repo
1771 self.commit_ids = commit_ids
1770 self.commit_ids = commit_ids
1772 self.collection_size = collection_size
1771 self.collection_size = collection_size
1773 self.pre_load = pre_load
1772 self.pre_load = pre_load
1774 self.translate_tag = translate_tag
1773 self.translate_tag = translate_tag
1775
1774
1776 def __len__(self):
1775 def __len__(self):
1777 if self.collection_size is not None:
1776 if self.collection_size is not None:
1778 return self.collection_size
1777 return self.collection_size
1779 return self.commit_ids.__len__()
1778 return self.commit_ids.__len__()
1780
1779
1781 def __iter__(self):
1780 def __iter__(self):
1782 for commit_id in self.commit_ids:
1781 for commit_id in self.commit_ids:
1783 # TODO: johbo: Mercurial passes in commit indices or commit ids
1782 # TODO: johbo: Mercurial passes in commit indices or commit ids
1784 yield self._commit_factory(commit_id)
1783 yield self._commit_factory(commit_id)
1785
1784
1786 def _commit_factory(self, commit_id):
1785 def _commit_factory(self, commit_id):
1787 """
1786 """
1788 Allows backends to override the way commits are generated.
1787 Allows backends to override the way commits are generated.
1789 """
1788 """
1790 return self.repo.get_commit(
1789 return self.repo.get_commit(
1791 commit_id=commit_id, pre_load=self.pre_load,
1790 commit_id=commit_id, pre_load=self.pre_load,
1792 translate_tag=self.translate_tag)
1791 translate_tag=self.translate_tag)
1793
1792
1794 def __getitem__(self, key):
1793 def __getitem__(self, key):
1795 """Return either a single element by index, or a sliced collection."""
1794 """Return either a single element by index, or a sliced collection."""
1796
1795
1797 if isinstance(key, slice):
1796 if isinstance(key, slice):
1798 commit_ids = self.commit_ids[key.start:key.stop]
1797 commit_ids = self.commit_ids[key.start:key.stop]
1799
1798
1800 else:
1799 else:
1801 # single item
1800 # single item
1802 commit_ids = self.commit_ids[key]
1801 commit_ids = self.commit_ids[key]
1803
1802
1804 return self.__class__(
1803 return self.__class__(
1805 self.repo, commit_ids, pre_load=self.pre_load,
1804 self.repo, commit_ids, pre_load=self.pre_load,
1806 translate_tag=self.translate_tag)
1805 translate_tag=self.translate_tag)
1807
1806
1808 def __repr__(self):
1807 def __repr__(self):
1809 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1808 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1810
1809
1811
1810
1812 class Config(object):
1811 class Config(object):
1813 """
1812 """
1814 Represents the configuration for a repository.
1813 Represents the configuration for a repository.
1815
1814
1816 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1815 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1817 standard library. It implements only the needed subset.
1816 standard library. It implements only the needed subset.
1818 """
1817 """
1819
1818
1820 def __init__(self):
1819 def __init__(self):
1821 self._values = {}
1820 self._values = {}
1822
1821
1823 def copy(self):
1822 def copy(self):
1824 clone = Config()
1823 clone = Config()
1825 for section, values in self._values.items():
1824 for section, values in self._values.items():
1826 clone._values[section] = values.copy()
1825 clone._values[section] = values.copy()
1827 return clone
1826 return clone
1828
1827
1829 def __repr__(self):
1828 def __repr__(self):
1830 return '<Config({} sections) at {}>'.format(
1829 return '<Config({} sections) at {}>'.format(
1831 len(self._values), hex(id(self)))
1830 len(self._values), hex(id(self)))
1832
1831
1833 def items(self, section):
1832 def items(self, section):
1834 return self._values.get(section, {}).items()
1833 return self._values.get(section, {}).items()
1835
1834
1836 def get(self, section, option):
1835 def get(self, section, option):
1837 return self._values.get(section, {}).get(option)
1836 return self._values.get(section, {}).get(option)
1838
1837
1839 def set(self, section, option, value):
1838 def set(self, section, option, value):
1840 section_values = self._values.setdefault(section, {})
1839 section_values = self._values.setdefault(section, {})
1841 section_values[option] = value
1840 section_values[option] = value
1842
1841
1843 def clear_section(self, section):
1842 def clear_section(self, section):
1844 self._values[section] = {}
1843 self._values[section] = {}
1845
1844
1846 def serialize(self):
1845 def serialize(self):
1847 """
1846 """
1848 Creates a list of three tuples (section, key, value) representing
1847 Creates a list of three tuples (section, key, value) representing
1849 this config object.
1848 this config object.
1850 """
1849 """
1851 items = []
1850 items = []
1852 for section in self._values:
1851 for section in self._values:
1853 for option, value in self._values[section].items():
1852 for option, value in self._values[section].items():
1854 items.append(
1853 items.append(
1855 (safe_str(section), safe_str(option), safe_str(value)))
1854 (safe_str(section), safe_str(option), safe_str(value)))
1856 return items
1855 return items
1857
1856
1858
1857
1859 class Diff(object):
1858 class Diff(object):
1860 """
1859 """
1861 Represents a diff result from a repository backend.
1860 Represents a diff result from a repository backend.
1862
1861
1863 Subclasses have to provide a backend specific value for
1862 Subclasses have to provide a backend specific value for
1864 :attr:`_header_re` and :attr:`_meta_re`.
1863 :attr:`_header_re` and :attr:`_meta_re`.
1865 """
1864 """
1866 _meta_re = None
1865 _meta_re = None
1867 _header_re: bytes = re.compile(br"")
1866 _header_re: bytes = re.compile(br"")
1868
1867
1869 def __init__(self, raw_diff: bytes):
1868 def __init__(self, raw_diff: bytes):
1870 if not isinstance(raw_diff, bytes):
1869 if not isinstance(raw_diff, bytes):
1871 raise Exception(f'raw_diff must be bytes - got {type(raw_diff)}')
1870 raise Exception(f'raw_diff must be bytes - got {type(raw_diff)}')
1872
1871
1873 self.raw = memoryview(raw_diff)
1872 self.raw = memoryview(raw_diff)
1874
1873
1875 def get_header_re(self):
1874 def get_header_re(self):
1876 return self._header_re
1875 return self._header_re
1877
1876
1878 def chunks(self):
1877 def chunks(self):
1879 """
1878 """
1880 split the diff in chunks of separate --git a/file b/file chunks
1879 split the diff in chunks of separate --git a/file b/file chunks
1881 to make diffs consistent we must prepend with \n, and make sure
1880 to make diffs consistent we must prepend with \n, and make sure
1882 we can detect last chunk as this was also has special rule
1881 we can detect last chunk as this was also has special rule
1883 """
1882 """
1884
1883
1885 diff_parts = (b'\n' + bytes(self.raw)).split(b'\ndiff --git')
1884 diff_parts = (b'\n' + bytes(self.raw)).split(b'\ndiff --git')
1886
1885
1887 chunks = diff_parts[1:]
1886 chunks = diff_parts[1:]
1888 total_chunks = len(chunks)
1887 total_chunks = len(chunks)
1889
1888
1890 def diff_iter(_chunks):
1889 def diff_iter(_chunks):
1891 for cur_chunk, chunk in enumerate(_chunks, start=1):
1890 for cur_chunk, chunk in enumerate(_chunks, start=1):
1892 yield DiffChunk(chunk, self, cur_chunk == total_chunks)
1891 yield DiffChunk(chunk, self, cur_chunk == total_chunks)
1893 return diff_iter(chunks)
1892 return diff_iter(chunks)
1894
1893
1895
1894
1896 class DiffChunk(object):
1895 class DiffChunk(object):
1897
1896
1898 def __init__(self, chunk: bytes, diff_obj: Diff, is_last_chunk: bool):
1897 def __init__(self, chunk: bytes, diff_obj: Diff, is_last_chunk: bool):
1899 self.diff_obj = diff_obj
1898 self.diff_obj = diff_obj
1900
1899
1901 # since we split by \ndiff --git that part is lost from original diff
1900 # since we split by \ndiff --git that part is lost from original diff
1902 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1901 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1903 if not is_last_chunk:
1902 if not is_last_chunk:
1904 chunk += b'\n'
1903 chunk += b'\n'
1905 header_re = self.diff_obj.get_header_re()
1904 header_re = self.diff_obj.get_header_re()
1906 match = header_re.match(chunk)
1905 match = header_re.match(chunk)
1907 self.header = match.groupdict()
1906 self.header = match.groupdict()
1908 self.diff = chunk[match.end():]
1907 self.diff = chunk[match.end():]
1909 self.raw = chunk
1908 self.raw = chunk
1910
1909
1911 @property
1910 @property
1912 def header_as_str(self):
1911 def header_as_str(self):
1913 if self.header:
1912 if self.header:
1914 def safe_str_on_bytes(val):
1913 def safe_str_on_bytes(val):
1915 if isinstance(val, bytes):
1914 if isinstance(val, bytes):
1916 return safe_str(val)
1915 return safe_str(val)
1917 return val
1916 return val
1918 return {safe_str(k): safe_str_on_bytes(v) for k, v in self.header.items()}
1917 return {safe_str(k): safe_str_on_bytes(v) for k, v in self.header.items()}
1919
1918
1920 def __repr__(self):
1919 def __repr__(self):
1921 return f'DiffChunk({self.header_as_str})'
1920 return f'DiffChunk({self.header_as_str})'
1922
1921
1923
1922
1924 class BasePathPermissionChecker(object):
1923 class BasePathPermissionChecker(object):
1925
1924
1926 @staticmethod
1925 @staticmethod
1927 def create_from_patterns(includes, excludes):
1926 def create_from_patterns(includes, excludes):
1928 if includes and '*' in includes and not excludes:
1927 if includes and '*' in includes and not excludes:
1929 return AllPathPermissionChecker()
1928 return AllPathPermissionChecker()
1930 elif excludes and '*' in excludes:
1929 elif excludes and '*' in excludes:
1931 return NonePathPermissionChecker()
1930 return NonePathPermissionChecker()
1932 else:
1931 else:
1933 return PatternPathPermissionChecker(includes, excludes)
1932 return PatternPathPermissionChecker(includes, excludes)
1934
1933
1935 @property
1934 @property
1936 def has_full_access(self):
1935 def has_full_access(self):
1937 raise NotImplementedError()
1936 raise NotImplementedError()
1938
1937
1939 def has_access(self, path):
1938 def has_access(self, path):
1940 raise NotImplementedError()
1939 raise NotImplementedError()
1941
1940
1942
1941
1943 class AllPathPermissionChecker(BasePathPermissionChecker):
1942 class AllPathPermissionChecker(BasePathPermissionChecker):
1944
1943
1945 @property
1944 @property
1946 def has_full_access(self):
1945 def has_full_access(self):
1947 return True
1946 return True
1948
1947
1949 def has_access(self, path):
1948 def has_access(self, path):
1950 return True
1949 return True
1951
1950
1952
1951
1953 class NonePathPermissionChecker(BasePathPermissionChecker):
1952 class NonePathPermissionChecker(BasePathPermissionChecker):
1954
1953
1955 @property
1954 @property
1956 def has_full_access(self):
1955 def has_full_access(self):
1957 return False
1956 return False
1958
1957
1959 def has_access(self, path):
1958 def has_access(self, path):
1960 return False
1959 return False
1961
1960
1962
1961
1963 class PatternPathPermissionChecker(BasePathPermissionChecker):
1962 class PatternPathPermissionChecker(BasePathPermissionChecker):
1964
1963
1965 def __init__(self, includes, excludes):
1964 def __init__(self, includes, excludes):
1966 self.includes = includes
1965 self.includes = includes
1967 self.excludes = excludes
1966 self.excludes = excludes
1968 self.includes_re = [] if not includes else [
1967 self.includes_re = [] if not includes else [
1969 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1968 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1970 self.excludes_re = [] if not excludes else [
1969 self.excludes_re = [] if not excludes else [
1971 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1970 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1972
1971
1973 @property
1972 @property
1974 def has_full_access(self):
1973 def has_full_access(self):
1975 return '*' in self.includes and not self.excludes
1974 return '*' in self.includes and not self.excludes
1976
1975
1977 def has_access(self, path):
1976 def has_access(self, path):
1978 for regex in self.excludes_re:
1977 for regex in self.excludes_re:
1979 if regex.match(path):
1978 if regex.match(path):
1980 return False
1979 return False
1981 for regex in self.includes_re:
1980 for regex in self.includes_re:
1982 if regex.match(path):
1981 if regex.match(path):
1983 return True
1982 return True
1984 return False
1983 return False
@@ -1,2392 +1,2390 b''
1 # Copyright (C) 2012-2023 RhodeCode GmbH
1 # Copyright (C) 2012-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19
19
20 """
20 """
21 pull request model for RhodeCode
21 pull request model for RhodeCode
22 """
22 """
23
23
24 import logging
24 import logging
25 import os
25 import os
26
26
27 import datetime
27 import datetime
28 import urllib.request
28 import urllib.request
29 import urllib.parse
29 import urllib.parse
30 import urllib.error
30 import urllib.error
31 import collections
31 import collections
32
32
33 import dataclasses as dataclasses
33 import dataclasses as dataclasses
34 from pyramid.threadlocal import get_current_request
34 from pyramid.threadlocal import get_current_request
35
35
36 from rhodecode.lib.vcs.nodes import FileNode
36 from rhodecode.lib.vcs.nodes import FileNode
37 from rhodecode.translation import lazy_ugettext
37 from rhodecode.translation import lazy_ugettext
38 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 from rhodecode.lib import helpers as h, hooks_utils, diffs
39 from rhodecode.lib import audit_logger
39 from rhodecode.lib import audit_logger
40 from collections import OrderedDict
40 from collections import OrderedDict
41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
42 from rhodecode.lib.ext_json import sjson as json
42 from rhodecode.lib.ext_json import sjson as json
43 from rhodecode.lib.markup_renderer import (
43 from rhodecode.lib.markup_renderer import (
44 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
44 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
45 from rhodecode.lib.hash_utils import md5_safe
45 from rhodecode.lib.hash_utils import md5_safe
46 from rhodecode.lib.str_utils import safe_str
46 from rhodecode.lib.str_utils import safe_str
47 from rhodecode.lib.utils2 import AttributeDict, get_current_rhodecode_user
47 from rhodecode.lib.utils2 import AttributeDict, get_current_rhodecode_user
48 from rhodecode.lib.vcs.backends.base import (
48 from rhodecode.lib.vcs.backends.base import (
49 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
49 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
50 TargetRefMissing, SourceRefMissing)
50 TargetRefMissing, SourceRefMissing)
51 from rhodecode.lib.vcs.conf import settings as vcs_settings
51 from rhodecode.lib.vcs.conf import settings as vcs_settings
52 from rhodecode.lib.vcs.exceptions import (
52 from rhodecode.lib.vcs.exceptions import (
53 CommitDoesNotExistError, EmptyRepositoryError)
53 CommitDoesNotExistError, EmptyRepositoryError)
54 from rhodecode.model import BaseModel
54 from rhodecode.model import BaseModel
55 from rhodecode.model.changeset_status import ChangesetStatusModel
55 from rhodecode.model.changeset_status import ChangesetStatusModel
56 from rhodecode.model.comment import CommentsModel
56 from rhodecode.model.comment import CommentsModel
57 from rhodecode.model.db import (
57 from rhodecode.model.db import (
58 aliased, null, lazyload, and_, or_, select, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
58 aliased, null, lazyload, and_, or_, select, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
59 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
59 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
60 from rhodecode.model.meta import Session
60 from rhodecode.model.meta import Session
61 from rhodecode.model.notification import NotificationModel, \
61 from rhodecode.model.notification import NotificationModel, \
62 EmailNotificationModel
62 EmailNotificationModel
63 from rhodecode.model.scm import ScmModel
63 from rhodecode.model.scm import ScmModel
64 from rhodecode.model.settings import VcsSettingsModel
64 from rhodecode.model.settings import VcsSettingsModel
65
65
66
66
67 log = logging.getLogger(__name__)
67 log = logging.getLogger(__name__)
68
68
69
69
70 # Data structure to hold the response data when updating commits during a pull
70 # Data structure to hold the response data when updating commits during a pull
71 # request update.
71 # request update.
72 class UpdateResponse(object):
72 class UpdateResponse(object):
73
73
74 def __init__(self, executed, reason, new, old, common_ancestor_id,
74 def __init__(self, executed, reason, new, old, common_ancestor_id,
75 commit_changes, source_changed, target_changed):
75 commit_changes, source_changed, target_changed):
76
76
77 self.executed = executed
77 self.executed = executed
78 self.reason = reason
78 self.reason = reason
79 self.new = new
79 self.new = new
80 self.old = old
80 self.old = old
81 self.common_ancestor_id = common_ancestor_id
81 self.common_ancestor_id = common_ancestor_id
82 self.changes = commit_changes
82 self.changes = commit_changes
83 self.source_changed = source_changed
83 self.source_changed = source_changed
84 self.target_changed = target_changed
84 self.target_changed = target_changed
85
85
86
86
87 def get_diff_info(
87 def get_diff_info(
88 source_repo, source_ref, target_repo, target_ref, get_authors=False,
88 source_repo, source_ref, target_repo, target_ref, get_authors=False,
89 get_commit_authors=True):
89 get_commit_authors=True):
90 """
90 """
91 Calculates detailed diff information for usage in preview of creation of a pull-request.
91 Calculates detailed diff information for usage in preview of creation of a pull-request.
92 This is also used for default reviewers logic
92 This is also used for default reviewers logic
93 """
93 """
94
94
95 source_scm = source_repo.scm_instance()
95 source_scm = source_repo.scm_instance()
96 target_scm = target_repo.scm_instance()
96 target_scm = target_repo.scm_instance()
97
97
98 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
98 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
99 if not ancestor_id:
99 if not ancestor_id:
100 raise ValueError(
100 raise ValueError(
101 'cannot calculate diff info without a common ancestor. '
101 'cannot calculate diff info without a common ancestor. '
102 'Make sure both repositories are related, and have a common forking commit.')
102 'Make sure both repositories are related, and have a common forking commit.')
103
103
104 # case here is that want a simple diff without incoming commits,
104 # case here is that want a simple diff without incoming commits,
105 # previewing what will be merged based only on commits in the source.
105 # previewing what will be merged based only on commits in the source.
106 log.debug('Using ancestor %s as source_ref instead of %s',
106 log.debug('Using ancestor %s as source_ref instead of %s',
107 ancestor_id, source_ref)
107 ancestor_id, source_ref)
108
108
109 # source of changes now is the common ancestor
109 # source of changes now is the common ancestor
110 source_commit = source_scm.get_commit(commit_id=ancestor_id)
110 source_commit = source_scm.get_commit(commit_id=ancestor_id)
111 # target commit becomes the source ref as it is the last commit
111 # target commit becomes the source ref as it is the last commit
112 # for diff generation this logic gives proper diff
112 # for diff generation this logic gives proper diff
113 target_commit = source_scm.get_commit(commit_id=source_ref)
113 target_commit = source_scm.get_commit(commit_id=source_ref)
114
114
115 vcs_diff = \
115 vcs_diff = \
116 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
116 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
117 ignore_whitespace=False, context=3)
117 ignore_whitespace=False, context=3)
118
118
119 diff_processor = diffs.DiffProcessor(vcs_diff, diff_format='newdiff',
119 diff_processor = diffs.DiffProcessor(vcs_diff, diff_format='newdiff',
120 diff_limit=0, file_limit=0, show_full_diff=True)
120 diff_limit=0, file_limit=0, show_full_diff=True)
121
121
122 _parsed = diff_processor.prepare()
122 _parsed = diff_processor.prepare()
123
123
124 all_files = []
124 all_files = []
125 all_files_changes = []
125 all_files_changes = []
126 changed_lines = {}
126 changed_lines = {}
127 stats = [0, 0]
127 stats = [0, 0]
128 for f in _parsed:
128 for f in _parsed:
129 all_files.append(f['filename'])
129 all_files.append(f['filename'])
130 all_files_changes.append({
130 all_files_changes.append({
131 'filename': f['filename'],
131 'filename': f['filename'],
132 'stats': f['stats']
132 'stats': f['stats']
133 })
133 })
134 stats[0] += f['stats']['added']
134 stats[0] += f['stats']['added']
135 stats[1] += f['stats']['deleted']
135 stats[1] += f['stats']['deleted']
136
136
137 changed_lines[f['filename']] = []
137 changed_lines[f['filename']] = []
138 if len(f['chunks']) < 2:
138 if len(f['chunks']) < 2:
139 continue
139 continue
140 # first line is "context" information
140 # first line is "context" information
141 for chunks in f['chunks'][1:]:
141 for chunks in f['chunks'][1:]:
142 for chunk in chunks['lines']:
142 for chunk in chunks['lines']:
143 if chunk['action'] not in ('del', 'mod'):
143 if chunk['action'] not in ('del', 'mod'):
144 continue
144 continue
145 changed_lines[f['filename']].append(chunk['old_lineno'])
145 changed_lines[f['filename']].append(chunk['old_lineno'])
146
146
147 commit_authors = []
147 commit_authors = []
148 user_counts = {}
148 user_counts = {}
149 email_counts = {}
149 email_counts = {}
150 author_counts = {}
150 author_counts = {}
151 _commit_cache = {}
151 _commit_cache = {}
152
152
153 commits = []
153 commits = []
154 if get_commit_authors:
154 if get_commit_authors:
155 log.debug('Obtaining commit authors from set of commits')
155 log.debug('Obtaining commit authors from set of commits')
156 _compare_data = target_scm.compare(
156 _compare_data = target_scm.compare(
157 target_ref, source_ref, source_scm, merge=True,
157 target_ref, source_ref, source_scm, merge=True,
158 pre_load=["author", "date", "message"]
158 pre_load=["author", "date", "message"]
159 )
159 )
160
160
161 for commit in _compare_data:
161 for commit in _compare_data:
162 # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned
162 # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned
163 # at this function which is later called via JSON serialization
163 # at this function which is later called via JSON serialization
164 serialized_commit = dict(
164 serialized_commit = dict(
165 author=commit.author,
165 author=commit.author,
166 date=commit.date,
166 date=commit.date,
167 message=commit.message,
167 message=commit.message,
168 commit_id=commit.raw_id,
168 commit_id=commit.raw_id,
169 raw_id=commit.raw_id
169 raw_id=commit.raw_id
170 )
170 )
171 commits.append(serialized_commit)
171 commits.append(serialized_commit)
172 user = User.get_from_cs_author(serialized_commit['author'])
172 user = User.get_from_cs_author(serialized_commit['author'])
173 if user and user not in commit_authors:
173 if user and user not in commit_authors:
174 commit_authors.append(user)
174 commit_authors.append(user)
175
175
176 # lines
176 # lines
177 if get_authors:
177 if get_authors:
178 log.debug('Calculating authors of changed files')
178 log.debug('Calculating authors of changed files')
179 target_commit = source_repo.get_commit(ancestor_id)
179 target_commit = source_repo.get_commit(ancestor_id)
180
180
181 for fname, lines in changed_lines.items():
181 for fname, lines in changed_lines.items():
182
182
183 try:
183 try:
184 node = target_commit.get_node(fname, pre_load=["is_binary"])
184 node = target_commit.get_node(fname, pre_load=["is_binary"])
185 except Exception:
185 except Exception:
186 log.exception("Failed to load node with path %s", fname)
186 log.exception("Failed to load node with path %s", fname)
187 continue
187 continue
188
188
189 if not isinstance(node, FileNode):
189 if not isinstance(node, FileNode):
190 continue
190 continue
191
191
192 # NOTE(marcink): for binary node we don't do annotation, just use last author
192 # NOTE(marcink): for binary node we don't do annotation, just use last author
193 if node.is_binary:
193 if node.is_binary:
194 author = node.last_commit.author
194 author = node.last_commit.author
195 email = node.last_commit.author_email
195 email = node.last_commit.author_email
196
196
197 user = User.get_from_cs_author(author)
197 user = User.get_from_cs_author(author)
198 if user:
198 if user:
199 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
199 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
200 author_counts[author] = author_counts.get(author, 0) + 1
200 author_counts[author] = author_counts.get(author, 0) + 1
201 email_counts[email] = email_counts.get(email, 0) + 1
201 email_counts[email] = email_counts.get(email, 0) + 1
202
202
203 continue
203 continue
204
204
205 for annotation in node.annotate:
205 for annotation in node.annotate:
206 line_no, commit_id, get_commit_func, line_text = annotation
206 line_no, commit_id, get_commit_func, line_text = annotation
207 if line_no in lines:
207 if line_no in lines:
208 if commit_id not in _commit_cache:
208 if commit_id not in _commit_cache:
209 _commit_cache[commit_id] = get_commit_func()
209 _commit_cache[commit_id] = get_commit_func()
210 commit = _commit_cache[commit_id]
210 commit = _commit_cache[commit_id]
211 author = commit.author
211 author = commit.author
212 email = commit.author_email
212 email = commit.author_email
213 user = User.get_from_cs_author(author)
213 user = User.get_from_cs_author(author)
214 if user:
214 if user:
215 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
215 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
216 author_counts[author] = author_counts.get(author, 0) + 1
216 author_counts[author] = author_counts.get(author, 0) + 1
217 email_counts[email] = email_counts.get(email, 0) + 1
217 email_counts[email] = email_counts.get(email, 0) + 1
218
218
219 log.debug('Default reviewers processing finished')
219 log.debug('Default reviewers processing finished')
220
220
221 return {
221 return {
222 'commits': commits,
222 'commits': commits,
223 'files': all_files_changes,
223 'files': all_files_changes,
224 'stats': stats,
224 'stats': stats,
225 'ancestor': ancestor_id,
225 'ancestor': ancestor_id,
226 # original authors of modified files
226 # original authors of modified files
227 'original_authors': {
227 'original_authors': {
228 'users': user_counts,
228 'users': user_counts,
229 'authors': author_counts,
229 'authors': author_counts,
230 'emails': email_counts,
230 'emails': email_counts,
231 },
231 },
232 'commit_authors': commit_authors
232 'commit_authors': commit_authors
233 }
233 }
234
234
235
235
236 class PullRequestModel(BaseModel):
236 class PullRequestModel(BaseModel):
237
237
238 cls = PullRequest
238 cls = PullRequest
239
239
240 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
240 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
241
241
242 UPDATE_STATUS_MESSAGES = {
242 UPDATE_STATUS_MESSAGES = {
243 UpdateFailureReason.NONE: lazy_ugettext(
243 UpdateFailureReason.NONE: lazy_ugettext(
244 'Pull request update successful.'),
244 'Pull request update successful.'),
245 UpdateFailureReason.UNKNOWN: lazy_ugettext(
245 UpdateFailureReason.UNKNOWN: lazy_ugettext(
246 'Pull request update failed because of an unknown error.'),
246 'Pull request update failed because of an unknown error.'),
247 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
247 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
248 'No update needed because the source and target have not changed.'),
248 'No update needed because the source and target have not changed.'),
249 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
249 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
250 'Pull request cannot be updated because the reference type is '
250 'Pull request cannot be updated because the reference type is '
251 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
251 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
252 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
252 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
253 'This pull request cannot be updated because the target '
253 'This pull request cannot be updated because the target '
254 'reference is missing.'),
254 'reference is missing.'),
255 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
255 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
256 'This pull request cannot be updated because the source '
256 'This pull request cannot be updated because the source '
257 'reference is missing.'),
257 'reference is missing.'),
258 }
258 }
259 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
259 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
260 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
260 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
261
261
262 def __get_pull_request(self, pull_request):
262 def __get_pull_request(self, pull_request):
263 return self._get_instance((
263 return self._get_instance((
264 PullRequest, PullRequestVersion), pull_request)
264 PullRequest, PullRequestVersion), pull_request)
265
265
266 def _check_perms(self, perms, pull_request, user, api=False):
266 def _check_perms(self, perms, pull_request, user, api=False):
267 if not api:
267 if not api:
268 return h.HasRepoPermissionAny(*perms)(
268 return h.HasRepoPermissionAny(*perms)(
269 user=user, repo_name=pull_request.target_repo.repo_name)
269 user=user, repo_name=pull_request.target_repo.repo_name)
270 else:
270 else:
271 return h.HasRepoPermissionAnyApi(*perms)(
271 return h.HasRepoPermissionAnyApi(*perms)(
272 user=user, repo_name=pull_request.target_repo.repo_name)
272 user=user, repo_name=pull_request.target_repo.repo_name)
273
273
274 def check_user_read(self, pull_request, user, api=False):
274 def check_user_read(self, pull_request, user, api=False):
275 _perms = ('repository.admin', 'repository.write', 'repository.read',)
275 _perms = ('repository.admin', 'repository.write', 'repository.read',)
276 return self._check_perms(_perms, pull_request, user, api)
276 return self._check_perms(_perms, pull_request, user, api)
277
277
278 def check_user_merge(self, pull_request, user, api=False):
278 def check_user_merge(self, pull_request, user, api=False):
279 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
279 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
280 return self._check_perms(_perms, pull_request, user, api)
280 return self._check_perms(_perms, pull_request, user, api)
281
281
282 def check_user_update(self, pull_request, user, api=False):
282 def check_user_update(self, pull_request, user, api=False):
283 owner = user.user_id == pull_request.user_id
283 owner = user.user_id == pull_request.user_id
284 return self.check_user_merge(pull_request, user, api) or owner
284 return self.check_user_merge(pull_request, user, api) or owner
285
285
286 def check_user_delete(self, pull_request, user):
286 def check_user_delete(self, pull_request, user):
287 owner = user.user_id == pull_request.user_id
287 owner = user.user_id == pull_request.user_id
288 _perms = ('repository.admin',)
288 _perms = ('repository.admin',)
289 return self._check_perms(_perms, pull_request, user) or owner
289 return self._check_perms(_perms, pull_request, user) or owner
290
290
291 def is_user_reviewer(self, pull_request, user):
291 def is_user_reviewer(self, pull_request, user):
292 return user.user_id in [
292 return user.user_id in [
293 x.user_id for x in
293 x.user_id for x in
294 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
294 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
295 if x.user
295 if x.user
296 ]
296 ]
297
297
298 def check_user_change_status(self, pull_request, user, api=False):
298 def check_user_change_status(self, pull_request, user, api=False):
299 return self.check_user_update(pull_request, user, api) \
299 return self.check_user_update(pull_request, user, api) \
300 or self.is_user_reviewer(pull_request, user)
300 or self.is_user_reviewer(pull_request, user)
301
301
302 def check_user_comment(self, pull_request, user):
302 def check_user_comment(self, pull_request, user):
303 owner = user.user_id == pull_request.user_id
303 owner = user.user_id == pull_request.user_id
304 return self.check_user_read(pull_request, user) or owner
304 return self.check_user_read(pull_request, user) or owner
305
305
306 def get(self, pull_request):
306 def get(self, pull_request):
307 return self.__get_pull_request(pull_request)
307 return self.__get_pull_request(pull_request)
308
308
309 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
309 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
310 statuses=None, opened_by=None, order_by=None,
310 statuses=None, opened_by=None, order_by=None,
311 order_dir='desc', only_created=False):
311 order_dir='desc', only_created=False):
312 repo = None
312 repo = None
313 if repo_name:
313 if repo_name:
314 repo = self._get_repo(repo_name)
314 repo = self._get_repo(repo_name)
315
315
316 q = PullRequest.query()
316 q = PullRequest.query()
317
317
318 if search_q:
318 if search_q:
319 like_expression = u'%{}%'.format(safe_str(search_q))
319 like_expression = u'%{}%'.format(safe_str(search_q))
320 q = q.join(User, User.user_id == PullRequest.user_id)
320 q = q.join(User, User.user_id == PullRequest.user_id)
321 q = q.filter(or_(
321 q = q.filter(or_(
322 cast(PullRequest.pull_request_id, String).ilike(like_expression),
322 cast(PullRequest.pull_request_id, String).ilike(like_expression),
323 User.username.ilike(like_expression),
323 User.username.ilike(like_expression),
324 PullRequest.title.ilike(like_expression),
324 PullRequest.title.ilike(like_expression),
325 PullRequest.description.ilike(like_expression),
325 PullRequest.description.ilike(like_expression),
326 ))
326 ))
327
327
328 # source or target
328 # source or target
329 if repo and source:
329 if repo and source:
330 q = q.filter(PullRequest.source_repo == repo)
330 q = q.filter(PullRequest.source_repo == repo)
331 elif repo:
331 elif repo:
332 q = q.filter(PullRequest.target_repo == repo)
332 q = q.filter(PullRequest.target_repo == repo)
333
333
334 # closed,opened
334 # closed,opened
335 if statuses:
335 if statuses:
336 q = q.filter(PullRequest.status.in_(statuses))
336 q = q.filter(PullRequest.status.in_(statuses))
337
337
338 # opened by filter
338 # opened by filter
339 if opened_by:
339 if opened_by:
340 q = q.filter(PullRequest.user_id.in_(opened_by))
340 q = q.filter(PullRequest.user_id.in_(opened_by))
341
341
342 # only get those that are in "created" state
342 # only get those that are in "created" state
343 if only_created:
343 if only_created:
344 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
344 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
345
345
346 order_map = {
346 order_map = {
347 'name_raw': PullRequest.pull_request_id,
347 'name_raw': PullRequest.pull_request_id,
348 'id': PullRequest.pull_request_id,
348 'id': PullRequest.pull_request_id,
349 'title': PullRequest.title,
349 'title': PullRequest.title,
350 'updated_on_raw': PullRequest.updated_on,
350 'updated_on_raw': PullRequest.updated_on,
351 'target_repo': PullRequest.target_repo_id
351 'target_repo': PullRequest.target_repo_id
352 }
352 }
353 if order_by and order_by in order_map:
353 if order_by and order_by in order_map:
354 if order_dir == 'asc':
354 if order_dir == 'asc':
355 q = q.order_by(order_map[order_by].asc())
355 q = q.order_by(order_map[order_by].asc())
356 else:
356 else:
357 q = q.order_by(order_map[order_by].desc())
357 q = q.order_by(order_map[order_by].desc())
358
358
359 return q
359 return q
360
360
361 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
361 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
362 opened_by=None):
362 opened_by=None):
363 """
363 """
364 Count the number of pull requests for a specific repository.
364 Count the number of pull requests for a specific repository.
365
365
366 :param repo_name: target or source repo
366 :param repo_name: target or source repo
367 :param search_q: filter by text
367 :param search_q: filter by text
368 :param source: boolean flag to specify if repo_name refers to source
368 :param source: boolean flag to specify if repo_name refers to source
369 :param statuses: list of pull request statuses
369 :param statuses: list of pull request statuses
370 :param opened_by: author user of the pull request
370 :param opened_by: author user of the pull request
371 :returns: int number of pull requests
371 :returns: int number of pull requests
372 """
372 """
373 q = self._prepare_get_all_query(
373 q = self._prepare_get_all_query(
374 repo_name, search_q=search_q, source=source, statuses=statuses,
374 repo_name, search_q=search_q, source=source, statuses=statuses,
375 opened_by=opened_by)
375 opened_by=opened_by)
376
376
377 return q.count()
377 return q.count()
378
378
379 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
379 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
380 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
380 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
381 """
381 """
382 Get all pull requests for a specific repository.
382 Get all pull requests for a specific repository.
383
383
384 :param repo_name: target or source repo
384 :param repo_name: target or source repo
385 :param search_q: filter by text
385 :param search_q: filter by text
386 :param source: boolean flag to specify if repo_name refers to source
386 :param source: boolean flag to specify if repo_name refers to source
387 :param statuses: list of pull request statuses
387 :param statuses: list of pull request statuses
388 :param opened_by: author user of the pull request
388 :param opened_by: author user of the pull request
389 :param offset: pagination offset
389 :param offset: pagination offset
390 :param length: length of returned list
390 :param length: length of returned list
391 :param order_by: order of the returned list
391 :param order_by: order of the returned list
392 :param order_dir: 'asc' or 'desc' ordering direction
392 :param order_dir: 'asc' or 'desc' ordering direction
393 :returns: list of pull requests
393 :returns: list of pull requests
394 """
394 """
395 q = self._prepare_get_all_query(
395 q = self._prepare_get_all_query(
396 repo_name, search_q=search_q, source=source, statuses=statuses,
396 repo_name, search_q=search_q, source=source, statuses=statuses,
397 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
397 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
398
398
399 if length:
399 if length:
400 pull_requests = q.limit(length).offset(offset).all()
400 pull_requests = q.limit(length).offset(offset).all()
401 else:
401 else:
402 pull_requests = q.all()
402 pull_requests = q.all()
403
403
404 return pull_requests
404 return pull_requests
405
405
406 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
406 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
407 """
407 """
408 Count the number of pull requests for a specific repository that are
408 Count the number of pull requests for a specific repository that are
409 awaiting review.
409 awaiting review.
410
410
411 :param repo_name: target or source repo
411 :param repo_name: target or source repo
412 :param search_q: filter by text
412 :param search_q: filter by text
413 :param statuses: list of pull request statuses
413 :param statuses: list of pull request statuses
414 :returns: int number of pull requests
414 :returns: int number of pull requests
415 """
415 """
416 pull_requests = self.get_awaiting_review(
416 pull_requests = self.get_awaiting_review(
417 repo_name, search_q=search_q, statuses=statuses)
417 repo_name, search_q=search_q, statuses=statuses)
418
418
419 return len(pull_requests)
419 return len(pull_requests)
420
420
421 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
421 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
422 offset=0, length=None, order_by=None, order_dir='desc'):
422 offset=0, length=None, order_by=None, order_dir='desc'):
423 """
423 """
424 Get all pull requests for a specific repository that are awaiting
424 Get all pull requests for a specific repository that are awaiting
425 review.
425 review.
426
426
427 :param repo_name: target or source repo
427 :param repo_name: target or source repo
428 :param search_q: filter by text
428 :param search_q: filter by text
429 :param statuses: list of pull request statuses
429 :param statuses: list of pull request statuses
430 :param offset: pagination offset
430 :param offset: pagination offset
431 :param length: length of returned list
431 :param length: length of returned list
432 :param order_by: order of the returned list
432 :param order_by: order of the returned list
433 :param order_dir: 'asc' or 'desc' ordering direction
433 :param order_dir: 'asc' or 'desc' ordering direction
434 :returns: list of pull requests
434 :returns: list of pull requests
435 """
435 """
436 pull_requests = self.get_all(
436 pull_requests = self.get_all(
437 repo_name, search_q=search_q, statuses=statuses,
437 repo_name, search_q=search_q, statuses=statuses,
438 order_by=order_by, order_dir=order_dir)
438 order_by=order_by, order_dir=order_dir)
439
439
440 _filtered_pull_requests = []
440 _filtered_pull_requests = []
441 for pr in pull_requests:
441 for pr in pull_requests:
442 status = pr.calculated_review_status()
442 status = pr.calculated_review_status()
443 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
443 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
444 ChangesetStatus.STATUS_UNDER_REVIEW]:
444 ChangesetStatus.STATUS_UNDER_REVIEW]:
445 _filtered_pull_requests.append(pr)
445 _filtered_pull_requests.append(pr)
446 if length:
446 if length:
447 return _filtered_pull_requests[offset:offset+length]
447 return _filtered_pull_requests[offset:offset+length]
448 else:
448 else:
449 return _filtered_pull_requests
449 return _filtered_pull_requests
450
450
451 def _prepare_awaiting_my_review_review_query(
451 def _prepare_awaiting_my_review_review_query(
452 self, repo_name, user_id, search_q=None, statuses=None,
452 self, repo_name, user_id, search_q=None, statuses=None,
453 order_by=None, order_dir='desc'):
453 order_by=None, order_dir='desc'):
454
454
455 for_review_statuses = [
455 for_review_statuses = [
456 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
456 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
457 ]
457 ]
458
458
459 pull_request_alias = aliased(PullRequest)
459 pull_request_alias = aliased(PullRequest)
460 status_alias = aliased(ChangesetStatus)
460 status_alias = aliased(ChangesetStatus)
461 reviewers_alias = aliased(PullRequestReviewers)
461 reviewers_alias = aliased(PullRequestReviewers)
462 repo_alias = aliased(Repository)
462 repo_alias = aliased(Repository)
463
463
464 last_ver_subq = Session()\
464 last_ver_subq = Session()\
465 .query(func.min(ChangesetStatus.version)) \
465 .query(func.min(ChangesetStatus.version)) \
466 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
466 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
467 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
467 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
468 .subquery()
468 .subquery()
469
469
470 q = Session().query(pull_request_alias) \
470 q = Session().query(pull_request_alias) \
471 .options(lazyload(pull_request_alias.author)) \
471 .options(lazyload(pull_request_alias.author)) \
472 .join(reviewers_alias,
472 .join(reviewers_alias,
473 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
473 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
474 .join(repo_alias,
474 .join(repo_alias,
475 repo_alias.repo_id == pull_request_alias.target_repo_id) \
475 repo_alias.repo_id == pull_request_alias.target_repo_id) \
476 .outerjoin(status_alias,
476 .outerjoin(status_alias,
477 and_(status_alias.user_id == reviewers_alias.user_id,
477 and_(status_alias.user_id == reviewers_alias.user_id,
478 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
478 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
479 .filter(or_(status_alias.version == null(),
479 .filter(or_(status_alias.version == null(),
480 status_alias.version == last_ver_subq)) \
480 status_alias.version == last_ver_subq)) \
481 .filter(reviewers_alias.user_id == user_id) \
481 .filter(reviewers_alias.user_id == user_id) \
482 .filter(repo_alias.repo_name == repo_name) \
482 .filter(repo_alias.repo_name == repo_name) \
483 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
483 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
484 .group_by(pull_request_alias)
484 .group_by(pull_request_alias)
485
485
486 # closed,opened
486 # closed,opened
487 if statuses:
487 if statuses:
488 q = q.filter(pull_request_alias.status.in_(statuses))
488 q = q.filter(pull_request_alias.status.in_(statuses))
489
489
490 if search_q:
490 if search_q:
491 like_expression = u'%{}%'.format(safe_str(search_q))
491 like_expression = u'%{}%'.format(safe_str(search_q))
492 q = q.join(User, User.user_id == pull_request_alias.user_id)
492 q = q.join(User, User.user_id == pull_request_alias.user_id)
493 q = q.filter(or_(
493 q = q.filter(or_(
494 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
494 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
495 User.username.ilike(like_expression),
495 User.username.ilike(like_expression),
496 pull_request_alias.title.ilike(like_expression),
496 pull_request_alias.title.ilike(like_expression),
497 pull_request_alias.description.ilike(like_expression),
497 pull_request_alias.description.ilike(like_expression),
498 ))
498 ))
499
499
500 order_map = {
500 order_map = {
501 'name_raw': pull_request_alias.pull_request_id,
501 'name_raw': pull_request_alias.pull_request_id,
502 'title': pull_request_alias.title,
502 'title': pull_request_alias.title,
503 'updated_on_raw': pull_request_alias.updated_on,
503 'updated_on_raw': pull_request_alias.updated_on,
504 'target_repo': pull_request_alias.target_repo_id
504 'target_repo': pull_request_alias.target_repo_id
505 }
505 }
506 if order_by and order_by in order_map:
506 if order_by and order_by in order_map:
507 if order_dir == 'asc':
507 if order_dir == 'asc':
508 q = q.order_by(order_map[order_by].asc())
508 q = q.order_by(order_map[order_by].asc())
509 else:
509 else:
510 q = q.order_by(order_map[order_by].desc())
510 q = q.order_by(order_map[order_by].desc())
511
511
512 return q
512 return q
513
513
514 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
514 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
515 """
515 """
516 Count the number of pull requests for a specific repository that are
516 Count the number of pull requests for a specific repository that are
517 awaiting review from a specific user.
517 awaiting review from a specific user.
518
518
519 :param repo_name: target or source repo
519 :param repo_name: target or source repo
520 :param user_id: reviewer user of the pull request
520 :param user_id: reviewer user of the pull request
521 :param search_q: filter by text
521 :param search_q: filter by text
522 :param statuses: list of pull request statuses
522 :param statuses: list of pull request statuses
523 :returns: int number of pull requests
523 :returns: int number of pull requests
524 """
524 """
525 q = self._prepare_awaiting_my_review_review_query(
525 q = self._prepare_awaiting_my_review_review_query(
526 repo_name, user_id, search_q=search_q, statuses=statuses)
526 repo_name, user_id, search_q=search_q, statuses=statuses)
527 return q.count()
527 return q.count()
528
528
529 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
529 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
530 offset=0, length=None, order_by=None, order_dir='desc'):
530 offset=0, length=None, order_by=None, order_dir='desc'):
531 """
531 """
532 Get all pull requests for a specific repository that are awaiting
532 Get all pull requests for a specific repository that are awaiting
533 review from a specific user.
533 review from a specific user.
534
534
535 :param repo_name: target or source repo
535 :param repo_name: target or source repo
536 :param user_id: reviewer user of the pull request
536 :param user_id: reviewer user of the pull request
537 :param search_q: filter by text
537 :param search_q: filter by text
538 :param statuses: list of pull request statuses
538 :param statuses: list of pull request statuses
539 :param offset: pagination offset
539 :param offset: pagination offset
540 :param length: length of returned list
540 :param length: length of returned list
541 :param order_by: order of the returned list
541 :param order_by: order of the returned list
542 :param order_dir: 'asc' or 'desc' ordering direction
542 :param order_dir: 'asc' or 'desc' ordering direction
543 :returns: list of pull requests
543 :returns: list of pull requests
544 """
544 """
545
545
546 q = self._prepare_awaiting_my_review_review_query(
546 q = self._prepare_awaiting_my_review_review_query(
547 repo_name, user_id, search_q=search_q, statuses=statuses,
547 repo_name, user_id, search_q=search_q, statuses=statuses,
548 order_by=order_by, order_dir=order_dir)
548 order_by=order_by, order_dir=order_dir)
549
549
550 if length:
550 if length:
551 pull_requests = q.limit(length).offset(offset).all()
551 pull_requests = q.limit(length).offset(offset).all()
552 else:
552 else:
553 pull_requests = q.all()
553 pull_requests = q.all()
554
554
555 return pull_requests
555 return pull_requests
556
556
557 def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
557 def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
558 order_by=None, order_dir='desc'):
558 order_by=None, order_dir='desc'):
559 """
559 """
560 return a query of pull-requests user is an creator, or he's added as a reviewer
560 return a query of pull-requests user is an creator, or he's added as a reviewer
561 """
561 """
562 q = PullRequest.query()
562 q = PullRequest.query()
563 if user_id:
563 if user_id:
564
564
565 base_query = select(PullRequestReviewers)\
565 base_query = select(PullRequestReviewers)\
566 .where(PullRequestReviewers.user_id == user_id)\
566 .where(PullRequestReviewers.user_id == user_id)\
567 .with_only_columns(PullRequestReviewers.pull_request_id)
567 .with_only_columns(PullRequestReviewers.pull_request_id)
568
568
569 user_filter = or_(
569 user_filter = or_(
570 PullRequest.user_id == user_id,
570 PullRequest.user_id == user_id,
571 PullRequest.pull_request_id.in_(base_query)
571 PullRequest.pull_request_id.in_(base_query)
572 )
572 )
573 q = PullRequest.query().filter(user_filter)
573 q = PullRequest.query().filter(user_filter)
574
574
575 # closed,opened
575 # closed,opened
576 if statuses:
576 if statuses:
577 q = q.filter(PullRequest.status.in_(statuses))
577 q = q.filter(PullRequest.status.in_(statuses))
578
578
579 if query:
579 if query:
580 like_expression = u'%{}%'.format(safe_str(query))
580 like_expression = u'%{}%'.format(safe_str(query))
581 q = q.join(User, User.user_id == PullRequest.user_id)
581 q = q.join(User, User.user_id == PullRequest.user_id)
582 q = q.filter(or_(
582 q = q.filter(or_(
583 cast(PullRequest.pull_request_id, String).ilike(like_expression),
583 cast(PullRequest.pull_request_id, String).ilike(like_expression),
584 User.username.ilike(like_expression),
584 User.username.ilike(like_expression),
585 PullRequest.title.ilike(like_expression),
585 PullRequest.title.ilike(like_expression),
586 PullRequest.description.ilike(like_expression),
586 PullRequest.description.ilike(like_expression),
587 ))
587 ))
588
588
589 order_map = {
589 order_map = {
590 'name_raw': PullRequest.pull_request_id,
590 'name_raw': PullRequest.pull_request_id,
591 'title': PullRequest.title,
591 'title': PullRequest.title,
592 'updated_on_raw': PullRequest.updated_on,
592 'updated_on_raw': PullRequest.updated_on,
593 'target_repo': PullRequest.target_repo_id
593 'target_repo': PullRequest.target_repo_id
594 }
594 }
595 if order_by and order_by in order_map:
595 if order_by and order_by in order_map:
596 if order_dir == 'asc':
596 if order_dir == 'asc':
597 q = q.order_by(order_map[order_by].asc())
597 q = q.order_by(order_map[order_by].asc())
598 else:
598 else:
599 q = q.order_by(order_map[order_by].desc())
599 q = q.order_by(order_map[order_by].desc())
600
600
601 return q
601 return q
602
602
603 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
603 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
604 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
604 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
605 return q.count()
605 return q.count()
606
606
607 def get_im_participating_in(
607 def get_im_participating_in(
608 self, user_id=None, statuses=None, query='', offset=0,
608 self, user_id=None, statuses=None, query='', offset=0,
609 length=None, order_by=None, order_dir='desc'):
609 length=None, order_by=None, order_dir='desc'):
610 """
610 """
611 Get all Pull requests that i'm participating in as a reviewer, or i have opened
611 Get all Pull requests that i'm participating in as a reviewer, or i have opened
612 """
612 """
613
613
614 q = self._prepare_im_participating_query(
614 q = self._prepare_im_participating_query(
615 user_id, statuses=statuses, query=query, order_by=order_by,
615 user_id, statuses=statuses, query=query, order_by=order_by,
616 order_dir=order_dir)
616 order_dir=order_dir)
617
617
618 if length:
618 if length:
619 pull_requests = q.limit(length).offset(offset).all()
619 pull_requests = q.limit(length).offset(offset).all()
620 else:
620 else:
621 pull_requests = q.all()
621 pull_requests = q.all()
622
622
623 return pull_requests
623 return pull_requests
624
624
625 def _prepare_participating_in_for_review_query(
625 def _prepare_participating_in_for_review_query(
626 self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):
626 self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):
627
627
628 for_review_statuses = [
628 for_review_statuses = [
629 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
629 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
630 ]
630 ]
631
631
632 pull_request_alias = aliased(PullRequest)
632 pull_request_alias = aliased(PullRequest)
633 status_alias = aliased(ChangesetStatus)
633 status_alias = aliased(ChangesetStatus)
634 reviewers_alias = aliased(PullRequestReviewers)
634 reviewers_alias = aliased(PullRequestReviewers)
635
635
636 last_ver_subq = Session()\
636 last_ver_subq = Session()\
637 .query(func.min(ChangesetStatus.version)) \
637 .query(func.min(ChangesetStatus.version)) \
638 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
638 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
639 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
639 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
640 .subquery()
640 .subquery()
641
641
642 q = Session().query(pull_request_alias) \
642 q = Session().query(pull_request_alias) \
643 .options(lazyload(pull_request_alias.author)) \
643 .options(lazyload(pull_request_alias.author)) \
644 .join(reviewers_alias,
644 .join(reviewers_alias,
645 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
645 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
646 .outerjoin(status_alias,
646 .outerjoin(status_alias,
647 and_(status_alias.user_id == reviewers_alias.user_id,
647 and_(status_alias.user_id == reviewers_alias.user_id,
648 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
648 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
649 .filter(or_(status_alias.version == null(),
649 .filter(or_(status_alias.version == null(),
650 status_alias.version == last_ver_subq)) \
650 status_alias.version == last_ver_subq)) \
651 .filter(reviewers_alias.user_id == user_id) \
651 .filter(reviewers_alias.user_id == user_id) \
652 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
652 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
653 .group_by(pull_request_alias)
653 .group_by(pull_request_alias)
654
654
655 # closed,opened
655 # closed,opened
656 if statuses:
656 if statuses:
657 q = q.filter(pull_request_alias.status.in_(statuses))
657 q = q.filter(pull_request_alias.status.in_(statuses))
658
658
659 if query:
659 if query:
660 like_expression = u'%{}%'.format(safe_str(query))
660 like_expression = u'%{}%'.format(safe_str(query))
661 q = q.join(User, User.user_id == pull_request_alias.user_id)
661 q = q.join(User, User.user_id == pull_request_alias.user_id)
662 q = q.filter(or_(
662 q = q.filter(or_(
663 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
663 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
664 User.username.ilike(like_expression),
664 User.username.ilike(like_expression),
665 pull_request_alias.title.ilike(like_expression),
665 pull_request_alias.title.ilike(like_expression),
666 pull_request_alias.description.ilike(like_expression),
666 pull_request_alias.description.ilike(like_expression),
667 ))
667 ))
668
668
669 order_map = {
669 order_map = {
670 'name_raw': pull_request_alias.pull_request_id,
670 'name_raw': pull_request_alias.pull_request_id,
671 'title': pull_request_alias.title,
671 'title': pull_request_alias.title,
672 'updated_on_raw': pull_request_alias.updated_on,
672 'updated_on_raw': pull_request_alias.updated_on,
673 'target_repo': pull_request_alias.target_repo_id
673 'target_repo': pull_request_alias.target_repo_id
674 }
674 }
675 if order_by and order_by in order_map:
675 if order_by and order_by in order_map:
676 if order_dir == 'asc':
676 if order_dir == 'asc':
677 q = q.order_by(order_map[order_by].asc())
677 q = q.order_by(order_map[order_by].asc())
678 else:
678 else:
679 q = q.order_by(order_map[order_by].desc())
679 q = q.order_by(order_map[order_by].desc())
680
680
681 return q
681 return q
682
682
683 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
683 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
684 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
684 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
685 return q.count()
685 return q.count()
686
686
687 def get_im_participating_in_for_review(
687 def get_im_participating_in_for_review(
688 self, user_id, statuses=None, query='', offset=0,
688 self, user_id, statuses=None, query='', offset=0,
689 length=None, order_by=None, order_dir='desc'):
689 length=None, order_by=None, order_dir='desc'):
690 """
690 """
691 Get all Pull requests that needs user approval or rejection
691 Get all Pull requests that needs user approval or rejection
692 """
692 """
693
693
694 q = self._prepare_participating_in_for_review_query(
694 q = self._prepare_participating_in_for_review_query(
695 user_id, statuses=statuses, query=query, order_by=order_by,
695 user_id, statuses=statuses, query=query, order_by=order_by,
696 order_dir=order_dir)
696 order_dir=order_dir)
697
697
698 if length:
698 if length:
699 pull_requests = q.limit(length).offset(offset).all()
699 pull_requests = q.limit(length).offset(offset).all()
700 else:
700 else:
701 pull_requests = q.all()
701 pull_requests = q.all()
702
702
703 return pull_requests
703 return pull_requests
704
704
    def get_versions(self, pull_request):
        """
        Return all versions of the given pull request, sorted by version id
        ascending (oldest version first).

        NOTE(review): an earlier docstring claimed descending order, but the
        query explicitly orders by ``pull_request_version_id.asc()``.
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
713
713
714 def get_pr_version(self, pull_request_id, version=None):
714 def get_pr_version(self, pull_request_id, version=None):
715 at_version = None
715 at_version = None
716
716
717 if version and version == 'latest':
717 if version and version == 'latest':
718 pull_request_ver = PullRequest.get(pull_request_id)
718 pull_request_ver = PullRequest.get(pull_request_id)
719 pull_request_obj = pull_request_ver
719 pull_request_obj = pull_request_ver
720 _org_pull_request_obj = pull_request_obj
720 _org_pull_request_obj = pull_request_obj
721 at_version = 'latest'
721 at_version = 'latest'
722 elif version:
722 elif version:
723 pull_request_ver = PullRequestVersion.get_or_404(version)
723 pull_request_ver = PullRequestVersion.get_or_404(version)
724 pull_request_obj = pull_request_ver
724 pull_request_obj = pull_request_ver
725 _org_pull_request_obj = pull_request_ver.pull_request
725 _org_pull_request_obj = pull_request_ver.pull_request
726 at_version = pull_request_ver.pull_request_version_id
726 at_version = pull_request_ver.pull_request_version_id
727 else:
727 else:
728 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
728 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
729 pull_request_id)
729 pull_request_id)
730
730
731 pull_request_display_obj = PullRequest.get_pr_display_object(
731 pull_request_display_obj = PullRequest.get_pr_display_object(
732 pull_request_obj, _org_pull_request_obj)
732 pull_request_obj, _org_pull_request_obj)
733
733
734 return _org_pull_request_obj, pull_request_obj, \
734 return _org_pull_request_obj, pull_request_obj, \
735 pull_request_display_obj, at_version
735 pull_request_display_obj, at_version
736
736
737 def pr_commits_versions(self, versions):
737 def pr_commits_versions(self, versions):
738 """
738 """
739 Maps the pull-request commits into all known PR versions. This way we can obtain
739 Maps the pull-request commits into all known PR versions. This way we can obtain
740 each pr version the commit was introduced in.
740 each pr version the commit was introduced in.
741 """
741 """
742 commit_versions = collections.defaultdict(list)
742 commit_versions = collections.defaultdict(list)
743 num_versions = [x.pull_request_version_id for x in versions]
743 num_versions = [x.pull_request_version_id for x in versions]
744 for ver in versions:
744 for ver in versions:
745 for commit_id in ver.revisions:
745 for commit_id in ver.revisions:
746 ver_idx = ChangesetComment.get_index_from_version(
746 ver_idx = ChangesetComment.get_index_from_version(
747 ver.pull_request_version_id, num_versions=num_versions)
747 ver.pull_request_version_id, num_versions=num_versions)
748 commit_versions[commit_id].append(ver_idx)
748 commit_versions[commit_id].append(ver_idx)
749 return commit_versions
749 return commit_versions
750
750
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, observers, title, description=None,
               common_ancestor_id=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        """
        Create a new pull request with its reviewers and observers, mark all
        its commits as "Under Review", commit the transaction, run an initial
        merge simulation, notify reviewers and fire the 'create' hook.

        :param created_by: user (or user id/name) creating the pull request
        :param source_repo: source repository (or its id/name)
        :param source_ref: source reference spec
        :param target_repo: target repository (or its id/name)
        :param target_ref: target reference spec
        :param revisions: list of commit ids included in the pull request
        :param reviewers: iterable of (user_id, reasons, mandatory, role, rules)
        :param observers: iterable of the same tuple shape as reviewers
        :param title: pull request title
        :param description: optional description text
        :param common_ancestor_id: optional pre-computed common ancestor commit
        :param description_renderer: renderer used for the description
        :param reviewer_data: extra reviewer rule data stored on the PR
        :param translator: translation function; defaults to the current request's
        :param auth_user: acting auth user; defaults to the creator's AuthUser
        :returns: the newly created PullRequest (re-fetched after commit)
        """
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        # build the pull request object; state starts as CREATING until the
        # merge simulation below finishes
        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data
        pull_request.pull_request_state = pull_request.STATE_CREATING
        pull_request.common_ancestor_id = common_ancestor_id

        # flush so pull_request gets an id reviewers/observers can reference
        Session().add(pull_request)
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, role, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory
            reviewer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # can be that user is member of more but we pick the first same,
                    # same as default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
            Session().flush()

        # observers get a row in the same table but with an observer role;
        # users already registered as reviewers are not duplicated
        for observer_object in observers:
            user_id, reasons, mandatory, role, rules = observer_object
            user = self._get_user(user_id)

            # skip duplicates from reviewers
            if user.user_id in reviewer_ids:
                continue

            #reviewer_ids.add(user.user_id)

            observer = PullRequestReviewers()
            observer.user = user
            observer.pull_request = pull_request
            observer.reasons = reasons
            observer.mandatory = mandatory
            observer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                # TODO(marcink): do we need this for observers ??
                pass

            Session().add(observer)
            Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. This has to do with a fact
        # that before queries do some row-locking. And because of that
        # we need to commit and finish transaction before below validate call
        # that for large repos could be long resulting in long row locks
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set as merging, for merge simulation, and if finished to created so we mark
        # simulation is working fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED) as state_obj:
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
        self.trigger_pull_request_hook(pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
892
892
893 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
893 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
894 pull_request = self.__get_pull_request(pull_request)
894 pull_request = self.__get_pull_request(pull_request)
895 target_scm = pull_request.target_repo.scm_instance()
895 target_scm = pull_request.target_repo.scm_instance()
896 if action == 'create':
896 if action == 'create':
897 trigger_hook = hooks_utils.trigger_create_pull_request_hook
897 trigger_hook = hooks_utils.trigger_create_pull_request_hook
898 elif action == 'merge':
898 elif action == 'merge':
899 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
899 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
900 elif action == 'close':
900 elif action == 'close':
901 trigger_hook = hooks_utils.trigger_close_pull_request_hook
901 trigger_hook = hooks_utils.trigger_close_pull_request_hook
902 elif action == 'review_status_change':
902 elif action == 'review_status_change':
903 trigger_hook = hooks_utils.trigger_review_pull_request_hook
903 trigger_hook = hooks_utils.trigger_review_pull_request_hook
904 elif action == 'update':
904 elif action == 'update':
905 trigger_hook = hooks_utils.trigger_update_pull_request_hook
905 trigger_hook = hooks_utils.trigger_update_pull_request_hook
906 elif action == 'comment':
906 elif action == 'comment':
907 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
907 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
908 elif action == 'comment_edit':
908 elif action == 'comment_edit':
909 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
909 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
910 else:
910 else:
911 return
911 return
912
912
913 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
913 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
914 pull_request, action, trigger_hook)
914 pull_request, action, trigger_hook)
915 trigger_hook(
915 trigger_hook(
916 username=user.username,
916 username=user.username,
917 repo_name=pull_request.target_repo.repo_name,
917 repo_name=pull_request.target_repo.repo_name,
918 repo_type=target_scm.alias,
918 repo_type=target_scm.alias,
919 pull_request=pull_request,
919 pull_request=pull_request,
920 data=data)
920 data=data)
921
921
922 def _get_commit_ids(self, pull_request):
922 def _get_commit_ids(self, pull_request):
923 """
923 """
924 Return the commit ids of the merged pull request.
924 Return the commit ids of the merged pull request.
925
925
926 This method is not dealing correctly yet with the lack of autoupdates
926 This method is not dealing correctly yet with the lack of autoupdates
927 nor with the implicit target updates.
927 nor with the implicit target updates.
928 For example: if a commit in the source repo is already in the target it
928 For example: if a commit in the source repo is already in the target it
929 will be reported anyways.
929 will be reported anyways.
930 """
930 """
931 merge_rev = pull_request.merge_rev
931 merge_rev = pull_request.merge_rev
932 if merge_rev is None:
932 if merge_rev is None:
933 raise ValueError('This pull request was not merged yet')
933 raise ValueError('This pull request was not merged yet')
934
934
935 commit_ids = list(pull_request.revisions)
935 commit_ids = list(pull_request.revisions)
936 if merge_rev not in commit_ids:
936 if merge_rev not in commit_ids:
937 commit_ids.append(merge_rev)
937 commit_ids.append(merge_rev)
938
938
939 return commit_ids
939 return commit_ids
940
940
941 def merge_repo(self, pull_request, user, extras):
941 def merge_repo(self, pull_request, user, extras):
942 repo_type = pull_request.source_repo.repo_type
942 repo_type = pull_request.source_repo.repo_type
943 log.debug("Merging pull request %s", pull_request)
943 log.debug("Merging pull request %s", pull_request)
944
944
945 extras['user_agent'] = '{}/internal-merge'.format(repo_type)
945 extras['user_agent'] = '{}/internal-merge'.format(repo_type)
946 merge_state = self._merge_pull_request(pull_request, user, extras)
946 merge_state = self._merge_pull_request(pull_request, user, extras)
947 if merge_state.executed:
947 if merge_state.executed:
948 log.debug("Merge was successful, updating the pull request comments.")
948 log.debug("Merge was successful, updating the pull request comments.")
949 self._comment_and_close_pr(pull_request, user, merge_state)
949 self._comment_and_close_pr(pull_request, user, merge_state)
950
950
951 self._log_audit_action(
951 self._log_audit_action(
952 'repo.pull_request.merge',
952 'repo.pull_request.merge',
953 {'merge_state': merge_state.__dict__},
953 {'merge_state': merge_state.__dict__},
954 user, pull_request)
954 user, pull_request)
955
955
956 else:
956 else:
957 log.warning("Merge failed, not updating the pull request.")
957 log.warning("Merge failed, not updating the pull request.")
958 return merge_state
958 return merge_state
959
959
960 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
960 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
961 target_vcs = pull_request.target_repo.scm_instance()
961 target_vcs = pull_request.target_repo.scm_instance()
962 source_vcs = pull_request.source_repo.scm_instance()
962 source_vcs = pull_request.source_repo.scm_instance()
963
963
964 message = safe_str(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
964 message = safe_str(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
965 pr_id=pull_request.pull_request_id,
965 pr_id=pull_request.pull_request_id,
966 pr_title=pull_request.title,
966 pr_title=pull_request.title,
967 pr_desc=pull_request.description,
967 pr_desc=pull_request.description,
968 source_repo=source_vcs.name,
968 source_repo=source_vcs.name,
969 source_ref_name=pull_request.source_ref_parts.name,
969 source_ref_name=pull_request.source_ref_parts.name,
970 target_repo=target_vcs.name,
970 target_repo=target_vcs.name,
971 target_ref_name=pull_request.target_ref_parts.name,
971 target_ref_name=pull_request.target_ref_parts.name,
972 )
972 )
973
973
974 workspace_id = self._workspace_id(pull_request)
974 workspace_id = self._workspace_id(pull_request)
975 repo_id = pull_request.target_repo.repo_id
975 repo_id = pull_request.target_repo.repo_id
976 use_rebase = self._use_rebase_for_merging(pull_request)
976 use_rebase = self._use_rebase_for_merging(pull_request)
977 close_branch = self._close_branch_before_merging(pull_request)
977 close_branch = self._close_branch_before_merging(pull_request)
978 user_name = self._user_name_for_merging(pull_request, user)
978 user_name = self._user_name_for_merging(pull_request, user)
979
979
980 target_ref = self._refresh_reference(
980 target_ref = self._refresh_reference(
981 pull_request.target_ref_parts, target_vcs)
981 pull_request.target_ref_parts, target_vcs)
982
982
983 callback_daemon, extras = prepare_callback_daemon(
983 callback_daemon, extras = prepare_callback_daemon(
984 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
984 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
985 host=vcs_settings.HOOKS_HOST,
985 host=vcs_settings.HOOKS_HOST,
986 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
986 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
987
987
988 with callback_daemon:
988 with callback_daemon:
989 # TODO: johbo: Implement a clean way to run a config_override
989 # TODO: johbo: Implement a clean way to run a config_override
990 # for a single call.
990 # for a single call.
991 target_vcs.config.set(
991 target_vcs.config.set(
992 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
992 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
993
993
994 merge_state = target_vcs.merge(
994 merge_state = target_vcs.merge(
995 repo_id, workspace_id, target_ref, source_vcs,
995 repo_id, workspace_id, target_ref, source_vcs,
996 pull_request.source_ref_parts,
996 pull_request.source_ref_parts,
997 user_name=user_name, user_email=user.email,
997 user_name=user_name, user_email=user.email,
998 message=message, use_rebase=use_rebase,
998 message=message, use_rebase=use_rebase,
999 close_branch=close_branch)
999 close_branch=close_branch)
1000
1000
1001 return merge_state
1001 return merge_state
1002
1002
1003 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
1003 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
1004 pull_request.merge_rev = merge_state.merge_ref.commit_id
1004 pull_request.merge_rev = merge_state.merge_ref.commit_id
1005 pull_request.updated_on = datetime.datetime.now()
1005 pull_request.updated_on = datetime.datetime.now()
1006 close_msg = close_msg or 'Pull request merged and closed'
1006 close_msg = close_msg or 'Pull request merged and closed'
1007
1007
1008 CommentsModel().create(
1008 CommentsModel().create(
1009 text=safe_str(close_msg),
1009 text=safe_str(close_msg),
1010 repo=pull_request.target_repo.repo_id,
1010 repo=pull_request.target_repo.repo_id,
1011 user=user.user_id,
1011 user=user.user_id,
1012 pull_request=pull_request.pull_request_id,
1012 pull_request=pull_request.pull_request_id,
1013 f_path=None,
1013 f_path=None,
1014 line_no=None,
1014 line_no=None,
1015 closing_pr=True
1015 closing_pr=True
1016 )
1016 )
1017
1017
1018 Session().add(pull_request)
1018 Session().add(pull_request)
1019 Session().flush()
1019 Session().flush()
1020 # TODO: paris: replace invalidation with less radical solution
1020 # TODO: paris: replace invalidation with less radical solution
1021 ScmModel().mark_for_invalidation(
1021 ScmModel().mark_for_invalidation(
1022 pull_request.target_repo.repo_name)
1022 pull_request.target_repo.repo_name)
1023 self.trigger_pull_request_hook(pull_request, user, 'merge')
1023 self.trigger_pull_request_hook(pull_request, user, 'merge')
1024
1024
1025 def has_valid_update_type(self, pull_request):
1025 def has_valid_update_type(self, pull_request):
1026 source_ref_type = pull_request.source_ref_parts.type
1026 source_ref_type = pull_request.source_ref_parts.type
1027 return source_ref_type in self.REF_TYPES
1027 return source_ref_type in self.REF_TYPES
1028
1028
1029 def get_flow_commits(self, pull_request):
1029 def get_flow_commits(self, pull_request):
1030
1030
1031 # source repo
1031 # source repo
1032 source_ref_name = pull_request.source_ref_parts.name
1032 source_ref_name = pull_request.source_ref_parts.name
1033 source_ref_type = pull_request.source_ref_parts.type
1033 source_ref_type = pull_request.source_ref_parts.type
1034 source_ref_id = pull_request.source_ref_parts.commit_id
1034 source_ref_id = pull_request.source_ref_parts.commit_id
1035 source_repo = pull_request.source_repo.scm_instance()
1035 source_repo = pull_request.source_repo.scm_instance()
1036
1036
1037 try:
1037 try:
1038 if source_ref_type in self.REF_TYPES:
1038 if source_ref_type in self.REF_TYPES:
1039 source_commit = source_repo.get_commit(
1039 source_commit = source_repo.get_commit(
1040 source_ref_name, reference_obj=pull_request.source_ref_parts)
1040 source_ref_name, reference_obj=pull_request.source_ref_parts)
1041 else:
1041 else:
1042 source_commit = source_repo.get_commit(source_ref_id)
1042 source_commit = source_repo.get_commit(source_ref_id)
1043 except CommitDoesNotExistError:
1043 except CommitDoesNotExistError:
1044 raise SourceRefMissing()
1044 raise SourceRefMissing()
1045
1045
1046 # target repo
1046 # target repo
1047 target_ref_name = pull_request.target_ref_parts.name
1047 target_ref_name = pull_request.target_ref_parts.name
1048 target_ref_type = pull_request.target_ref_parts.type
1048 target_ref_type = pull_request.target_ref_parts.type
1049 target_ref_id = pull_request.target_ref_parts.commit_id
1049 target_ref_id = pull_request.target_ref_parts.commit_id
1050 target_repo = pull_request.target_repo.scm_instance()
1050 target_repo = pull_request.target_repo.scm_instance()
1051
1051
1052 try:
1052 try:
1053 if target_ref_type in self.REF_TYPES:
1053 if target_ref_type in self.REF_TYPES:
1054 target_commit = target_repo.get_commit(
1054 target_commit = target_repo.get_commit(
1055 target_ref_name, reference_obj=pull_request.target_ref_parts)
1055 target_ref_name, reference_obj=pull_request.target_ref_parts)
1056 else:
1056 else:
1057 target_commit = target_repo.get_commit(target_ref_id)
1057 target_commit = target_repo.get_commit(target_ref_id)
1058 except CommitDoesNotExistError:
1058 except CommitDoesNotExistError:
1059 raise TargetRefMissing()
1059 raise TargetRefMissing()
1060
1060
1061 return source_commit, target_commit
1061 return source_commit, target_commit
1062
1062
    def update_commits(self, pull_request, updating_user):
        """
        Get the updated list of commits for the pull request
        and return the new pull request version and the list
        of commits processed by this update action

        updating_user is the user_object who triggered the update
        """
        pull_request = self.__get_pull_request(pull_request)

        # snapshot the ref parts as currently stored on the PR; these are
        # compared against freshly resolved commits below to detect drift
        source_ref_type = pull_request.source_ref_parts.type
        source_ref_name = pull_request.source_ref_parts.name
        source_ref_id = pull_request.source_ref_parts.commit_id

        target_ref_type = pull_request.target_ref_parts.type
        target_ref_name = pull_request.target_ref_parts.name
        target_ref_id = pull_request.target_ref_parts.commit_id

        if not self.has_valid_update_type(pull_request):
            log.debug("Skipping update of pull request %s due to ref type: %s",
                      pull_request, source_ref_type)
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.WRONG_REF_TYPE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        # resolve both flow commits; each missing side maps to its own
        # dedicated failure reason
        try:
            source_commit, target_commit = self.get_flow_commits(pull_request)
        except SourceRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_SOURCE_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)
        except TargetRefMissing:
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.MISSING_TARGET_REF,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=False, target_changed=False)

        source_changed = source_ref_id != source_commit.raw_id
        target_changed = target_ref_id != target_commit.raw_id

        if not (source_changed or target_changed):
            log.debug("Nothing changed in pull request %s", pull_request)
            # NOTE(review): source_changed/target_changed look swapped in the
            # kwargs below; harmless here since both are False in this branch,
            # but worth confirming the intent.
            return UpdateResponse(
                executed=False,
                reason=UpdateFailureReason.NO_CHANGE,
                old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
                source_changed=target_changed, target_changed=source_changed)

        change_in_found = 'target repo' if target_changed else 'source repo'
        log.debug('Updating pull request because of change in %s detected',
                  change_in_found)

        # Finally there is a need for an update, in case of source change
        # we create a new version, else just an update
        if source_changed:
            pull_request_version = self._create_version_from_snapshot(pull_request)
            self._link_comments_to_version(pull_request_version)
        else:
            # target-only change: reuse the latest existing version (if any)
            # instead of creating a new one
            try:
                ver = pull_request.versions[-1]
            except IndexError:
                ver = None

            pull_request.pull_request_version_id = \
                ver.pull_request_version_id if ver else None
            pull_request_version = pull_request

        source_repo = pull_request.source_repo.scm_instance()
        target_repo = pull_request.target_repo.scm_instance()

        # re-compute commit ids
        old_commit_ids = pull_request.revisions
        pre_load = ["author", "date", "message", "branch"]
        commit_ranges = target_repo.compare(
            target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
            pre_load=pre_load)

        target_ref = target_commit.raw_id
        source_ref = source_commit.raw_id
        ancestor_commit_id = target_repo.get_common_ancestor(
            target_ref, source_ref, source_repo)

        if not ancestor_commit_id:
            raise ValueError(
                'cannot calculate diff info without a common ancestor. '
                'Make sure both repositories are related, and have a common forking commit.')

        pull_request.common_ancestor_id = ancestor_commit_id

        # re-point the stored refs at the freshly resolved commits; the target
        # ref is pinned to the common ancestor
        pull_request.source_ref = f'{source_ref_type}:{source_ref_name}:{source_commit.raw_id}'
        pull_request.target_ref = f'{target_ref_type}:{target_ref_name}:{ancestor_commit_id}'

        pull_request.revisions = [
            commit.raw_id for commit in reversed(commit_ranges)]
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        new_commit_ids = pull_request.revisions

        old_diff_data, new_diff_data = self._generate_update_diffs(
            pull_request, pull_request_version)

        # calculate commit and file changes
        commit_changes = self._calculate_commit_id_changes(
            old_commit_ids, new_commit_ids)
        file_changes = self._calculate_file_changes(
            old_diff_data, new_diff_data)

        # set comments as outdated if DIFFS changed
        CommentsModel().outdate_comments(
            pull_request, old_diff_data=old_diff_data,
            new_diff_data=new_diff_data)

        valid_commit_changes = (commit_changes.added or commit_changes.removed)
        file_node_changes = (
            file_changes.added or file_changes.modified or file_changes.removed)
        pr_has_changes = valid_commit_changes or file_node_changes

        # Add an automatic comment to the pull request, in case
        # anything has changed
        if pr_has_changes:
            update_comment = CommentsModel().create(
                text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
                repo=pull_request.target_repo,
                user=pull_request.author,
                pull_request=pull_request,
                send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)

            # Update status to "Under Review" for added commits
            for commit_id in commit_changes.added:
                ChangesetStatusModel().set_status(
                    repo=pull_request.source_repo,
                    status=ChangesetStatus.STATUS_UNDER_REVIEW,
                    comment=update_comment,
                    user=pull_request.author,
                    pull_request=pull_request,
                    revision=commit_id)

        # initial commit
        Session().commit()

        if pr_has_changes:
            # send update email to users; notification failure must not
            # roll back the update itself, only the notification
            try:
                self.notify_users(pull_request=pull_request, updating_user=updating_user,
                                  ancestor_commit_id=ancestor_commit_id,
                                  commit_changes=commit_changes,
                                  file_changes=file_changes)
                Session().commit()
            except Exception:
                log.exception('Failed to send email notification to users')
                Session().rollback()

        log.debug(
            'Updated pull request %s, added_ids: %s, common_ids: %s, '
            'removed_ids: %s', pull_request.pull_request_id,
            commit_changes.added, commit_changes.common, commit_changes.removed)
        log.debug(
            'Updated pull request with the following file changes: %s',
            file_changes)

        log.info(
            "Updated pull request %s from commit %s to commit %s, "
            "stored new version %s of this pull request.",
            pull_request.pull_request_id, source_ref_id,
            pull_request.source_ref_parts.commit_id,
            pull_request_version.pull_request_version_id)

        self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')

        return UpdateResponse(
            executed=True, reason=UpdateFailureReason.NONE,
            old=pull_request, new=pull_request_version,
            common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
            source_changed=source_changed, target_changed=target_changed)
1243
1241
1244 def _create_version_from_snapshot(self, pull_request):
1242 def _create_version_from_snapshot(self, pull_request):
1245 version = PullRequestVersion()
1243 version = PullRequestVersion()
1246 version.title = pull_request.title
1244 version.title = pull_request.title
1247 version.description = pull_request.description
1245 version.description = pull_request.description
1248 version.status = pull_request.status
1246 version.status = pull_request.status
1249 version.pull_request_state = pull_request.pull_request_state
1247 version.pull_request_state = pull_request.pull_request_state
1250 version.created_on = datetime.datetime.now()
1248 version.created_on = datetime.datetime.now()
1251 version.updated_on = pull_request.updated_on
1249 version.updated_on = pull_request.updated_on
1252 version.user_id = pull_request.user_id
1250 version.user_id = pull_request.user_id
1253 version.source_repo = pull_request.source_repo
1251 version.source_repo = pull_request.source_repo
1254 version.source_ref = pull_request.source_ref
1252 version.source_ref = pull_request.source_ref
1255 version.target_repo = pull_request.target_repo
1253 version.target_repo = pull_request.target_repo
1256 version.target_ref = pull_request.target_ref
1254 version.target_ref = pull_request.target_ref
1257
1255
1258 version._last_merge_source_rev = pull_request._last_merge_source_rev
1256 version._last_merge_source_rev = pull_request._last_merge_source_rev
1259 version._last_merge_target_rev = pull_request._last_merge_target_rev
1257 version._last_merge_target_rev = pull_request._last_merge_target_rev
1260 version.last_merge_status = pull_request.last_merge_status
1258 version.last_merge_status = pull_request.last_merge_status
1261 version.last_merge_metadata = pull_request.last_merge_metadata
1259 version.last_merge_metadata = pull_request.last_merge_metadata
1262 version.shadow_merge_ref = pull_request.shadow_merge_ref
1260 version.shadow_merge_ref = pull_request.shadow_merge_ref
1263 version.merge_rev = pull_request.merge_rev
1261 version.merge_rev = pull_request.merge_rev
1264 version.reviewer_data = pull_request.reviewer_data
1262 version.reviewer_data = pull_request.reviewer_data
1265
1263
1266 version.revisions = pull_request.revisions
1264 version.revisions = pull_request.revisions
1267 version.common_ancestor_id = pull_request.common_ancestor_id
1265 version.common_ancestor_id = pull_request.common_ancestor_id
1268 version.pull_request = pull_request
1266 version.pull_request = pull_request
1269 Session().add(version)
1267 Session().add(version)
1270 Session().flush()
1268 Session().flush()
1271
1269
1272 return version
1270 return version
1273
1271
1274 def _generate_update_diffs(self, pull_request, pull_request_version):
1272 def _generate_update_diffs(self, pull_request, pull_request_version):
1275
1273
1276 diff_context = (
1274 diff_context = (
1277 self.DIFF_CONTEXT +
1275 self.DIFF_CONTEXT +
1278 CommentsModel.needed_extra_diff_context())
1276 CommentsModel.needed_extra_diff_context())
1279 hide_whitespace_changes = False
1277 hide_whitespace_changes = False
1280 source_repo = pull_request_version.source_repo
1278 source_repo = pull_request_version.source_repo
1281 source_ref_id = pull_request_version.source_ref_parts.commit_id
1279 source_ref_id = pull_request_version.source_ref_parts.commit_id
1282 target_ref_id = pull_request_version.target_ref_parts.commit_id
1280 target_ref_id = pull_request_version.target_ref_parts.commit_id
1283 old_diff = self._get_diff_from_pr_or_version(
1281 old_diff = self._get_diff_from_pr_or_version(
1284 source_repo, source_ref_id, target_ref_id,
1282 source_repo, source_ref_id, target_ref_id,
1285 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1283 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1286
1284
1287 source_repo = pull_request.source_repo
1285 source_repo = pull_request.source_repo
1288 source_ref_id = pull_request.source_ref_parts.commit_id
1286 source_ref_id = pull_request.source_ref_parts.commit_id
1289 target_ref_id = pull_request.target_ref_parts.commit_id
1287 target_ref_id = pull_request.target_ref_parts.commit_id
1290
1288
1291 new_diff = self._get_diff_from_pr_or_version(
1289 new_diff = self._get_diff_from_pr_or_version(
1292 source_repo, source_ref_id, target_ref_id,
1290 source_repo, source_ref_id, target_ref_id,
1293 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1291 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1294
1292
1295 # NOTE: this was using diff_format='gitdiff'
1293 # NOTE: this was using diff_format='gitdiff'
1296 old_diff_data = diffs.DiffProcessor(old_diff, diff_format='newdiff')
1294 old_diff_data = diffs.DiffProcessor(old_diff, diff_format='newdiff')
1297 old_diff_data.prepare()
1295 old_diff_data.prepare()
1298 new_diff_data = diffs.DiffProcessor(new_diff, diff_format='newdiff')
1296 new_diff_data = diffs.DiffProcessor(new_diff, diff_format='newdiff')
1299 new_diff_data.prepare()
1297 new_diff_data.prepare()
1300
1298
1301 return old_diff_data, new_diff_data
1299 return old_diff_data, new_diff_data
1302
1300
1303 def _link_comments_to_version(self, pull_request_version):
1301 def _link_comments_to_version(self, pull_request_version):
1304 """
1302 """
1305 Link all unlinked comments of this pull request to the given version.
1303 Link all unlinked comments of this pull request to the given version.
1306
1304
1307 :param pull_request_version: The `PullRequestVersion` to which
1305 :param pull_request_version: The `PullRequestVersion` to which
1308 the comments shall be linked.
1306 the comments shall be linked.
1309
1307
1310 """
1308 """
1311 pull_request = pull_request_version.pull_request
1309 pull_request = pull_request_version.pull_request
1312 comments = ChangesetComment.query()\
1310 comments = ChangesetComment.query()\
1313 .filter(
1311 .filter(
1314 # TODO: johbo: Should we query for the repo at all here?
1312 # TODO: johbo: Should we query for the repo at all here?
1315 # Pending decision on how comments of PRs are to be related
1313 # Pending decision on how comments of PRs are to be related
1316 # to either the source repo, the target repo or no repo at all.
1314 # to either the source repo, the target repo or no repo at all.
1317 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1315 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1318 ChangesetComment.pull_request == pull_request,
1316 ChangesetComment.pull_request == pull_request,
1319 ChangesetComment.pull_request_version == null())\
1317 ChangesetComment.pull_request_version == null())\
1320 .order_by(ChangesetComment.comment_id.asc())
1318 .order_by(ChangesetComment.comment_id.asc())
1321
1319
1322 # TODO: johbo: Find out why this breaks if it is done in a bulk
1320 # TODO: johbo: Find out why this breaks if it is done in a bulk
1323 # operation.
1321 # operation.
1324 for comment in comments:
1322 for comment in comments:
1325 comment.pull_request_version_id = (
1323 comment.pull_request_version_id = (
1326 pull_request_version.pull_request_version_id)
1324 pull_request_version.pull_request_version_id)
1327 Session().add(comment)
1325 Session().add(comment)
1328
1326
1329 def _calculate_commit_id_changes(self, old_ids, new_ids):
1327 def _calculate_commit_id_changes(self, old_ids, new_ids):
1330 added = [x for x in new_ids if x not in old_ids]
1328 added = [x for x in new_ids if x not in old_ids]
1331 common = [x for x in new_ids if x in old_ids]
1329 common = [x for x in new_ids if x in old_ids]
1332 removed = [x for x in old_ids if x not in new_ids]
1330 removed = [x for x in old_ids if x not in new_ids]
1333 total = new_ids
1331 total = new_ids
1334 return ChangeTuple(added, common, removed, total)
1332 return ChangeTuple(added, common, removed, total)
1335
1333
1336 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1334 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1337
1335
1338 old_files = OrderedDict()
1336 old_files = OrderedDict()
1339 for diff_data in old_diff_data.parsed_diff:
1337 for diff_data in old_diff_data.parsed_diff:
1340 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1338 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1341
1339
1342 added_files = []
1340 added_files = []
1343 modified_files = []
1341 modified_files = []
1344 removed_files = []
1342 removed_files = []
1345 for diff_data in new_diff_data.parsed_diff:
1343 for diff_data in new_diff_data.parsed_diff:
1346 new_filename = diff_data['filename']
1344 new_filename = diff_data['filename']
1347 new_hash = md5_safe(diff_data['raw_diff'])
1345 new_hash = md5_safe(diff_data['raw_diff'])
1348
1346
1349 old_hash = old_files.get(new_filename)
1347 old_hash = old_files.get(new_filename)
1350 if not old_hash:
1348 if not old_hash:
1351 # file is not present in old diff, we have to figure out from parsed diff
1349 # file is not present in old diff, we have to figure out from parsed diff
1352 # operation ADD/REMOVE
1350 # operation ADD/REMOVE
1353 operations_dict = diff_data['stats']['ops']
1351 operations_dict = diff_data['stats']['ops']
1354 if diffs.DEL_FILENODE in operations_dict:
1352 if diffs.DEL_FILENODE in operations_dict:
1355 removed_files.append(new_filename)
1353 removed_files.append(new_filename)
1356 else:
1354 else:
1357 added_files.append(new_filename)
1355 added_files.append(new_filename)
1358 else:
1356 else:
1359 if new_hash != old_hash:
1357 if new_hash != old_hash:
1360 modified_files.append(new_filename)
1358 modified_files.append(new_filename)
1361 # now remove a file from old, since we have seen it already
1359 # now remove a file from old, since we have seen it already
1362 del old_files[new_filename]
1360 del old_files[new_filename]
1363
1361
1364 # removed files is when there are present in old, but not in NEW,
1362 # removed files is when there are present in old, but not in NEW,
1365 # since we remove old files that are present in new diff, left-overs
1363 # since we remove old files that are present in new diff, left-overs
1366 # if any should be the removed files
1364 # if any should be the removed files
1367 removed_files.extend(old_files.keys())
1365 removed_files.extend(old_files.keys())
1368
1366
1369 return FileChangeTuple(added_files, modified_files, removed_files)
1367 return FileChangeTuple(added_files, modified_files, removed_files)
1370
1368
1371 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1369 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1372 """
1370 """
1373 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1371 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1374 so it's always looking the same disregarding on which default
1372 so it's always looking the same disregarding on which default
1375 renderer system is using.
1373 renderer system is using.
1376
1374
1377 :param ancestor_commit_id: ancestor raw_id
1375 :param ancestor_commit_id: ancestor raw_id
1378 :param changes: changes named tuple
1376 :param changes: changes named tuple
1379 :param file_changes: file changes named tuple
1377 :param file_changes: file changes named tuple
1380
1378
1381 """
1379 """
1382 new_status = ChangesetStatus.get_status_lbl(
1380 new_status = ChangesetStatus.get_status_lbl(
1383 ChangesetStatus.STATUS_UNDER_REVIEW)
1381 ChangesetStatus.STATUS_UNDER_REVIEW)
1384
1382
1385 changed_files = (
1383 changed_files = (
1386 file_changes.added + file_changes.modified + file_changes.removed)
1384 file_changes.added + file_changes.modified + file_changes.removed)
1387
1385
1388 params = {
1386 params = {
1389 'under_review_label': new_status,
1387 'under_review_label': new_status,
1390 'added_commits': changes.added,
1388 'added_commits': changes.added,
1391 'removed_commits': changes.removed,
1389 'removed_commits': changes.removed,
1392 'changed_files': changed_files,
1390 'changed_files': changed_files,
1393 'added_files': file_changes.added,
1391 'added_files': file_changes.added,
1394 'modified_files': file_changes.modified,
1392 'modified_files': file_changes.modified,
1395 'removed_files': file_changes.removed,
1393 'removed_files': file_changes.removed,
1396 'ancestor_commit_id': ancestor_commit_id
1394 'ancestor_commit_id': ancestor_commit_id
1397 }
1395 }
1398 renderer = RstTemplateRenderer()
1396 renderer = RstTemplateRenderer()
1399 return renderer.render('pull_request_update.mako', **params)
1397 return renderer.render('pull_request_update.mako', **params)
1400
1398
1401 def edit(self, pull_request, title, description, description_renderer, user):
1399 def edit(self, pull_request, title, description, description_renderer, user):
1402 pull_request = self.__get_pull_request(pull_request)
1400 pull_request = self.__get_pull_request(pull_request)
1403 old_data = pull_request.get_api_data(with_merge_state=False)
1401 old_data = pull_request.get_api_data(with_merge_state=False)
1404 if pull_request.is_closed():
1402 if pull_request.is_closed():
1405 raise ValueError('This pull request is closed')
1403 raise ValueError('This pull request is closed')
1406 if title:
1404 if title:
1407 pull_request.title = title
1405 pull_request.title = title
1408 pull_request.description = description
1406 pull_request.description = description
1409 pull_request.updated_on = datetime.datetime.now()
1407 pull_request.updated_on = datetime.datetime.now()
1410 pull_request.description_renderer = description_renderer
1408 pull_request.description_renderer = description_renderer
1411 Session().add(pull_request)
1409 Session().add(pull_request)
1412 self._log_audit_action(
1410 self._log_audit_action(
1413 'repo.pull_request.edit', {'old_data': old_data},
1411 'repo.pull_request.edit', {'old_data': old_data},
1414 user, pull_request)
1412 user, pull_request)
1415
1413
1416 def update_reviewers(self, pull_request, reviewer_data, user):
1414 def update_reviewers(self, pull_request, reviewer_data, user):
1417 """
1415 """
1418 Update the reviewers in the pull request
1416 Update the reviewers in the pull request
1419
1417
1420 :param pull_request: the pr to update
1418 :param pull_request: the pr to update
1421 :param reviewer_data: list of tuples
1419 :param reviewer_data: list of tuples
1422 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1420 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1423 :param user: current use who triggers this action
1421 :param user: current use who triggers this action
1424 """
1422 """
1425
1423
1426 pull_request = self.__get_pull_request(pull_request)
1424 pull_request = self.__get_pull_request(pull_request)
1427 if pull_request.is_closed():
1425 if pull_request.is_closed():
1428 raise ValueError('This pull request is closed')
1426 raise ValueError('This pull request is closed')
1429
1427
1430 reviewers = {}
1428 reviewers = {}
1431 for user_id, reasons, mandatory, role, rules in reviewer_data:
1429 for user_id, reasons, mandatory, role, rules in reviewer_data:
1432 if isinstance(user_id, (int, str)):
1430 if isinstance(user_id, (int, str)):
1433 user_id = self._get_user(user_id).user_id
1431 user_id = self._get_user(user_id).user_id
1434 reviewers[user_id] = {
1432 reviewers[user_id] = {
1435 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1433 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1436
1434
1437 reviewers_ids = set(reviewers.keys())
1435 reviewers_ids = set(reviewers.keys())
1438 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1436 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1439 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1437 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1440
1438
1441 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1439 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1442
1440
1443 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1441 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1444 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1442 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1445
1443
1446 log.debug("Adding %s reviewers", ids_to_add)
1444 log.debug("Adding %s reviewers", ids_to_add)
1447 log.debug("Removing %s reviewers", ids_to_remove)
1445 log.debug("Removing %s reviewers", ids_to_remove)
1448 changed = False
1446 changed = False
1449 added_audit_reviewers = []
1447 added_audit_reviewers = []
1450 removed_audit_reviewers = []
1448 removed_audit_reviewers = []
1451
1449
1452 for uid in ids_to_add:
1450 for uid in ids_to_add:
1453 changed = True
1451 changed = True
1454 _usr = self._get_user(uid)
1452 _usr = self._get_user(uid)
1455 reviewer = PullRequestReviewers()
1453 reviewer = PullRequestReviewers()
1456 reviewer.user = _usr
1454 reviewer.user = _usr
1457 reviewer.pull_request = pull_request
1455 reviewer.pull_request = pull_request
1458 reviewer.reasons = reviewers[uid]['reasons']
1456 reviewer.reasons = reviewers[uid]['reasons']
1459 # NOTE(marcink): mandatory shouldn't be changed now
1457 # NOTE(marcink): mandatory shouldn't be changed now
1460 # reviewer.mandatory = reviewers[uid]['reasons']
1458 # reviewer.mandatory = reviewers[uid]['reasons']
1461 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1459 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1462 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1460 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1463 Session().add(reviewer)
1461 Session().add(reviewer)
1464 added_audit_reviewers.append(reviewer.get_dict())
1462 added_audit_reviewers.append(reviewer.get_dict())
1465
1463
1466 for uid in ids_to_remove:
1464 for uid in ids_to_remove:
1467 changed = True
1465 changed = True
1468 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1466 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1469 # This is an edge case that handles previous state of having the same reviewer twice.
1467 # This is an edge case that handles previous state of having the same reviewer twice.
1470 # this CAN happen due to the lack of DB checks
1468 # this CAN happen due to the lack of DB checks
1471 reviewers = PullRequestReviewers.query()\
1469 reviewers = PullRequestReviewers.query()\
1472 .filter(PullRequestReviewers.user_id == uid,
1470 .filter(PullRequestReviewers.user_id == uid,
1473 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1471 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1474 PullRequestReviewers.pull_request == pull_request)\
1472 PullRequestReviewers.pull_request == pull_request)\
1475 .all()
1473 .all()
1476
1474
1477 for obj in reviewers:
1475 for obj in reviewers:
1478 added_audit_reviewers.append(obj.get_dict())
1476 added_audit_reviewers.append(obj.get_dict())
1479 Session().delete(obj)
1477 Session().delete(obj)
1480
1478
1481 if changed:
1479 if changed:
1482 Session().expire_all()
1480 Session().expire_all()
1483 pull_request.updated_on = datetime.datetime.now()
1481 pull_request.updated_on = datetime.datetime.now()
1484 Session().add(pull_request)
1482 Session().add(pull_request)
1485
1483
1486 # finally store audit logs
1484 # finally store audit logs
1487 for user_data in added_audit_reviewers:
1485 for user_data in added_audit_reviewers:
1488 self._log_audit_action(
1486 self._log_audit_action(
1489 'repo.pull_request.reviewer.add', {'data': user_data},
1487 'repo.pull_request.reviewer.add', {'data': user_data},
1490 user, pull_request)
1488 user, pull_request)
1491 for user_data in removed_audit_reviewers:
1489 for user_data in removed_audit_reviewers:
1492 self._log_audit_action(
1490 self._log_audit_action(
1493 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1491 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1494 user, pull_request)
1492 user, pull_request)
1495
1493
1496 self.notify_reviewers(pull_request, ids_to_add, user)
1494 self.notify_reviewers(pull_request, ids_to_add, user)
1497 return ids_to_add, ids_to_remove
1495 return ids_to_add, ids_to_remove
1498
1496
1499 def update_observers(self, pull_request, observer_data, user):
1497 def update_observers(self, pull_request, observer_data, user):
1500 """
1498 """
1501 Update the observers in the pull request
1499 Update the observers in the pull request
1502
1500
1503 :param pull_request: the pr to update
1501 :param pull_request: the pr to update
1504 :param observer_data: list of tuples
1502 :param observer_data: list of tuples
1505 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1503 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1506 :param user: current use who triggers this action
1504 :param user: current use who triggers this action
1507 """
1505 """
1508 pull_request = self.__get_pull_request(pull_request)
1506 pull_request = self.__get_pull_request(pull_request)
1509 if pull_request.is_closed():
1507 if pull_request.is_closed():
1510 raise ValueError('This pull request is closed')
1508 raise ValueError('This pull request is closed')
1511
1509
1512 observers = {}
1510 observers = {}
1513 for user_id, reasons, mandatory, role, rules in observer_data:
1511 for user_id, reasons, mandatory, role, rules in observer_data:
1514 if isinstance(user_id, (int, str)):
1512 if isinstance(user_id, (int, str)):
1515 user_id = self._get_user(user_id).user_id
1513 user_id = self._get_user(user_id).user_id
1516 observers[user_id] = {
1514 observers[user_id] = {
1517 'reasons': reasons, 'observers': mandatory, 'role': role}
1515 'reasons': reasons, 'observers': mandatory, 'role': role}
1518
1516
1519 observers_ids = set(observers.keys())
1517 observers_ids = set(observers.keys())
1520 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1518 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1521 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1519 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1522
1520
1523 current_observers_ids = set([x.user.user_id for x in current_observers])
1521 current_observers_ids = set([x.user.user_id for x in current_observers])
1524
1522
1525 ids_to_add = observers_ids.difference(current_observers_ids)
1523 ids_to_add = observers_ids.difference(current_observers_ids)
1526 ids_to_remove = current_observers_ids.difference(observers_ids)
1524 ids_to_remove = current_observers_ids.difference(observers_ids)
1527
1525
1528 log.debug("Adding %s observer", ids_to_add)
1526 log.debug("Adding %s observer", ids_to_add)
1529 log.debug("Removing %s observer", ids_to_remove)
1527 log.debug("Removing %s observer", ids_to_remove)
1530 changed = False
1528 changed = False
1531 added_audit_observers = []
1529 added_audit_observers = []
1532 removed_audit_observers = []
1530 removed_audit_observers = []
1533
1531
1534 for uid in ids_to_add:
1532 for uid in ids_to_add:
1535 changed = True
1533 changed = True
1536 _usr = self._get_user(uid)
1534 _usr = self._get_user(uid)
1537 observer = PullRequestReviewers()
1535 observer = PullRequestReviewers()
1538 observer.user = _usr
1536 observer.user = _usr
1539 observer.pull_request = pull_request
1537 observer.pull_request = pull_request
1540 observer.reasons = observers[uid]['reasons']
1538 observer.reasons = observers[uid]['reasons']
1541 # NOTE(marcink): mandatory shouldn't be changed now
1539 # NOTE(marcink): mandatory shouldn't be changed now
1542 # observer.mandatory = observer[uid]['reasons']
1540 # observer.mandatory = observer[uid]['reasons']
1543
1541
1544 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1542 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1545 observer.role = PullRequestReviewers.ROLE_OBSERVER
1543 observer.role = PullRequestReviewers.ROLE_OBSERVER
1546 Session().add(observer)
1544 Session().add(observer)
1547 added_audit_observers.append(observer.get_dict())
1545 added_audit_observers.append(observer.get_dict())
1548
1546
1549 for uid in ids_to_remove:
1547 for uid in ids_to_remove:
1550 changed = True
1548 changed = True
1551 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1549 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1552 # This is an edge case that handles previous state of having the same reviewer twice.
1550 # This is an edge case that handles previous state of having the same reviewer twice.
1553 # this CAN happen due to the lack of DB checks
1551 # this CAN happen due to the lack of DB checks
1554 observers = PullRequestReviewers.query()\
1552 observers = PullRequestReviewers.query()\
1555 .filter(PullRequestReviewers.user_id == uid,
1553 .filter(PullRequestReviewers.user_id == uid,
1556 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1554 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1557 PullRequestReviewers.pull_request == pull_request)\
1555 PullRequestReviewers.pull_request == pull_request)\
1558 .all()
1556 .all()
1559
1557
1560 for obj in observers:
1558 for obj in observers:
1561 added_audit_observers.append(obj.get_dict())
1559 added_audit_observers.append(obj.get_dict())
1562 Session().delete(obj)
1560 Session().delete(obj)
1563
1561
1564 if changed:
1562 if changed:
1565 Session().expire_all()
1563 Session().expire_all()
1566 pull_request.updated_on = datetime.datetime.now()
1564 pull_request.updated_on = datetime.datetime.now()
1567 Session().add(pull_request)
1565 Session().add(pull_request)
1568
1566
1569 # finally store audit logs
1567 # finally store audit logs
1570 for user_data in added_audit_observers:
1568 for user_data in added_audit_observers:
1571 self._log_audit_action(
1569 self._log_audit_action(
1572 'repo.pull_request.observer.add', {'data': user_data},
1570 'repo.pull_request.observer.add', {'data': user_data},
1573 user, pull_request)
1571 user, pull_request)
1574 for user_data in removed_audit_observers:
1572 for user_data in removed_audit_observers:
1575 self._log_audit_action(
1573 self._log_audit_action(
1576 'repo.pull_request.observer.delete', {'old_data': user_data},
1574 'repo.pull_request.observer.delete', {'old_data': user_data},
1577 user, pull_request)
1575 user, pull_request)
1578
1576
1579 self.notify_observers(pull_request, ids_to_add, user)
1577 self.notify_observers(pull_request, ids_to_add, user)
1580 return ids_to_add, ids_to_remove
1578 return ids_to_add, ids_to_remove
1581
1579
1582 def get_url(self, pull_request, request=None, permalink=False):
1580 def get_url(self, pull_request, request=None, permalink=False):
1583 if not request:
1581 if not request:
1584 request = get_current_request()
1582 request = get_current_request()
1585
1583
1586 if permalink:
1584 if permalink:
1587 return request.route_url(
1585 return request.route_url(
1588 'pull_requests_global',
1586 'pull_requests_global',
1589 pull_request_id=pull_request.pull_request_id,)
1587 pull_request_id=pull_request.pull_request_id,)
1590 else:
1588 else:
1591 return request.route_url('pullrequest_show',
1589 return request.route_url('pullrequest_show',
1592 repo_name=safe_str(pull_request.target_repo.repo_name),
1590 repo_name=safe_str(pull_request.target_repo.repo_name),
1593 pull_request_id=pull_request.pull_request_id,)
1591 pull_request_id=pull_request.pull_request_id,)
1594
1592
1595 def get_shadow_clone_url(self, pull_request, request=None):
1593 def get_shadow_clone_url(self, pull_request, request=None):
1596 """
1594 """
1597 Returns qualified url pointing to the shadow repository. If this pull
1595 Returns qualified url pointing to the shadow repository. If this pull
1598 request is closed there is no shadow repository and ``None`` will be
1596 request is closed there is no shadow repository and ``None`` will be
1599 returned.
1597 returned.
1600 """
1598 """
1601 if pull_request.is_closed():
1599 if pull_request.is_closed():
1602 return None
1600 return None
1603 else:
1601 else:
1604 pr_url = urllib.parse.unquote(self.get_url(pull_request, request=request))
1602 pr_url = urllib.parse.unquote(self.get_url(pull_request, request=request))
1605 return safe_str('{pr_url}/repository'.format(pr_url=pr_url))
1603 return safe_str('{pr_url}/repository'.format(pr_url=pr_url))
1606
1604
1607 def _notify_reviewers(self, pull_request, user_ids, role, user):
1605 def _notify_reviewers(self, pull_request, user_ids, role, user):
1608 # notification to reviewers/observers
1606 # notification to reviewers/observers
1609 if not user_ids:
1607 if not user_ids:
1610 return
1608 return
1611
1609
1612 log.debug('Notify following %s users about pull-request %s', role, user_ids)
1610 log.debug('Notify following %s users about pull-request %s', role, user_ids)
1613
1611
1614 pull_request_obj = pull_request
1612 pull_request_obj = pull_request
1615 # get the current participants of this pull request
1613 # get the current participants of this pull request
1616 recipients = user_ids
1614 recipients = user_ids
1617 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1615 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1618
1616
1619 pr_source_repo = pull_request_obj.source_repo
1617 pr_source_repo = pull_request_obj.source_repo
1620 pr_target_repo = pull_request_obj.target_repo
1618 pr_target_repo = pull_request_obj.target_repo
1621
1619
1622 pr_url = h.route_url('pullrequest_show',
1620 pr_url = h.route_url('pullrequest_show',
1623 repo_name=pr_target_repo.repo_name,
1621 repo_name=pr_target_repo.repo_name,
1624 pull_request_id=pull_request_obj.pull_request_id,)
1622 pull_request_id=pull_request_obj.pull_request_id,)
1625
1623
1626 # set some variables for email notification
1624 # set some variables for email notification
1627 pr_target_repo_url = h.route_url(
1625 pr_target_repo_url = h.route_url(
1628 'repo_summary', repo_name=pr_target_repo.repo_name)
1626 'repo_summary', repo_name=pr_target_repo.repo_name)
1629
1627
1630 pr_source_repo_url = h.route_url(
1628 pr_source_repo_url = h.route_url(
1631 'repo_summary', repo_name=pr_source_repo.repo_name)
1629 'repo_summary', repo_name=pr_source_repo.repo_name)
1632
1630
1633 # pull request specifics
1631 # pull request specifics
1634 pull_request_commits = [
1632 pull_request_commits = [
1635 (x.raw_id, x.message)
1633 (x.raw_id, x.message)
1636 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1634 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1637
1635
1638 current_rhodecode_user = user
1636 current_rhodecode_user = user
1639 kwargs = {
1637 kwargs = {
1640 'user': current_rhodecode_user,
1638 'user': current_rhodecode_user,
1641 'pull_request_author': pull_request.author,
1639 'pull_request_author': pull_request.author,
1642 'pull_request': pull_request_obj,
1640 'pull_request': pull_request_obj,
1643 'pull_request_commits': pull_request_commits,
1641 'pull_request_commits': pull_request_commits,
1644
1642
1645 'pull_request_target_repo': pr_target_repo,
1643 'pull_request_target_repo': pr_target_repo,
1646 'pull_request_target_repo_url': pr_target_repo_url,
1644 'pull_request_target_repo_url': pr_target_repo_url,
1647
1645
1648 'pull_request_source_repo': pr_source_repo,
1646 'pull_request_source_repo': pr_source_repo,
1649 'pull_request_source_repo_url': pr_source_repo_url,
1647 'pull_request_source_repo_url': pr_source_repo_url,
1650
1648
1651 'pull_request_url': pr_url,
1649 'pull_request_url': pr_url,
1652 'thread_ids': [pr_url],
1650 'thread_ids': [pr_url],
1653 'user_role': role
1651 'user_role': role
1654 }
1652 }
1655
1653
1656 # create notification objects, and emails
1654 # create notification objects, and emails
1657 NotificationModel().create(
1655 NotificationModel().create(
1658 created_by=current_rhodecode_user,
1656 created_by=current_rhodecode_user,
1659 notification_subject='', # Filled in based on the notification_type
1657 notification_subject='', # Filled in based on the notification_type
1660 notification_body='', # Filled in based on the notification_type
1658 notification_body='', # Filled in based on the notification_type
1661 notification_type=notification_type,
1659 notification_type=notification_type,
1662 recipients=recipients,
1660 recipients=recipients,
1663 email_kwargs=kwargs,
1661 email_kwargs=kwargs,
1664 )
1662 )
1665
1663
1666 def notify_reviewers(self, pull_request, reviewers_ids, user):
1664 def notify_reviewers(self, pull_request, reviewers_ids, user):
1667 return self._notify_reviewers(pull_request, reviewers_ids,
1665 return self._notify_reviewers(pull_request, reviewers_ids,
1668 PullRequestReviewers.ROLE_REVIEWER, user)
1666 PullRequestReviewers.ROLE_REVIEWER, user)
1669
1667
1670 def notify_observers(self, pull_request, observers_ids, user):
1668 def notify_observers(self, pull_request, observers_ids, user):
1671 return self._notify_reviewers(pull_request, observers_ids,
1669 return self._notify_reviewers(pull_request, observers_ids,
1672 PullRequestReviewers.ROLE_OBSERVER, user)
1670 PullRequestReviewers.ROLE_OBSERVER, user)
1673
1671
1674 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1672 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1675 commit_changes, file_changes):
1673 commit_changes, file_changes):
1676
1674
1677 updating_user_id = updating_user.user_id
1675 updating_user_id = updating_user.user_id
1678 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1676 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1679 # NOTE(marcink): send notification to all other users except to
1677 # NOTE(marcink): send notification to all other users except to
1680 # person who updated the PR
1678 # person who updated the PR
1681 recipients = reviewers.difference(set([updating_user_id]))
1679 recipients = reviewers.difference(set([updating_user_id]))
1682
1680
1683 log.debug('Notify following recipients about pull-request update %s', recipients)
1681 log.debug('Notify following recipients about pull-request update %s', recipients)
1684
1682
1685 pull_request_obj = pull_request
1683 pull_request_obj = pull_request
1686
1684
1687 # send email about the update
1685 # send email about the update
1688 changed_files = (
1686 changed_files = (
1689 file_changes.added + file_changes.modified + file_changes.removed)
1687 file_changes.added + file_changes.modified + file_changes.removed)
1690
1688
1691 pr_source_repo = pull_request_obj.source_repo
1689 pr_source_repo = pull_request_obj.source_repo
1692 pr_target_repo = pull_request_obj.target_repo
1690 pr_target_repo = pull_request_obj.target_repo
1693
1691
1694 pr_url = h.route_url('pullrequest_show',
1692 pr_url = h.route_url('pullrequest_show',
1695 repo_name=pr_target_repo.repo_name,
1693 repo_name=pr_target_repo.repo_name,
1696 pull_request_id=pull_request_obj.pull_request_id,)
1694 pull_request_id=pull_request_obj.pull_request_id,)
1697
1695
1698 # set some variables for email notification
1696 # set some variables for email notification
1699 pr_target_repo_url = h.route_url(
1697 pr_target_repo_url = h.route_url(
1700 'repo_summary', repo_name=pr_target_repo.repo_name)
1698 'repo_summary', repo_name=pr_target_repo.repo_name)
1701
1699
1702 pr_source_repo_url = h.route_url(
1700 pr_source_repo_url = h.route_url(
1703 'repo_summary', repo_name=pr_source_repo.repo_name)
1701 'repo_summary', repo_name=pr_source_repo.repo_name)
1704
1702
1705 email_kwargs = {
1703 email_kwargs = {
1706 'date': datetime.datetime.now(),
1704 'date': datetime.datetime.now(),
1707 'updating_user': updating_user,
1705 'updating_user': updating_user,
1708
1706
1709 'pull_request': pull_request_obj,
1707 'pull_request': pull_request_obj,
1710
1708
1711 'pull_request_target_repo': pr_target_repo,
1709 'pull_request_target_repo': pr_target_repo,
1712 'pull_request_target_repo_url': pr_target_repo_url,
1710 'pull_request_target_repo_url': pr_target_repo_url,
1713
1711
1714 'pull_request_source_repo': pr_source_repo,
1712 'pull_request_source_repo': pr_source_repo,
1715 'pull_request_source_repo_url': pr_source_repo_url,
1713 'pull_request_source_repo_url': pr_source_repo_url,
1716
1714
1717 'pull_request_url': pr_url,
1715 'pull_request_url': pr_url,
1718
1716
1719 'ancestor_commit_id': ancestor_commit_id,
1717 'ancestor_commit_id': ancestor_commit_id,
1720 'added_commits': commit_changes.added,
1718 'added_commits': commit_changes.added,
1721 'removed_commits': commit_changes.removed,
1719 'removed_commits': commit_changes.removed,
1722 'changed_files': changed_files,
1720 'changed_files': changed_files,
1723 'added_files': file_changes.added,
1721 'added_files': file_changes.added,
1724 'modified_files': file_changes.modified,
1722 'modified_files': file_changes.modified,
1725 'removed_files': file_changes.removed,
1723 'removed_files': file_changes.removed,
1726 'thread_ids': [pr_url],
1724 'thread_ids': [pr_url],
1727 }
1725 }
1728
1726
1729 # create notification objects, and emails
1727 # create notification objects, and emails
1730 NotificationModel().create(
1728 NotificationModel().create(
1731 created_by=updating_user,
1729 created_by=updating_user,
1732 notification_subject='', # Filled in based on the notification_type
1730 notification_subject='', # Filled in based on the notification_type
1733 notification_body='', # Filled in based on the notification_type
1731 notification_body='', # Filled in based on the notification_type
1734 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1732 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1735 recipients=recipients,
1733 recipients=recipients,
1736 email_kwargs=email_kwargs,
1734 email_kwargs=email_kwargs,
1737 )
1735 )
1738
1736
1739 def delete(self, pull_request, user=None):
1737 def delete(self, pull_request, user=None):
1740 if not user:
1738 if not user:
1741 user = getattr(get_current_rhodecode_user(), 'username', None)
1739 user = getattr(get_current_rhodecode_user(), 'username', None)
1742
1740
1743 pull_request = self.__get_pull_request(pull_request)
1741 pull_request = self.__get_pull_request(pull_request)
1744 old_data = pull_request.get_api_data(with_merge_state=False)
1742 old_data = pull_request.get_api_data(with_merge_state=False)
1745 self._cleanup_merge_workspace(pull_request)
1743 self._cleanup_merge_workspace(pull_request)
1746 self._log_audit_action(
1744 self._log_audit_action(
1747 'repo.pull_request.delete', {'old_data': old_data},
1745 'repo.pull_request.delete', {'old_data': old_data},
1748 user, pull_request)
1746 user, pull_request)
1749 Session().delete(pull_request)
1747 Session().delete(pull_request)
1750
1748
1751 def close_pull_request(self, pull_request, user):
1749 def close_pull_request(self, pull_request, user):
1752 pull_request = self.__get_pull_request(pull_request)
1750 pull_request = self.__get_pull_request(pull_request)
1753 self._cleanup_merge_workspace(pull_request)
1751 self._cleanup_merge_workspace(pull_request)
1754 pull_request.status = PullRequest.STATUS_CLOSED
1752 pull_request.status = PullRequest.STATUS_CLOSED
1755 pull_request.updated_on = datetime.datetime.now()
1753 pull_request.updated_on = datetime.datetime.now()
1756 Session().add(pull_request)
1754 Session().add(pull_request)
1757 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1755 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1758
1756
1759 pr_data = pull_request.get_api_data(with_merge_state=False)
1757 pr_data = pull_request.get_api_data(with_merge_state=False)
1760 self._log_audit_action(
1758 self._log_audit_action(
1761 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1759 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1762
1760
1763 def close_pull_request_with_comment(
1761 def close_pull_request_with_comment(
1764 self, pull_request, user, repo, message=None, auth_user=None):
1762 self, pull_request, user, repo, message=None, auth_user=None):
1765
1763
1766 pull_request_review_status = pull_request.calculated_review_status()
1764 pull_request_review_status = pull_request.calculated_review_status()
1767
1765
1768 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1766 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1769 # approved only if we have voting consent
1767 # approved only if we have voting consent
1770 status = ChangesetStatus.STATUS_APPROVED
1768 status = ChangesetStatus.STATUS_APPROVED
1771 else:
1769 else:
1772 status = ChangesetStatus.STATUS_REJECTED
1770 status = ChangesetStatus.STATUS_REJECTED
1773 status_lbl = ChangesetStatus.get_status_lbl(status)
1771 status_lbl = ChangesetStatus.get_status_lbl(status)
1774
1772
1775 default_message = (
1773 default_message = (
1776 'Closing with status change {transition_icon} {status}.'
1774 'Closing with status change {transition_icon} {status}.'
1777 ).format(transition_icon='>', status=status_lbl)
1775 ).format(transition_icon='>', status=status_lbl)
1778 text = message or default_message
1776 text = message or default_message
1779
1777
1780 # create a comment, and link it to new status
1778 # create a comment, and link it to new status
1781 comment = CommentsModel().create(
1779 comment = CommentsModel().create(
1782 text=text,
1780 text=text,
1783 repo=repo.repo_id,
1781 repo=repo.repo_id,
1784 user=user.user_id,
1782 user=user.user_id,
1785 pull_request=pull_request.pull_request_id,
1783 pull_request=pull_request.pull_request_id,
1786 status_change=status_lbl,
1784 status_change=status_lbl,
1787 status_change_type=status,
1785 status_change_type=status,
1788 closing_pr=True,
1786 closing_pr=True,
1789 auth_user=auth_user,
1787 auth_user=auth_user,
1790 )
1788 )
1791
1789
1792 # calculate old status before we change it
1790 # calculate old status before we change it
1793 old_calculated_status = pull_request.calculated_review_status()
1791 old_calculated_status = pull_request.calculated_review_status()
1794 ChangesetStatusModel().set_status(
1792 ChangesetStatusModel().set_status(
1795 repo.repo_id,
1793 repo.repo_id,
1796 status,
1794 status,
1797 user.user_id,
1795 user.user_id,
1798 comment=comment,
1796 comment=comment,
1799 pull_request=pull_request.pull_request_id
1797 pull_request=pull_request.pull_request_id
1800 )
1798 )
1801
1799
1802 Session().flush()
1800 Session().flush()
1803
1801
1804 self.trigger_pull_request_hook(pull_request, user, 'comment',
1802 self.trigger_pull_request_hook(pull_request, user, 'comment',
1805 data={'comment': comment})
1803 data={'comment': comment})
1806
1804
1807 # we now calculate the status of pull request again, and based on that
1805 # we now calculate the status of pull request again, and based on that
1808 # calculation trigger status change. This might happen in cases
1806 # calculation trigger status change. This might happen in cases
1809 # that non-reviewer admin closes a pr, which means his vote doesn't
1807 # that non-reviewer admin closes a pr, which means his vote doesn't
1810 # change the status, while if he's a reviewer this might change it.
1808 # change the status, while if he's a reviewer this might change it.
1811 calculated_status = pull_request.calculated_review_status()
1809 calculated_status = pull_request.calculated_review_status()
1812 if old_calculated_status != calculated_status:
1810 if old_calculated_status != calculated_status:
1813 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1811 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1814 data={'status': calculated_status})
1812 data={'status': calculated_status})
1815
1813
1816 # finally close the PR
1814 # finally close the PR
1817 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1815 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1818
1816
1819 return comment, status
1817 return comment, status
1820
1818
1821 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1819 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1822 _ = translator or get_current_request().translate
1820 _ = translator or get_current_request().translate
1823
1821
1824 if not self._is_merge_enabled(pull_request):
1822 if not self._is_merge_enabled(pull_request):
1825 return None, False, _('Server-side pull request merging is disabled.')
1823 return None, False, _('Server-side pull request merging is disabled.')
1826
1824
1827 if pull_request.is_closed():
1825 if pull_request.is_closed():
1828 return None, False, _('This pull request is closed.')
1826 return None, False, _('This pull request is closed.')
1829
1827
1830 merge_possible, msg = self._check_repo_requirements(
1828 merge_possible, msg = self._check_repo_requirements(
1831 target=pull_request.target_repo, source=pull_request.source_repo,
1829 target=pull_request.target_repo, source=pull_request.source_repo,
1832 translator=_)
1830 translator=_)
1833 if not merge_possible:
1831 if not merge_possible:
1834 return None, merge_possible, msg
1832 return None, merge_possible, msg
1835
1833
1836 try:
1834 try:
1837 merge_response = self._try_merge(
1835 merge_response = self._try_merge(
1838 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1836 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1839 log.debug("Merge response: %s", merge_response)
1837 log.debug("Merge response: %s", merge_response)
1840 return merge_response, merge_response.possible, merge_response.merge_status_message
1838 return merge_response, merge_response.possible, merge_response.merge_status_message
1841 except NotImplementedError:
1839 except NotImplementedError:
1842 return None, False, _('Pull request merging is not supported.')
1840 return None, False, _('Pull request merging is not supported.')
1843
1841
1844 def _check_repo_requirements(self, target, source, translator):
1842 def _check_repo_requirements(self, target, source, translator):
1845 """
1843 """
1846 Check if `target` and `source` have compatible requirements.
1844 Check if `target` and `source` have compatible requirements.
1847
1845
1848 Currently this is just checking for largefiles.
1846 Currently this is just checking for largefiles.
1849 """
1847 """
1850 _ = translator
1848 _ = translator
1851 target_has_largefiles = self._has_largefiles(target)
1849 target_has_largefiles = self._has_largefiles(target)
1852 source_has_largefiles = self._has_largefiles(source)
1850 source_has_largefiles = self._has_largefiles(source)
1853 merge_possible = True
1851 merge_possible = True
1854 message = u''
1852 message = u''
1855
1853
1856 if target_has_largefiles != source_has_largefiles:
1854 if target_has_largefiles != source_has_largefiles:
1857 merge_possible = False
1855 merge_possible = False
1858 if source_has_largefiles:
1856 if source_has_largefiles:
1859 message = _(
1857 message = _(
1860 'Target repository large files support is disabled.')
1858 'Target repository large files support is disabled.')
1861 else:
1859 else:
1862 message = _(
1860 message = _(
1863 'Source repository large files support is disabled.')
1861 'Source repository large files support is disabled.')
1864
1862
1865 return merge_possible, message
1863 return merge_possible, message
1866
1864
1867 def _has_largefiles(self, repo):
1865 def _has_largefiles(self, repo):
1868 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1866 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1869 'extensions', 'largefiles')
1867 'extensions', 'largefiles')
1870 return largefiles_ui and largefiles_ui[0].active
1868 return largefiles_ui and largefiles_ui[0].active
1871
1869
1872 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1870 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1873 """
1871 """
1874 Try to merge the pull request and return the merge status.
1872 Try to merge the pull request and return the merge status.
1875 """
1873 """
1876 log.debug(
1874 log.debug(
1877 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1875 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1878 pull_request.pull_request_id, force_shadow_repo_refresh)
1876 pull_request.pull_request_id, force_shadow_repo_refresh)
1879 target_vcs = pull_request.target_repo.scm_instance()
1877 target_vcs = pull_request.target_repo.scm_instance()
1880 # Refresh the target reference.
1878 # Refresh the target reference.
1881 try:
1879 try:
1882 target_ref = self._refresh_reference(
1880 target_ref = self._refresh_reference(
1883 pull_request.target_ref_parts, target_vcs)
1881 pull_request.target_ref_parts, target_vcs)
1884 except CommitDoesNotExistError:
1882 except CommitDoesNotExistError:
1885 merge_state = MergeResponse(
1883 merge_state = MergeResponse(
1886 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1884 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1887 metadata={'target_ref': pull_request.target_ref_parts})
1885 metadata={'target_ref': pull_request.target_ref_parts})
1888 return merge_state
1886 return merge_state
1889
1887
1890 target_locked = pull_request.target_repo.locked
1888 target_locked = pull_request.target_repo.locked
1891 if target_locked and target_locked[0]:
1889 if target_locked and target_locked[0]:
1892 locked_by = 'user:{}'.format(target_locked[0])
1890 locked_by = 'user:{}'.format(target_locked[0])
1893 log.debug("The target repository is locked by %s.", locked_by)
1891 log.debug("The target repository is locked by %s.", locked_by)
1894 merge_state = MergeResponse(
1892 merge_state = MergeResponse(
1895 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1893 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1896 metadata={'locked_by': locked_by})
1894 metadata={'locked_by': locked_by})
1897 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1895 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1898 pull_request, target_ref):
1896 pull_request, target_ref):
1899 log.debug("Refreshing the merge status of the repository.")
1897 log.debug("Refreshing the merge status of the repository.")
1900 merge_state = self._refresh_merge_state(
1898 merge_state = self._refresh_merge_state(
1901 pull_request, target_vcs, target_ref)
1899 pull_request, target_vcs, target_ref)
1902 else:
1900 else:
1903 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1901 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1904 metadata = {
1902 metadata = {
1905 'unresolved_files': '',
1903 'unresolved_files': '',
1906 'target_ref': pull_request.target_ref_parts,
1904 'target_ref': pull_request.target_ref_parts,
1907 'source_ref': pull_request.source_ref_parts,
1905 'source_ref': pull_request.source_ref_parts,
1908 }
1906 }
1909 if pull_request.last_merge_metadata:
1907 if pull_request.last_merge_metadata:
1910 metadata.update(pull_request.last_merge_metadata_parsed)
1908 metadata.update(pull_request.last_merge_metadata_parsed)
1911
1909
1912 if not possible and target_ref.type == 'branch':
1910 if not possible and target_ref.type == 'branch':
1913 # NOTE(marcink): case for mercurial multiple heads on branch
1911 # NOTE(marcink): case for mercurial multiple heads on branch
1914 heads = target_vcs._heads(target_ref.name)
1912 heads = target_vcs._heads(target_ref.name)
1915 if len(heads) != 1:
1913 if len(heads) != 1:
1916 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1914 heads = '\n,'.join(target_vcs._heads(target_ref.name))
1917 metadata.update({
1915 metadata.update({
1918 'heads': heads
1916 'heads': heads
1919 })
1917 })
1920
1918
1921 merge_state = MergeResponse(
1919 merge_state = MergeResponse(
1922 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1920 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1923
1921
1924 return merge_state
1922 return merge_state
1925
1923
1926 def _refresh_reference(self, reference, vcs_repository):
1924 def _refresh_reference(self, reference, vcs_repository):
1927 if reference.type in self.UPDATABLE_REF_TYPES:
1925 if reference.type in self.UPDATABLE_REF_TYPES:
1928 name_or_id = reference.name
1926 name_or_id = reference.name
1929 else:
1927 else:
1930 name_or_id = reference.commit_id
1928 name_or_id = reference.commit_id
1931
1929
1932 refreshed_commit = vcs_repository.get_commit(name_or_id)
1930 refreshed_commit = vcs_repository.get_commit(name_or_id)
1933 refreshed_reference = Reference(
1931 refreshed_reference = Reference(
1934 reference.type, reference.name, refreshed_commit.raw_id)
1932 reference.type, reference.name, refreshed_commit.raw_id)
1935 return refreshed_reference
1933 return refreshed_reference
1936
1934
1937 def _needs_merge_state_refresh(self, pull_request, target_reference):
1935 def _needs_merge_state_refresh(self, pull_request, target_reference):
1938 return not(
1936 return not(
1939 pull_request.revisions and
1937 pull_request.revisions and
1940 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1938 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1941 target_reference.commit_id == pull_request._last_merge_target_rev)
1939 target_reference.commit_id == pull_request._last_merge_target_rev)
1942
1940
1943 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1941 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1944 workspace_id = self._workspace_id(pull_request)
1942 workspace_id = self._workspace_id(pull_request)
1945 source_vcs = pull_request.source_repo.scm_instance()
1943 source_vcs = pull_request.source_repo.scm_instance()
1946 repo_id = pull_request.target_repo.repo_id
1944 repo_id = pull_request.target_repo.repo_id
1947 use_rebase = self._use_rebase_for_merging(pull_request)
1945 use_rebase = self._use_rebase_for_merging(pull_request)
1948 close_branch = self._close_branch_before_merging(pull_request)
1946 close_branch = self._close_branch_before_merging(pull_request)
1949 merge_state = target_vcs.merge(
1947 merge_state = target_vcs.merge(
1950 repo_id, workspace_id,
1948 repo_id, workspace_id,
1951 target_reference, source_vcs, pull_request.source_ref_parts,
1949 target_reference, source_vcs, pull_request.source_ref_parts,
1952 dry_run=True, use_rebase=use_rebase,
1950 dry_run=True, use_rebase=use_rebase,
1953 close_branch=close_branch)
1951 close_branch=close_branch)
1954
1952
1955 # Do not store the response if there was an unknown error.
1953 # Do not store the response if there was an unknown error.
1956 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1954 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1957 pull_request._last_merge_source_rev = \
1955 pull_request._last_merge_source_rev = \
1958 pull_request.source_ref_parts.commit_id
1956 pull_request.source_ref_parts.commit_id
1959 pull_request._last_merge_target_rev = target_reference.commit_id
1957 pull_request._last_merge_target_rev = target_reference.commit_id
1960 pull_request.last_merge_status = merge_state.failure_reason
1958 pull_request.last_merge_status = merge_state.failure_reason
1961 pull_request.last_merge_metadata = merge_state.metadata
1959 pull_request.last_merge_metadata = merge_state.metadata
1962
1960
1963 pull_request.shadow_merge_ref = merge_state.merge_ref
1961 pull_request.shadow_merge_ref = merge_state.merge_ref
1964 Session().add(pull_request)
1962 Session().add(pull_request)
1965 Session().commit()
1963 Session().commit()
1966
1964
1967 return merge_state
1965 return merge_state
1968
1966
1969 def _workspace_id(self, pull_request):
1967 def _workspace_id(self, pull_request):
1970 workspace_id = 'pr-%s' % pull_request.pull_request_id
1968 workspace_id = 'pr-%s' % pull_request.pull_request_id
1971 return workspace_id
1969 return workspace_id
1972
1970
1973 def generate_repo_data(self, repo, commit_id=None, branch=None,
1971 def generate_repo_data(self, repo, commit_id=None, branch=None,
1974 bookmark=None, translator=None):
1972 bookmark=None, translator=None):
1975 from rhodecode.model.repo import RepoModel
1973 from rhodecode.model.repo import RepoModel
1976
1974
1977 all_refs, selected_ref = \
1975 all_refs, selected_ref = \
1978 self._get_repo_pullrequest_sources(
1976 self._get_repo_pullrequest_sources(
1979 repo.scm_instance(), commit_id=commit_id,
1977 repo.scm_instance(), commit_id=commit_id,
1980 branch=branch, bookmark=bookmark, translator=translator)
1978 branch=branch, bookmark=bookmark, translator=translator)
1981
1979
1982 refs_select2 = []
1980 refs_select2 = []
1983 for element in all_refs:
1981 for element in all_refs:
1984 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1982 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1985 refs_select2.append({'text': element[1], 'children': children})
1983 refs_select2.append({'text': element[1], 'children': children})
1986
1984
1987 return {
1985 return {
1988 'user': {
1986 'user': {
1989 'user_id': repo.user.user_id,
1987 'user_id': repo.user.user_id,
1990 'username': repo.user.username,
1988 'username': repo.user.username,
1991 'firstname': repo.user.first_name,
1989 'firstname': repo.user.first_name,
1992 'lastname': repo.user.last_name,
1990 'lastname': repo.user.last_name,
1993 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1991 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1994 },
1992 },
1995 'name': repo.repo_name,
1993 'name': repo.repo_name,
1996 'link': RepoModel().get_url(repo),
1994 'link': RepoModel().get_url(repo),
1997 'description': h.chop_at_smart(repo.description_safe, '\n'),
1995 'description': h.chop_at_smart(repo.description_safe, '\n'),
1998 'refs': {
1996 'refs': {
1999 'all_refs': all_refs,
1997 'all_refs': all_refs,
2000 'selected_ref': selected_ref,
1998 'selected_ref': selected_ref,
2001 'select2_refs': refs_select2
1999 'select2_refs': refs_select2
2002 }
2000 }
2003 }
2001 }
2004
2002
2005 def generate_pullrequest_title(self, source, source_ref, target):
2003 def generate_pullrequest_title(self, source, source_ref, target):
2006 return u'{source}#{at_ref} to {target}'.format(
2004 return u'{source}#{at_ref} to {target}'.format(
2007 source=source,
2005 source=source,
2008 at_ref=source_ref,
2006 at_ref=source_ref,
2009 target=target,
2007 target=target,
2010 )
2008 )
2011
2009
2012 def _cleanup_merge_workspace(self, pull_request):
2010 def _cleanup_merge_workspace(self, pull_request):
2013 # Merging related cleanup
2011 # Merging related cleanup
2014 repo_id = pull_request.target_repo.repo_id
2012 repo_id = pull_request.target_repo.repo_id
2015 target_scm = pull_request.target_repo.scm_instance()
2013 target_scm = pull_request.target_repo.scm_instance()
2016 workspace_id = self._workspace_id(pull_request)
2014 workspace_id = self._workspace_id(pull_request)
2017
2015
2018 try:
2016 try:
2019 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2017 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2020 except NotImplementedError:
2018 except NotImplementedError:
2021 pass
2019 pass
2022
2020
2023 def _get_repo_pullrequest_sources(
2021 def _get_repo_pullrequest_sources(
2024 self, repo, commit_id=None, branch=None, bookmark=None,
2022 self, repo, commit_id=None, branch=None, bookmark=None,
2025 translator=None):
2023 translator=None):
2026 """
2024 """
2027 Return a structure with repo's interesting commits, suitable for
2025 Return a structure with repo's interesting commits, suitable for
2028 the selectors in pullrequest controller
2026 the selectors in pullrequest controller
2029
2027
2030 :param commit_id: a commit that must be in the list somehow
2028 :param commit_id: a commit that must be in the list somehow
2031 and selected by default
2029 and selected by default
2032 :param branch: a branch that must be in the list and selected
2030 :param branch: a branch that must be in the list and selected
2033 by default - even if closed
2031 by default - even if closed
2034 :param bookmark: a bookmark that must be in the list and selected
2032 :param bookmark: a bookmark that must be in the list and selected
2035 """
2033 """
2036 _ = translator or get_current_request().translate
2034 _ = translator or get_current_request().translate
2037
2035
2038 commit_id = safe_str(commit_id) if commit_id else None
2036 commit_id = safe_str(commit_id) if commit_id else None
2039 branch = safe_str(branch) if branch else None
2037 branch = safe_str(branch) if branch else None
2040 bookmark = safe_str(bookmark) if bookmark else None
2038 bookmark = safe_str(bookmark) if bookmark else None
2041
2039
2042 selected = None
2040 selected = None
2043
2041
2044 # order matters: first source that has commit_id in it will be selected
2042 # order matters: first source that has commit_id in it will be selected
2045 sources = []
2043 sources = []
2046 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
2044 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
2047 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
2045 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
2048
2046
2049 if commit_id:
2047 if commit_id:
2050 ref_commit = (h.short_id(commit_id), commit_id)
2048 ref_commit = (h.short_id(commit_id), commit_id)
2051 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
2049 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
2052
2050
2053 sources.append(
2051 sources.append(
2054 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
2052 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
2055 )
2053 )
2056
2054
2057 groups = []
2055 groups = []
2058
2056
2059 for group_key, ref_list, group_name, match in sources:
2057 for group_key, ref_list, group_name, match in sources:
2060 group_refs = []
2058 group_refs = []
2061 for ref_name, ref_id in ref_list:
2059 for ref_name, ref_id in ref_list:
2062 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
2060 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
2063 group_refs.append((ref_key, ref_name))
2061 group_refs.append((ref_key, ref_name))
2064
2062
2065 if not selected:
2063 if not selected:
2066 if set([commit_id, match]) & set([ref_id, ref_name]):
2064 if set([commit_id, match]) & set([ref_id, ref_name]):
2067 selected = ref_key
2065 selected = ref_key
2068
2066
2069 if group_refs:
2067 if group_refs:
2070 groups.append((group_refs, group_name))
2068 groups.append((group_refs, group_name))
2071
2069
2072 if not selected:
2070 if not selected:
2073 ref = commit_id or branch or bookmark
2071 ref = commit_id or branch or bookmark
2074 if ref:
2072 if ref:
2075 raise CommitDoesNotExistError(
2073 raise CommitDoesNotExistError(
2076 u'No commit refs could be found matching: {}'.format(ref))
2074 u'No commit refs could be found matching: {}'.format(ref))
2077 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
2075 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
2078 selected = u'branch:{}:{}'.format(
2076 selected = u'branch:{}:{}'.format(
2079 safe_str(repo.DEFAULT_BRANCH_NAME),
2077 safe_str(repo.DEFAULT_BRANCH_NAME),
2080 safe_str(repo.branches[repo.DEFAULT_BRANCH_NAME])
2078 safe_str(repo.branches[repo.DEFAULT_BRANCH_NAME])
2081 )
2079 )
2082 elif repo.commit_ids:
2080 elif repo.commit_ids:
2083 # make the user select in this case
2081 # make the user select in this case
2084 selected = None
2082 selected = None
2085 else:
2083 else:
2086 raise EmptyRepositoryError()
2084 raise EmptyRepositoryError()
2087 return groups, selected
2085 return groups, selected
2088
2086
2089 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2087 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2090 hide_whitespace_changes, diff_context):
2088 hide_whitespace_changes, diff_context):
2091
2089
2092 return self._get_diff_from_pr_or_version(
2090 return self._get_diff_from_pr_or_version(
2093 source_repo, source_ref_id, target_ref_id,
2091 source_repo, source_ref_id, target_ref_id,
2094 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2092 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2095
2093
2096 def _get_diff_from_pr_or_version(
2094 def _get_diff_from_pr_or_version(
2097 self, source_repo, source_ref_id, target_ref_id,
2095 self, source_repo, source_ref_id, target_ref_id,
2098 hide_whitespace_changes, diff_context):
2096 hide_whitespace_changes, diff_context):
2099
2097
2100 target_commit = source_repo.get_commit(
2098 target_commit = source_repo.get_commit(
2101 commit_id=safe_str(target_ref_id))
2099 commit_id=safe_str(target_ref_id))
2102 source_commit = source_repo.get_commit(
2100 source_commit = source_repo.get_commit(
2103 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
2101 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
2104 if isinstance(source_repo, Repository):
2102 if isinstance(source_repo, Repository):
2105 vcs_repo = source_repo.scm_instance()
2103 vcs_repo = source_repo.scm_instance()
2106 else:
2104 else:
2107 vcs_repo = source_repo
2105 vcs_repo = source_repo
2108
2106
2109 # TODO: johbo: In the context of an update, we cannot reach
2107 # TODO: johbo: In the context of an update, we cannot reach
2110 # the old commit anymore with our normal mechanisms. It needs
2108 # the old commit anymore with our normal mechanisms. It needs
2111 # some sort of special support in the vcs layer to avoid this
2109 # some sort of special support in the vcs layer to avoid this
2112 # workaround.
2110 # workaround.
2113 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
2111 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
2114 vcs_repo.alias == 'git'):
2112 vcs_repo.alias == 'git'):
2115 source_commit.raw_id = safe_str(source_ref_id)
2113 source_commit.raw_id = safe_str(source_ref_id)
2116
2114
2117 log.debug('calculating diff between '
2115 log.debug('calculating diff between '
2118 'source_ref:%s and target_ref:%s for repo `%s`',
2116 'source_ref:%s and target_ref:%s for repo `%s`',
2119 target_ref_id, source_ref_id,
2117 target_ref_id, source_ref_id,
2120 safe_str(vcs_repo.path))
2118 safe_str(vcs_repo.path))
2121
2119
2122 vcs_diff = vcs_repo.get_diff(
2120 vcs_diff = vcs_repo.get_diff(
2123 commit1=target_commit, commit2=source_commit,
2121 commit1=target_commit, commit2=source_commit,
2124 ignore_whitespace=hide_whitespace_changes, context=diff_context)
2122 ignore_whitespace=hide_whitespace_changes, context=diff_context)
2125 return vcs_diff
2123 return vcs_diff
2126
2124
2127 def _is_merge_enabled(self, pull_request):
2125 def _is_merge_enabled(self, pull_request):
2128 return self._get_general_setting(
2126 return self._get_general_setting(
2129 pull_request, 'rhodecode_pr_merge_enabled')
2127 pull_request, 'rhodecode_pr_merge_enabled')
2130
2128
2131 def _use_rebase_for_merging(self, pull_request):
2129 def _use_rebase_for_merging(self, pull_request):
2132 repo_type = pull_request.target_repo.repo_type
2130 repo_type = pull_request.target_repo.repo_type
2133 if repo_type == 'hg':
2131 if repo_type == 'hg':
2134 return self._get_general_setting(
2132 return self._get_general_setting(
2135 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2133 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2136 elif repo_type == 'git':
2134 elif repo_type == 'git':
2137 return self._get_general_setting(
2135 return self._get_general_setting(
2138 pull_request, 'rhodecode_git_use_rebase_for_merging')
2136 pull_request, 'rhodecode_git_use_rebase_for_merging')
2139
2137
2140 return False
2138 return False
2141
2139
2142 def _user_name_for_merging(self, pull_request, user):
2140 def _user_name_for_merging(self, pull_request, user):
2143 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2141 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2144 if env_user_name_attr and hasattr(user, env_user_name_attr):
2142 if env_user_name_attr and hasattr(user, env_user_name_attr):
2145 user_name_attr = env_user_name_attr
2143 user_name_attr = env_user_name_attr
2146 else:
2144 else:
2147 user_name_attr = 'short_contact'
2145 user_name_attr = 'short_contact'
2148
2146
2149 user_name = getattr(user, user_name_attr)
2147 user_name = getattr(user, user_name_attr)
2150 return user_name
2148 return user_name
2151
2149
2152 def _close_branch_before_merging(self, pull_request):
2150 def _close_branch_before_merging(self, pull_request):
2153 repo_type = pull_request.target_repo.repo_type
2151 repo_type = pull_request.target_repo.repo_type
2154 if repo_type == 'hg':
2152 if repo_type == 'hg':
2155 return self._get_general_setting(
2153 return self._get_general_setting(
2156 pull_request, 'rhodecode_hg_close_branch_before_merging')
2154 pull_request, 'rhodecode_hg_close_branch_before_merging')
2157 elif repo_type == 'git':
2155 elif repo_type == 'git':
2158 return self._get_general_setting(
2156 return self._get_general_setting(
2159 pull_request, 'rhodecode_git_close_branch_before_merging')
2157 pull_request, 'rhodecode_git_close_branch_before_merging')
2160
2158
2161 return False
2159 return False
2162
2160
2163 def _get_general_setting(self, pull_request, settings_key, default=False):
2161 def _get_general_setting(self, pull_request, settings_key, default=False):
2164 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2162 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2165 settings = settings_model.get_general_settings()
2163 settings = settings_model.get_general_settings()
2166 return settings.get(settings_key, default)
2164 return settings.get(settings_key, default)
2167
2165
2168 def _log_audit_action(self, action, action_data, user, pull_request):
2166 def _log_audit_action(self, action, action_data, user, pull_request):
2169 audit_logger.store(
2167 audit_logger.store(
2170 action=action,
2168 action=action,
2171 action_data=action_data,
2169 action_data=action_data,
2172 user=user,
2170 user=user,
2173 repo=pull_request.target_repo)
2171 repo=pull_request.target_repo)
2174
2172
2175 def get_reviewer_functions(self):
2173 def get_reviewer_functions(self):
2176 """
2174 """
2177 Fetches functions for validation and fetching default reviewers.
2175 Fetches functions for validation and fetching default reviewers.
2178 If available we use the EE package, else we fallback to CE
2176 If available we use the EE package, else we fallback to CE
2179 package functions
2177 package functions
2180 """
2178 """
2181 try:
2179 try:
2182 from rc_reviewers.utils import get_default_reviewers_data
2180 from rc_reviewers.utils import get_default_reviewers_data
2183 from rc_reviewers.utils import validate_default_reviewers
2181 from rc_reviewers.utils import validate_default_reviewers
2184 from rc_reviewers.utils import validate_observers
2182 from rc_reviewers.utils import validate_observers
2185 except ImportError:
2183 except ImportError:
2186 from rhodecode.apps.repository.utils import get_default_reviewers_data
2184 from rhodecode.apps.repository.utils import get_default_reviewers_data
2187 from rhodecode.apps.repository.utils import validate_default_reviewers
2185 from rhodecode.apps.repository.utils import validate_default_reviewers
2188 from rhodecode.apps.repository.utils import validate_observers
2186 from rhodecode.apps.repository.utils import validate_observers
2189
2187
2190 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2188 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2191
2189
2192
2190
2193 class MergeCheck(object):
2191 class MergeCheck(object):
2194 """
2192 """
2195 Perform Merge Checks and returns a check object which stores information
2193 Perform Merge Checks and returns a check object which stores information
2196 about merge errors, and merge conditions
2194 about merge errors, and merge conditions
2197 """
2195 """
2198 TODO_CHECK = 'todo'
2196 TODO_CHECK = 'todo'
2199 PERM_CHECK = 'perm'
2197 PERM_CHECK = 'perm'
2200 REVIEW_CHECK = 'review'
2198 REVIEW_CHECK = 'review'
2201 MERGE_CHECK = 'merge'
2199 MERGE_CHECK = 'merge'
2202 WIP_CHECK = 'wip'
2200 WIP_CHECK = 'wip'
2203
2201
2204 def __init__(self):
2202 def __init__(self):
2205 self.review_status = None
2203 self.review_status = None
2206 self.merge_possible = None
2204 self.merge_possible = None
2207 self.merge_msg = ''
2205 self.merge_msg = ''
2208 self.merge_response = None
2206 self.merge_response = None
2209 self.failed = None
2207 self.failed = None
2210 self.errors = []
2208 self.errors = []
2211 self.error_details = OrderedDict()
2209 self.error_details = OrderedDict()
2212 self.source_commit = AttributeDict()
2210 self.source_commit = AttributeDict()
2213 self.target_commit = AttributeDict()
2211 self.target_commit = AttributeDict()
2214 self.reviewers_count = 0
2212 self.reviewers_count = 0
2215 self.observers_count = 0
2213 self.observers_count = 0
2216
2214
2217 def __repr__(self):
2215 def __repr__(self):
2218 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2216 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2219 self.merge_possible, self.failed, self.errors)
2217 self.merge_possible, self.failed, self.errors)
2220
2218
2221 def push_error(self, error_type, message, error_key, details):
2219 def push_error(self, error_type, message, error_key, details):
2222 self.failed = True
2220 self.failed = True
2223 self.errors.append([error_type, message])
2221 self.errors.append([error_type, message])
2224 self.error_details[error_key] = dict(
2222 self.error_details[error_key] = dict(
2225 details=details,
2223 details=details,
2226 error_type=error_type,
2224 error_type=error_type,
2227 message=message
2225 message=message
2228 )
2226 )
2229
2227
2230 @classmethod
2228 @classmethod
2231 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2229 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2232 force_shadow_repo_refresh=False):
2230 force_shadow_repo_refresh=False):
2233 _ = translator
2231 _ = translator
2234 merge_check = cls()
2232 merge_check = cls()
2235
2233
2236 # title has WIP:
2234 # title has WIP:
2237 if pull_request.work_in_progress:
2235 if pull_request.work_in_progress:
2238 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2236 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2239
2237
2240 msg = _('WIP marker in title prevents from accidental merge.')
2238 msg = _('WIP marker in title prevents from accidental merge.')
2241 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2239 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2242 if fail_early:
2240 if fail_early:
2243 return merge_check
2241 return merge_check
2244
2242
2245 # permissions to merge
2243 # permissions to merge
2246 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2244 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2247 if not user_allowed_to_merge:
2245 if not user_allowed_to_merge:
2248 log.debug("MergeCheck: cannot merge, approval is pending.")
2246 log.debug("MergeCheck: cannot merge, approval is pending.")
2249
2247
2250 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2248 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2251 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2249 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2252 if fail_early:
2250 if fail_early:
2253 return merge_check
2251 return merge_check
2254
2252
2255 # permission to merge into the target branch
2253 # permission to merge into the target branch
2256 target_commit_id = pull_request.target_ref_parts.commit_id
2254 target_commit_id = pull_request.target_ref_parts.commit_id
2257 if pull_request.target_ref_parts.type == 'branch':
2255 if pull_request.target_ref_parts.type == 'branch':
2258 branch_name = pull_request.target_ref_parts.name
2256 branch_name = pull_request.target_ref_parts.name
2259 else:
2257 else:
2260 # for mercurial we can always figure out the branch from the commit
2258 # for mercurial we can always figure out the branch from the commit
2261 # in case of bookmark
2259 # in case of bookmark
2262 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2260 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2263 branch_name = target_commit.branch
2261 branch_name = target_commit.branch
2264
2262
2265 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2263 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2266 pull_request.target_repo.repo_name, branch_name)
2264 pull_request.target_repo.repo_name, branch_name)
2267 if branch_perm and branch_perm == 'branch.none':
2265 if branch_perm and branch_perm == 'branch.none':
2268 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2266 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2269 branch_name, rule)
2267 branch_name, rule)
2270 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2268 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2271 if fail_early:
2269 if fail_early:
2272 return merge_check
2270 return merge_check
2273
2271
2274 # review status, must be always present
2272 # review status, must be always present
2275 review_status = pull_request.calculated_review_status()
2273 review_status = pull_request.calculated_review_status()
2276 merge_check.review_status = review_status
2274 merge_check.review_status = review_status
2277 merge_check.reviewers_count = pull_request.reviewers_count
2275 merge_check.reviewers_count = pull_request.reviewers_count
2278 merge_check.observers_count = pull_request.observers_count
2276 merge_check.observers_count = pull_request.observers_count
2279
2277
2280 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2278 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2281 if not status_approved and merge_check.reviewers_count:
2279 if not status_approved and merge_check.reviewers_count:
2282 log.debug("MergeCheck: cannot merge, approval is pending.")
2280 log.debug("MergeCheck: cannot merge, approval is pending.")
2283 msg = _('Pull request reviewer approval is pending.')
2281 msg = _('Pull request reviewer approval is pending.')
2284
2282
2285 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2283 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2286
2284
2287 if fail_early:
2285 if fail_early:
2288 return merge_check
2286 return merge_check
2289
2287
2290 # left over TODOs
2288 # left over TODOs
2291 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2289 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2292 if todos:
2290 if todos:
2293 log.debug("MergeCheck: cannot merge, {} "
2291 log.debug("MergeCheck: cannot merge, {} "
2294 "unresolved TODOs left.".format(len(todos)))
2292 "unresolved TODOs left.".format(len(todos)))
2295
2293
2296 if len(todos) == 1:
2294 if len(todos) == 1:
2297 msg = _('Cannot merge, {} TODO still not resolved.').format(
2295 msg = _('Cannot merge, {} TODO still not resolved.').format(
2298 len(todos))
2296 len(todos))
2299 else:
2297 else:
2300 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2298 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2301 len(todos))
2299 len(todos))
2302
2300
2303 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2301 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2304
2302
2305 if fail_early:
2303 if fail_early:
2306 return merge_check
2304 return merge_check
2307
2305
2308 # merge possible, here is the filesystem simulation + shadow repo
2306 # merge possible, here is the filesystem simulation + shadow repo
2309 merge_response, merge_status, msg = PullRequestModel().merge_status(
2307 merge_response, merge_status, msg = PullRequestModel().merge_status(
2310 pull_request, translator=translator,
2308 pull_request, translator=translator,
2311 force_shadow_repo_refresh=force_shadow_repo_refresh)
2309 force_shadow_repo_refresh=force_shadow_repo_refresh)
2312
2310
2313 merge_check.merge_possible = merge_status
2311 merge_check.merge_possible = merge_status
2314 merge_check.merge_msg = msg
2312 merge_check.merge_msg = msg
2315 merge_check.merge_response = merge_response
2313 merge_check.merge_response = merge_response
2316
2314
2317 source_ref_id = pull_request.source_ref_parts.commit_id
2315 source_ref_id = pull_request.source_ref_parts.commit_id
2318 target_ref_id = pull_request.target_ref_parts.commit_id
2316 target_ref_id = pull_request.target_ref_parts.commit_id
2319
2317
2320 try:
2318 try:
2321 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2319 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2322 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2320 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2323 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2321 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2324 merge_check.source_commit.current_raw_id = source_commit.raw_id
2322 merge_check.source_commit.current_raw_id = source_commit.raw_id
2325 merge_check.source_commit.previous_raw_id = source_ref_id
2323 merge_check.source_commit.previous_raw_id = source_ref_id
2326
2324
2327 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2325 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2328 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2326 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2329 merge_check.target_commit.current_raw_id = target_commit.raw_id
2327 merge_check.target_commit.current_raw_id = target_commit.raw_id
2330 merge_check.target_commit.previous_raw_id = target_ref_id
2328 merge_check.target_commit.previous_raw_id = target_ref_id
2331 except (SourceRefMissing, TargetRefMissing):
2329 except (SourceRefMissing, TargetRefMissing):
2332 pass
2330 pass
2333
2331
2334 if not merge_status:
2332 if not merge_status:
2335 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2333 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2336 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2334 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2337
2335
2338 if fail_early:
2336 if fail_early:
2339 return merge_check
2337 return merge_check
2340
2338
2341 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2339 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2342 return merge_check
2340 return merge_check
2343
2341
2344 @classmethod
2342 @classmethod
2345 def get_merge_conditions(cls, pull_request, translator):
2343 def get_merge_conditions(cls, pull_request, translator):
2346 _ = translator
2344 _ = translator
2347 merge_details = {}
2345 merge_details = {}
2348
2346
2349 model = PullRequestModel()
2347 model = PullRequestModel()
2350 use_rebase = model._use_rebase_for_merging(pull_request)
2348 use_rebase = model._use_rebase_for_merging(pull_request)
2351
2349
2352 if use_rebase:
2350 if use_rebase:
2353 merge_details['merge_strategy'] = dict(
2351 merge_details['merge_strategy'] = dict(
2354 details={},
2352 details={},
2355 message=_('Merge strategy: rebase')
2353 message=_('Merge strategy: rebase')
2356 )
2354 )
2357 else:
2355 else:
2358 merge_details['merge_strategy'] = dict(
2356 merge_details['merge_strategy'] = dict(
2359 details={},
2357 details={},
2360 message=_('Merge strategy: explicit merge commit')
2358 message=_('Merge strategy: explicit merge commit')
2361 )
2359 )
2362
2360
2363 close_branch = model._close_branch_before_merging(pull_request)
2361 close_branch = model._close_branch_before_merging(pull_request)
2364 if close_branch:
2362 if close_branch:
2365 repo_type = pull_request.target_repo.repo_type
2363 repo_type = pull_request.target_repo.repo_type
2366 close_msg = ''
2364 close_msg = ''
2367 if repo_type == 'hg':
2365 if repo_type == 'hg':
2368 close_msg = _('Source branch will be closed before the merge.')
2366 close_msg = _('Source branch will be closed before the merge.')
2369 elif repo_type == 'git':
2367 elif repo_type == 'git':
2370 close_msg = _('Source branch will be deleted after the merge.')
2368 close_msg = _('Source branch will be deleted after the merge.')
2371
2369
2372 merge_details['close_branch'] = dict(
2370 merge_details['close_branch'] = dict(
2373 details={},
2371 details={},
2374 message=close_msg
2372 message=close_msg
2375 )
2373 )
2376
2374
2377 return merge_details
2375 return merge_details
2378
2376
2379
2377
2380 @dataclasses.dataclass
2378 @dataclasses.dataclass
2381 class ChangeTuple:
2379 class ChangeTuple:
2382 added: list
2380 added: list
2383 common: list
2381 common: list
2384 removed: list
2382 removed: list
2385 total: list
2383 total: list
2386
2384
2387
2385
2388 @dataclasses.dataclass
2386 @dataclasses.dataclass
2389 class FileChangeTuple:
2387 class FileChangeTuple:
2390 added: list
2388 added: list
2391 modified: list
2389 modified: list
2392 removed: list
2390 removed: list
@@ -1,147 +1,147 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import datetime
20 import datetime
21
21
22 import pytest
22 import pytest
23
23
24 from rhodecode.lib.vcs.nodes import FileNode
24 from rhodecode.lib.vcs.nodes import FileNode
25 from rhodecode.tests.vcs.conftest import BackendTestMixin
25 from rhodecode.tests.vcs.conftest import BackendTestMixin
26
26
27
27
28 @pytest.mark.usefixtures("vcs_repository_support")
28 @pytest.mark.usefixtures("vcs_repository_support")
29 class TestBranches(BackendTestMixin):
29 class TestBranches(BackendTestMixin):
30
30
31 def test_empty_repository_has_no_branches(self, vcsbackend):
31 def test_empty_repository_has_no_branches(self, vcsbackend):
32 empty_repo = vcsbackend.create_repo()
32 empty_repo = vcsbackend.create_repo()
33 assert empty_repo.branches == {}
33 assert empty_repo.branches == {}
34
34
35 def test_branches_all(self, vcsbackend):
35 def test_branches_all(self, vcsbackend):
36 branch_count = {
36 branch_count = {
37 'git': 1,
37 'git': 1,
38 'hg': 1,
38 'hg': 1,
39 'svn': 0,
39 'svn': 0,
40 }
40 }
41 assert len(self.repo.branches_all) == branch_count[vcsbackend.alias]
41 assert len(self.repo.branches_all) == branch_count[vcsbackend.alias]
42
42
43 def test_closed_branches(self):
43 def test_closed_branches(self):
44 assert len(self.repo.branches_closed) == 0
44 assert len(self.repo.branches_closed) == 0
45
45
46 def test_simple(self, local_dt_to_utc):
46 def test_simple(self, local_dt_to_utc):
47 tip = self.repo.get_commit()
47 tip = self.repo.get_commit()
48 assert tip.message == 'Changes...'
48 assert tip.message == 'Changes...'
49 assert tip.date == local_dt_to_utc(datetime.datetime(2010, 1, 1, 21))
49 assert tip.date == local_dt_to_utc(datetime.datetime(2010, 1, 1, 21))
50
50
51 @pytest.mark.backends("git", "hg")
51 @pytest.mark.backends("git", "hg")
52 def test_new_branch(self):
52 def test_new_branch(self):
53 # This check must not be removed to ensure the 'branches' LazyProperty
53 # This check must not be removed to ensure the 'branches' LazyProperty
54 # gets hit *before* the new 'foobar' branch got created:
54 # gets hit *before* the new 'foobar' branch got created:
55 assert 'foobar' not in self.repo.branches
55 assert 'foobar' not in self.repo.branches
56 self.imc.add(
56 self.imc.add(
57 FileNode(b'docs/index.txt', content=b'Documentation\n')
57 FileNode(b'docs/index.txt', content=b'Documentation\n')
58 )
58 )
59 foobar_tip = self.imc.commit(
59 foobar_tip = self.imc.commit(
60 message=u'New branch: foobar',
60 message='New branch: foobar',
61 author=u'joe <joe@rhodecode.com>',
61 author='joe <joe@rhodecode.com>',
62 branch='foobar',
62 branch='foobar',
63 )
63 )
64 assert 'foobar' in self.repo.branches
64 assert 'foobar' in self.repo.branches
65 assert foobar_tip.branch == 'foobar'
65 assert foobar_tip.branch == 'foobar'
66
66
67 @pytest.mark.backends("git", "hg")
67 @pytest.mark.backends("git", "hg")
68 def test_new_head(self):
68 def test_new_head(self):
69 tip = self.repo.get_commit()
69 tip = self.repo.get_commit()
70 self.imc.add(
70 self.imc.add(
71 FileNode(b'docs/index.txt',
71 FileNode(b'docs/index.txt',
72 content=b'Documentation\n')
72 content=b'Documentation\n')
73 )
73 )
74 foobar_tip = self.imc.commit(
74 foobar_tip = self.imc.commit(
75 message=u'New branch: foobar',
75 message='New branch: foobar',
76 author=u'joe <joe@rhodecode.com>',
76 author='joe <joe@rhodecode.com>',
77 branch='foobar',
77 branch='foobar',
78 parents=[tip],
78 parents=[tip],
79 )
79 )
80 self.imc.change(FileNode(
80 self.imc.change(FileNode(
81 b'docs/index.txt',
81 b'docs/index.txt',
82 content=b'Documentation\nand more...\n'))
82 content=b'Documentation\nand more...\n'))
83 newtip = self.imc.commit(
83 newtip = self.imc.commit(
84 message=u'At default branch',
84 message=u'At default branch',
85 author=u'joe <joe@rhodecode.com>',
85 author=u'joe <joe@rhodecode.com>',
86 branch=foobar_tip.branch,
86 branch=foobar_tip.branch,
87 parents=[foobar_tip],
87 parents=[foobar_tip],
88 )
88 )
89
89
90 newest_tip = self.imc.commit(
90 newest_tip = self.imc.commit(
91 message=u'Merged with %s' % foobar_tip.raw_id,
91 message=u'Merged with %s' % foobar_tip.raw_id,
92 author=u'joe <joe@rhodecode.com>',
92 author=u'joe <joe@rhodecode.com>',
93 branch=self.backend_class.DEFAULT_BRANCH_NAME,
93 branch=self.backend_class.DEFAULT_BRANCH_NAME,
94 parents=[newtip, foobar_tip],
94 parents=[newtip, foobar_tip],
95 )
95 )
96
96
97 assert newest_tip.branch == \
97 assert newest_tip.branch == \
98 self.backend_class.DEFAULT_BRANCH_NAME
98 self.backend_class.DEFAULT_BRANCH_NAME
99
99
100 @pytest.mark.backends("git", "hg")
100 @pytest.mark.backends("git", "hg")
101 def test_branch_with_slash_in_name(self):
101 def test_branch_with_slash_in_name(self):
102 self.imc.add(FileNode(b'extrafile', content=b'Some data\n'))
102 self.imc.add(FileNode(b'extrafile', content=b'Some data\n'))
103 self.imc.commit(
103 self.imc.commit(
104 u'Branch with a slash!', author=u'joe <joe@rhodecode.com>',
104 u'Branch with a slash!', author=u'joe <joe@rhodecode.com>',
105 branch='issue/123')
105 branch='issue/123')
106 assert 'issue/123' in self.repo.branches
106 assert 'issue/123' in self.repo.branches
107
107
108 @pytest.mark.backends("git", "hg")
108 @pytest.mark.backends("git", "hg")
109 def test_branch_with_slash_in_name_and_similar_without(self):
109 def test_branch_with_slash_in_name_and_similar_without(self):
110 self.imc.add(FileNode(b'extrafile', content=b'Some data\n'))
110 self.imc.add(FileNode(b'extrafile', content=b'Some data\n'))
111 self.imc.commit(
111 self.imc.commit(
112 u'Branch with a slash!', author=u'joe <joe@rhodecode.com>',
112 u'Branch with a slash!', author=u'joe <joe@rhodecode.com>',
113 branch='issue/123')
113 branch='issue/123')
114 self.imc.add(FileNode(b'extrafile II', content=b'Some data\n'))
114 self.imc.add(FileNode(b'extrafile II', content=b'Some data\n'))
115 self.imc.commit(
115 self.imc.commit(
116 u'Branch without a slash...', author=u'joe <joe@rhodecode.com>',
116 u'Branch without a slash...', author=u'joe <joe@rhodecode.com>',
117 branch='123')
117 branch='123')
118 assert 'issue/123' in self.repo.branches
118 assert 'issue/123' in self.repo.branches
119 assert '123' in self.repo.branches
119 assert '123' in self.repo.branches
120
120
121
121
122 class TestSvnBranches(object):
122 class TestSvnBranches(object):
123
123
124 def test_empty_repository_has_no_tags_and_branches(self, vcsbackend_svn):
124 def test_empty_repository_has_no_tags_and_branches(self, vcsbackend_svn):
125 empty_repo = vcsbackend_svn.create_repo()
125 empty_repo = vcsbackend_svn.create_repo()
126 assert empty_repo.branches == {}
126 assert empty_repo.branches == {}
127 assert empty_repo.tags == {}
127 assert empty_repo.tags == {}
128
128
129 def test_missing_structure_has_no_tags_and_branches(self, vcsbackend_svn):
129 def test_missing_structure_has_no_tags_and_branches(self, vcsbackend_svn):
130 repo = vcsbackend_svn.create_repo(number_of_commits=1)
130 repo = vcsbackend_svn.create_repo(number_of_commits=1)
131 assert repo.branches == {}
131 assert repo.branches == {}
132 assert repo.tags == {}
132 assert repo.tags == {}
133
133
134 def test_discovers_ordered_branches(self, vcsbackend_svn):
134 def test_discovers_ordered_branches(self, vcsbackend_svn):
135 repo = vcsbackend_svn['svn-simple-layout']
135 repo = vcsbackend_svn['svn-simple-layout']
136 expected_branches = [
136 expected_branches = [
137 'branches/add-docs',
137 'branches/add-docs',
138 'branches/argparse',
138 'branches/argparse',
139 'trunk',
139 'trunk',
140 ]
140 ]
141 assert list(repo.branches.keys()) == expected_branches
141 assert list(repo.branches.keys()) == expected_branches
142
142
143 def test_discovers_ordered_tags(self, vcsbackend_svn):
143 def test_discovers_ordered_tags(self, vcsbackend_svn):
144 repo = vcsbackend_svn['svn-simple-layout']
144 repo = vcsbackend_svn['svn-simple-layout']
145 expected_tags = [
145 expected_tags = [
146 'tags/v0.1', 'tags/v0.2', 'tags/v0.3', 'tags/v0.5']
146 'tags/v0.1', 'tags/v0.2', 'tags/v0.3', 'tags/v0.5']
147 assert list(repo.tags.keys()) == expected_tags
147 assert list(repo.tags.keys()) == expected_tags
General Comments 0
You need to be logged in to leave comments. Login now