##// END OF EJS Templates
vcs: use a real two part name for merge operation....
marcink -
r3040:dcaa9d67 stable
parent child Browse files
Show More
@@ -1,1749 +1,1749 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24
24
25 import collections
25 import collections
26 import datetime
26 import datetime
27 import fnmatch
27 import fnmatch
28 import itertools
28 import itertools
29 import logging
29 import logging
30 import os
30 import os
31 import re
31 import re
32 import time
32 import time
33 import warnings
33 import warnings
34 import shutil
34 import shutil
35
35
36 from zope.cachedescriptors.property import Lazy as LazyProperty
36 from zope.cachedescriptors.property import Lazy as LazyProperty
37
37
38 from rhodecode.lib.utils2 import safe_str, safe_unicode
38 from rhodecode.lib.utils2 import safe_str, safe_unicode
39 from rhodecode.lib.vcs import connection
39 from rhodecode.lib.vcs import connection
40 from rhodecode.lib.vcs.utils import author_name, author_email
40 from rhodecode.lib.vcs.utils import author_name, author_email
41 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.conf import settings
42 from rhodecode.lib.vcs.exceptions import (
42 from rhodecode.lib.vcs.exceptions import (
43 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
43 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
44 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
44 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
45 NodeDoesNotExistError, NodeNotChangedError, VCSError,
45 NodeDoesNotExistError, NodeNotChangedError, VCSError,
46 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
46 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
47 RepositoryError)
47 RepositoryError)
48
48
49
49
50 log = logging.getLogger(__name__)
50 log = logging.getLogger(__name__)
51
51
52
52
53 FILEMODE_DEFAULT = 0100644
53 FILEMODE_DEFAULT = 0100644
54 FILEMODE_EXECUTABLE = 0100755
54 FILEMODE_EXECUTABLE = 0100755
55
55
56 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
56 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
57 MergeResponse = collections.namedtuple(
57 MergeResponse = collections.namedtuple(
58 'MergeResponse',
58 'MergeResponse',
59 ('possible', 'executed', 'merge_ref', 'failure_reason'))
59 ('possible', 'executed', 'merge_ref', 'failure_reason'))
60
60
61
61
class MergeFailureReason(object):
    """
    Enumeration with all the reasons why the server side merge could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The merge was not successful, there are conflicts.
    MERGE_FAILED = 2

    # The merge succeeded but we could not push it to the target repository.
    PUSH_FAILED = 3

    # The specified target is not a head in the target repository.
    TARGET_IS_NOT_HEAD = 4

    # The source repository contains more branches than the target. Pushing
    # the merge will create additional branches in the target.
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # The target reference has multiple heads. That does not allow to correctly
    # identify the target location. This could only happen for mercurial
    # branches.
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # The target repository is locked.
    TARGET_IS_LOCKED = 7

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # An involved commit could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    # The target repo reference is missing.
    MISSING_TARGET_REF = 9

    # The source repo reference is missing.
    MISSING_SOURCE_REF = 10

    # The merge was not successful, there are conflicts related to sub
    # repositories.
    SUBREPO_MERGE_FAILED = 11
113
113
114
114
class UpdateFailureReason(object):
    """
    Enumeration with all the reasons why the pull request update could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The pull request is up to date.
    NO_CHANGE = 2

    # The pull request has a reference type that is not supported for update.
    WRONG_REF_TYPE = 3

    # Update failed because the target reference is missing.
    MISSING_TARGET_REF = 4

    # Update failed because the source reference is missing.
    MISSING_SOURCE_REF = 5
143
143
144
144
145 class BaseRepository(object):
145 class BaseRepository(object):
146 """
146 """
147 Base Repository for final backends
147 Base Repository for final backends
148
148
149 .. attribute:: DEFAULT_BRANCH_NAME
149 .. attribute:: DEFAULT_BRANCH_NAME
150
150
151 name of default branch (i.e. "trunk" for svn, "master" for git etc.
151 name of default branch (i.e. "trunk" for svn, "master" for git etc.
152
152
153 .. attribute:: commit_ids
153 .. attribute:: commit_ids
154
154
155 list of all available commit ids, in ascending order
155 list of all available commit ids, in ascending order
156
156
157 .. attribute:: path
157 .. attribute:: path
158
158
159 absolute path to the repository
159 absolute path to the repository
160
160
161 .. attribute:: bookmarks
161 .. attribute:: bookmarks
162
162
163 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
163 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
164 there are no bookmarks or the backend implementation does not support
164 there are no bookmarks or the backend implementation does not support
165 bookmarks.
165 bookmarks.
166
166
167 .. attribute:: tags
167 .. attribute:: tags
168
168
169 Mapping from name to :term:`Commit ID` of the tag.
169 Mapping from name to :term:`Commit ID` of the tag.
170
170
171 """
171 """
172
172
173 DEFAULT_BRANCH_NAME = None
173 DEFAULT_BRANCH_NAME = None
174 DEFAULT_CONTACT = u"Unknown"
174 DEFAULT_CONTACT = u"Unknown"
175 DEFAULT_DESCRIPTION = u"unknown"
175 DEFAULT_DESCRIPTION = u"unknown"
176 EMPTY_COMMIT_ID = '0' * 40
176 EMPTY_COMMIT_ID = '0' * 40
177
177
178 path = None
178 path = None
179
179
    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if repository could
        not be found at the given ``repo_path``, or if a directory at
        ``repo_path`` already exists and ``create`` is set to True.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, would try to create repository.
        :param src_url=None: if set, should be proper url from which repository
            would be cloned; requires ``create`` parameter to be set to True -
            raises RepositoryError if src_url is set and create evaluates to
            False
        """
        # Abstract: each backend (git/hg/svn) provides its own constructor.
        raise NotImplementedError
195
195
196 def __repr__(self):
196 def __repr__(self):
197 return '<%s at %s>' % (self.__class__.__name__, self.path)
197 return '<%s at %s>' % (self.__class__.__name__, self.path)
198
198
    def __len__(self):
        # Number of commits in the repository; delegates to count().
        return self.count()
201
201
202 def __eq__(self, other):
202 def __eq__(self, other):
203 same_instance = isinstance(other, self.__class__)
203 same_instance = isinstance(other, self.__class__)
204 return same_instance and other.path == self.path
204 return same_instance and other.path == self.path
205
205
    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__, so define it explicitly.
        return not self.__eq__(other)
208
208
209 def get_create_shadow_cache_pr_path(self, db_repo):
209 def get_create_shadow_cache_pr_path(self, db_repo):
210 path = db_repo.cached_diffs_dir
210 path = db_repo.cached_diffs_dir
211 if not os.path.exists(path):
211 if not os.path.exists(path):
212 os.makedirs(path, 0755)
212 os.makedirs(path, 0755)
213 return path
213 return path
214
214
215 @classmethod
215 @classmethod
216 def get_default_config(cls, default=None):
216 def get_default_config(cls, default=None):
217 config = Config()
217 config = Config()
218 if default and isinstance(default, list):
218 if default and isinstance(default, list):
219 for section, key, val in default:
219 for section, key, val in default:
220 config.set(section, key, val)
220 config.set(section, key, val)
221 return config
221 return config
222
222
    @LazyProperty
    def _remote(self):
        # Abstract: handle to the vcsserver-side remote object for this repo.
        raise NotImplementedError
226
226
    @LazyProperty
    def EMPTY_COMMIT(self):
        # Sentinel commit representing the empty repository state.
        return EmptyCommit(self.EMPTY_COMMIT_ID)
230
230
231 @LazyProperty
231 @LazyProperty
232 def alias(self):
232 def alias(self):
233 for k, v in settings.BACKENDS.items():
233 for k, v in settings.BACKENDS.items():
234 if v.split('.')[-1] == str(self.__class__.__name__):
234 if v.split('.')[-1] == str(self.__class__.__name__):
235 return k
235 return k
236
236
    @LazyProperty
    def name(self):
        # Repository name: the last path segment, as unicode.
        return safe_unicode(os.path.basename(self.path))
240
240
    @LazyProperty
    def description(self):
        # Abstract: backend-specific repository description text.
        raise NotImplementedError
244
244
245 def refs(self):
245 def refs(self):
246 """
246 """
247 returns a `dict` with branches, bookmarks, tags, and closed_branches
247 returns a `dict` with branches, bookmarks, tags, and closed_branches
248 for this repository
248 for this repository
249 """
249 """
250 return dict(
250 return dict(
251 branches=self.branches,
251 branches=self.branches,
252 branches_closed=self.branches_closed,
252 branches_closed=self.branches_closed,
253 tags=self.tags,
253 tags=self.tags,
254 bookmarks=self.bookmarks
254 bookmarks=self.bookmarks
255 )
255 )
256
256
    @LazyProperty
    def branches(self):
        """
        A `dict` which maps branch names to commit ids.
        """
        # Abstract: implemented per backend.
        raise NotImplementedError
263
263
    @LazyProperty
    def branches_closed(self):
        """
        A `dict` which maps closed branch names to commit ids.
        """
        # Abstract: implemented per backend (only meaningful for mercurial).
        raise NotImplementedError
270
270
    @LazyProperty
    def bookmarks(self):
        """
        A `dict` which maps bookmark names to commit ids.
        """
        # Abstract: implemented per backend (only meaningful for mercurial).
        raise NotImplementedError
277
277
    @LazyProperty
    def tags(self):
        """
        A `dict` which maps tags names to commit ids.
        """
        # Abstract: implemented per backend.
        raise NotImplementedError
284
284
285 @LazyProperty
285 @LazyProperty
286 def size(self):
286 def size(self):
287 """
287 """
288 Returns combined size in bytes for all repository files
288 Returns combined size in bytes for all repository files
289 """
289 """
290 tip = self.get_commit()
290 tip = self.get_commit()
291 return tip.size
291 return tip.size
292
292
293 def size_at_commit(self, commit_id):
293 def size_at_commit(self, commit_id):
294 commit = self.get_commit(commit_id)
294 commit = self.get_commit(commit_id)
295 return commit.size
295 return commit.size
296
296
297 def is_empty(self):
297 def is_empty(self):
298 return not bool(self.commit_ids)
298 return not bool(self.commit_ids)
299
299
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link.
        """
        # Abstract: backends validate clone/import URLs their own way.
        raise NotImplementedError
307
307
    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend
        """
        # Abstract: e.g. git checks for .git, hg for .hg, etc.
        raise NotImplementedError
314
314
315 # ==========================================================================
315 # ==========================================================================
316 # COMMITS
316 # COMMITS
317 # ==========================================================================
317 # ==========================================================================
318
318
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, most recent commit is returned.

        :param pre_load: Optional. List of commit attributes to load.

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        # Abstract: implemented per backend.
        raise NotImplementedError
329
329
330 def __iter__(self):
330 def __iter__(self):
331 for commit_id in self.commit_ids:
331 for commit_id in self.commit_ids:
332 yield self.get_commit(commit_id=commit_id)
332 yield self.get_commit(commit_id=commit_id)
333
333
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None):
        """
        Returns iterator of `BaseCommit` objects from start to end
        not inclusive. This should behave just like a list, ie. end is not
        inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date: None or datetime, lower bound on commit date
        :param end_date: None or datetime, upper bound on commit date
        :param branch_name: None or str, limit commits to a single branch
        :param show_hidden: bool, include obsolete/hidden commits (hg)
        :param pre_load: Optional. List of commit attributes to load.
        """
        # Abstract: implemented per backend.
        raise NotImplementedError
351
351
352 def __getitem__(self, key):
352 def __getitem__(self, key):
353 """
353 """
354 Allows index based access to the commit objects of this repository.
354 Allows index based access to the commit objects of this repository.
355 """
355 """
356 pre_load = ["author", "branch", "date", "message", "parents"]
356 pre_load = ["author", "branch", "date", "message", "parents"]
357 if isinstance(key, slice):
357 if isinstance(key, slice):
358 return self._get_range(key, pre_load)
358 return self._get_range(key, pre_load)
359 return self.get_commit(commit_idx=key, pre_load=pre_load)
359 return self.get_commit(commit_idx=key, pre_load=pre_load)
360
360
361 def _get_range(self, slice_obj, pre_load):
361 def _get_range(self, slice_obj, pre_load):
362 for commit_id in self.commit_ids.__getitem__(slice_obj):
362 for commit_id in self.commit_ids.__getitem__(slice_obj):
363 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
363 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
364
364
    def count(self):
        # Total number of commits in this repository.
        return len(self.commit_ids)
367
367
    def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        # Abstract: implemented per backend.
        raise NotImplementedError
381
381
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        # Abstract: implemented per backend.
        raise NotImplementedError
394
394
    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
            ``self.EMPTY_COMMIT`` - in this case, patch showing all
            the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: Can be set to a path of a file to create a diff of that
            file. If `path1` is also set, this value is only associated to
            `commit2`.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
            changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
            shown. Defaults to ``3``.
        :param path1: Can be set to a path to associate with `commit1`. This
            parameter works only for backends which support diff generation for
            different paths. Other backends will raise a `ValueError` if `path1`
            is set and has a different value than `path`.
        :param file_path: filter this diff by given path pattern
        """
        # NOTE(review): the ``file_path`` param documented above does not
        # appear in the signature — presumably it refers to ``path``; confirm.
        raise NotImplementedError
420
420
    def strip(self, commit_id, branch=None):
        """
        Strip given commit_id from the repository
        """
        # Abstract: destructive history rewrite, implemented per backend.
        raise NotImplementedError
426
426
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return a latest common ancestor commit if one exists for this repo
        `commit_id1` vs `commit_id2` from `repo2`.

        :param commit_id1: Commit id from this repository to use as a
            target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        """
        # Abstract: implemented per backend.
        raise NotImplementedError
438
438
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Compare this repository's revision `commit_id1` with `commit_id2`.

        Returns a tuple(commits, ancestor) that would be merged from
        `commit_id2`. Doing a normal compare (``merge=False``), ``None``
        will be returned as ancestor.

        :param commit_id1: Commit id from this repository to use as a
            target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        :param merge: If set to ``True`` will do a merge compare which also
            returns the common ancestor.
        :param pre_load: Optional. List of commit attributes to load.
        """
        # Abstract: implemented per backend.
        raise NotImplementedError
456
456
    def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
              user_name='', user_email='', message='', dry_run=False,
              use_rebase=False, close_branch=False):
        """
        Merge the revisions specified in `source_ref` from `source_repo`
        onto the `target_ref` of this repository.

        `source_ref` and `target_ref` are named tuples with the following
        fields `type`, `name` and `commit_id`.

        Returns a MergeResponse named tuple with the following fields
        'possible', 'executed', 'source_commit', 'target_commit',
        'merge_commit'.

        :param repo_id: `repo_id` target repo id.
        :param workspace_id: `workspace_id` unique identifier.
        :param target_ref: `target_ref` points to the commit on top of which
            the `source_ref` should be merged.
        :param source_repo: The repository that contains the commits to be
            merged.
        :param source_ref: `source_ref` points to the topmost commit from
            the `source_repo` which should be merged.
        :param user_name: Merge commit `user_name`.
        :param user_email: Merge commit `user_email`.
        :param message: Merge commit `message`.
        :param dry_run: If `True` the merge will not take place.
        :param use_rebase: If `True` commits from the source will be rebased
            on top of the target instead of being merged.
        :param close_branch: If `True` branch will be close before merging it
        """
        if dry_run:
            # Dry runs may omit author/message; fall back to the placeholder
            # identity defined in the shared vcs settings.
            message = message or settings.MERGE_DRY_RUN_MESSAGE
            user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
            user_name = user_name or settings.MERGE_DRY_RUN_USER
        else:
            # A real merge creates a commit, so author and message are
            # mandatory.
            if not user_name:
                raise ValueError('user_name cannot be empty')
            if not user_email:
                raise ValueError('user_email cannot be empty')
            if not message:
                raise ValueError('message cannot be empty')

        try:
            return self._merge_repo(
                repo_id, workspace_id, target_ref, source_repo,
                source_ref, message, user_name, user_email, dry_run=dry_run,
                use_rebase=use_rebase, close_branch=close_branch)
        except RepositoryError:
            # Backend-level failures are logged and converted into a
            # MergeResponse so callers never see a raw RepositoryError.
            log.exception(
                'Unexpected failure when running merge, dry-run=%s',
                dry_run)
            return MergeResponse(
                False, False, None, MergeFailureReason.UNKNOWN)
510
510
def _merge_repo(self, repo_id, workspace_id, target_ref,
                source_repo, source_ref, merge_message,
                merger_name, merger_email, dry_run=False,
                use_rebase=False, close_branch=False):
    """
    Internal implementation of merge.

    Concrete backends override this with the actual merge machinery;
    the base class only fixes the contract. See :meth:`merge` for the
    meaning of the individual parameters.
    """
    raise NotImplementedError

def _maybe_prepare_merge_workspace(
        self, repo_id, workspace_id, target_ref, source_ref):
    """
    Create the merge workspace.

    :param workspace_id: `workspace_id` unique identifier.
    """
    raise NotImplementedError
526
526
def _get_legacy_shadow_repository_path(self, workspace_id):
    """
    Legacy version that was used before. We still need it for
    backward compat
    """
    repo_dir, repo_name = os.path.split(self.path)
    return os.path.join(
        repo_dir, '.__shadow_%s_%s' % (repo_name, workspace_id))

def _get_shadow_repository_path(self, repo_id, workspace_id):
    # The name of the shadow repository must start with '.', so it is
    # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
    legacy_repository_path = self._get_legacy_shadow_repository_path(
        workspace_id)
    # prefer an already-existing legacy location for backward compat
    if os.path.exists(legacy_repository_path):
        return legacy_repository_path
    return os.path.join(
        os.path.dirname(self.path),
        '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
546
546
def cleanup_merge_workspace(self, repo_id, workspace_id):
    """
    Remove merge workspace.

    This function MUST not fail in case there is no workspace associated to
    the given `workspace_id`.

    :param workspace_id: `workspace_id` unique identifier.
    """
    shadow_path = self._get_shadow_repository_path(repo_id, workspace_id)

    # no workspace on disk -> nothing to do, and we must not fail
    if not os.path.isdir(shadow_path):
        return

    # Rename first so the directory immediately stops conflicting with the
    # shadow repo path in use; shutil.rmtree had edge-case problems when
    # removing symlinked repositories, so deletion happens on the renamed
    # copy as a second step.
    delete_path = '{}.{}.delete'.format(shadow_path, time.time())
    shutil.move(shadow_path, delete_path)
    try:
        shutil.rmtree(delete_path, ignore_errors=False)
    except Exception:
        log.exception('Failed to gracefully remove shadow repo under %s',
                      delete_path)
        # best effort fallback: ignore remaining errors
        shutil.rmtree(delete_path, ignore_errors=True)
573
573
574 # ========== #
574 # ========== #
575 # COMMIT API #
575 # COMMIT API #
576 # ========== #
576 # ========== #
577
577
@LazyProperty
def in_memory_commit(self):
    """
    Returns :class:`InMemoryCommit` object for this repository.

    Abstract here; each backend provides its own in-memory commit type.
    """
    raise NotImplementedError
584
584
585 # ======================== #
585 # ======================== #
586 # UTILITIES FOR SUBCLASSES #
586 # UTILITIES FOR SUBCLASSES #
587 # ======================== #
587 # ======================== #
588
588
def _validate_diff_commits(self, commit1, commit2):
    """
    Validates that the given commits are related to this repository.

    Intended as a utility for sub classes to have a consistent validation
    of input parameters in methods like :meth:`get_diff`.
    """
    for commit in (commit1, commit2):
        self._validate_commit(commit)
    if (isinstance(commit1, EmptyCommit)
            and isinstance(commit2, EmptyCommit)):
        raise ValueError("Cannot compare two empty commits")

def _validate_commit(self, commit):
    # reject anything that is not a commit object at all
    if not isinstance(commit, BaseCommit):
        raise TypeError(
            "%s is not of type BaseCommit" % repr(commit))
    # EmptyCommit is repository-agnostic and therefore always accepted
    foreign = commit.repository != self
    if foreign and not isinstance(commit, EmptyCommit):
        raise ValueError(
            "Commit %s must be a valid commit from this repository %s, "
            "related to this repository instead %s." %
            (commit, self, commit.repository))

def _validate_commit_id(self, commit_id):
    # NOTE: py2 `basestring` covers both str and unicode ids
    if not isinstance(commit_id, basestring):
        raise TypeError("commit_id must be a string value")

def _validate_commit_idx(self, commit_idx):
    # NOTE: py2 `long` is accepted alongside int
    if not isinstance(commit_idx, (int, long)):
        raise TypeError("commit_idx must be a numeric value")

def _validate_branch_name(self, branch_name):
    # empty / None branch name means "no branch filter" -> nothing to check
    if not branch_name:
        return
    if branch_name not in self.branches_all:
        msg = ("Branch %s not found in %s" % (branch_name, self))
        raise BranchDoesNotExistError(msg)
624
624
625 #
625 #
626 # Supporting deprecated API parts
626 # Supporting deprecated API parts
627 # TODO: johbo: consider to move this into a mixin
627 # TODO: johbo: consider to move this into a mixin
628 #
628 #
629
629
@property
def EMPTY_CHANGESET(self):
    """Deprecated alias kept for backward compatibility."""
    warnings.warn(
        "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
    return self.EMPTY_COMMIT_ID

@property
def revisions(self):
    """Deprecated read accessor; maps onto :attr:`commit_ids`."""
    warnings.warn("Use commits attribute instead", DeprecationWarning)
    return self.commit_ids

@revisions.setter
def revisions(self, value):
    """Deprecated write accessor; maps onto :attr:`commit_ids`."""
    warnings.warn("Use commits attribute instead", DeprecationWarning)
    self.commit_ids = value
645
645
def get_changeset(self, revision=None, pre_load=None):
    """Deprecated wrapper that forwards to :meth:`get_commit`."""
    warnings.warn("Use get_commit instead", DeprecationWarning)
    # a string revision is a commit id, anything else is a numeric index
    if isinstance(revision, basestring):
        commit_id, commit_idx = revision, None
    else:
        commit_id, commit_idx = None, revision
    return self.get_commit(
        commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)

def get_changesets(
        self, start=None, end=None, start_date=None, end_date=None,
        branch_name=None, pre_load=None):
    """Deprecated wrapper that forwards to :meth:`get_commits`."""
    warnings.warn("Use get_commits instead", DeprecationWarning)
    return self.get_commits(
        start_id=self._revision_to_commit(start),
        end_id=self._revision_to_commit(end),
        start_date=start_date, end_date=end_date,
        branch_name=branch_name, pre_load=pre_load)

def _revision_to_commit(self, revision):
    """
    Translates a revision to a commit_id

    Helps to support the old changeset based API which allows to use
    commit ids and commit indices interchangeable.
    """
    if revision is None:
        return None
    if isinstance(revision, basestring):
        return revision
    # numeric revision -> look up the commit id by index
    return self.commit_ids[revision]
682
682
@property
def in_memory_changeset(self):
    """Deprecated alias for :attr:`in_memory_commit`."""
    warnings.warn("Use in_memory_commit instead", DeprecationWarning)
    return self.in_memory_commit

def get_path_permissions(self, username):
    """
    Returns a path permission checker or None if not supported

    :param username: session user name
    :return: an instance of BasePathPermissionChecker or None
    """
    # base implementation: per-path permissions are not supported
    return None

def install_hooks(self, force=False):
    """Delegate hook installation to the remote (vcsserver) side."""
    return self._remote.install_hooks(force)
699
699
700
700
701 class BaseCommit(object):
701 class BaseCommit(object):
702 """
702 """
703 Each backend should implement it's commit representation.
703 Each backend should implement it's commit representation.
704
704
705 **Attributes**
705 **Attributes**
706
706
707 ``repository``
707 ``repository``
708 repository object within which commit exists
708 repository object within which commit exists
709
709
710 ``id``
710 ``id``
711 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
711 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
712 just ``tip``.
712 just ``tip``.
713
713
714 ``raw_id``
714 ``raw_id``
715 raw commit representation (i.e. full 40 length sha for git
715 raw commit representation (i.e. full 40 length sha for git
716 backend)
716 backend)
717
717
718 ``short_id``
718 ``short_id``
719 shortened (if apply) version of ``raw_id``; it would be simple
719 shortened (if apply) version of ``raw_id``; it would be simple
720 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
720 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
721 as ``raw_id`` for subversion
721 as ``raw_id`` for subversion
722
722
723 ``idx``
723 ``idx``
724 commit index
724 commit index
725
725
726 ``files``
726 ``files``
727 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
727 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
728
728
729 ``dirs``
729 ``dirs``
730 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
730 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
731
731
732 ``nodes``
732 ``nodes``
733 combined list of ``Node`` objects
733 combined list of ``Node`` objects
734
734
735 ``author``
735 ``author``
736 author of the commit, as unicode
736 author of the commit, as unicode
737
737
738 ``message``
738 ``message``
739 message of the commit, as unicode
739 message of the commit, as unicode
740
740
741 ``parents``
741 ``parents``
742 list of parent commits
742 list of parent commits
743
743
744 """
744 """
745
745
746 branch = None
746 branch = None
747 """
747 """
748 Depending on the backend this should be set to the branch name of the
748 Depending on the backend this should be set to the branch name of the
749 commit. Backends not supporting branches on commits should leave this
749 commit. Backends not supporting branches on commits should leave this
750 value as ``None``.
750 value as ``None``.
751 """
751 """
752
752
753 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
753 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
754 """
754 """
755 This template is used to generate a default prefix for repository archives
755 This template is used to generate a default prefix for repository archives
756 if no prefix has been specified.
756 if no prefix has been specified.
757 """
757 """
758
758
def __str__(self):
    return '<%s at %s:%s>' % (
        self.__class__.__name__, self.idx, self.short_id)

def __repr__(self):
    return self.__str__()

def __unicode__(self):
    return u'%s:%s' % (self.idx, self.short_id)

def __eq__(self, other):
    """Commits are equal when of the same class and sharing ``raw_id``."""
    same_instance = isinstance(other, self.__class__)
    return same_instance and self.raw_id == other.raw_id

def __ne__(self, other):
    # Python 2 does not derive `!=` from `__eq__`; without this, `a != b`
    # would fall back to the default comparison and disagree with `==`.
    return not self.__eq__(other)

def __hash__(self):
    # keep hashing consistent with `__eq__` (equal raw_id -> equal hash)
    # so commits behave correctly as set members / dict keys
    return hash(self.raw_id)
772
772
def __json__(self):
    """Dict representation used by the JSON renderer."""
    try:
        parents = [{'raw_id': parent.raw_id} for parent in self.parents]
    except NotImplementedError:
        # empty commit doesn't have parents implemented
        parents = []

    return {
        'short_id': self.short_id,
        'raw_id': self.raw_id,
        'revision': self.idx,
        'message': self.message,
        'date': self.date,
        'author': self.author,
        'parents': parents,
        'branch': self.branch
    }

def __getstate__(self):
    # drop references that must not be pickled (remote proxy, repo backref)
    state = self.__dict__.copy()
    state.pop('_remote', None)
    state.pop('repository', None)
    return state

def _get_refs(self):
    """Collect branch/bookmark/tag refs pointing at this commit."""
    return {
        'branches': [self.branch] if self.branch else [],
        'bookmarks': getattr(self, 'bookmarks', []),
        'tags': self.tags
    }
805
805
@LazyProperty
def last(self):
    """
    ``True`` if this is last commit in repository, ``False``
    otherwise; trying to access this attribute while there is no
    commits would raise `EmptyRepositoryError`
    """
    if self.repository is None:
        raise CommitError("Cannot check if it's most recent commit")
    last_raw_id = self.repository.commit_ids[-1]
    return self.raw_id == last_raw_id

@LazyProperty
def parents(self):
    """Returns list of parent commits."""
    raise NotImplementedError

@property
def merge(self):
    """Returns boolean if commit is a merge."""
    # a commit with more than one parent is, by definition, a merge
    return len(self.parents) > 1

@LazyProperty
def children(self):
    """Returns list of child commits."""
    raise NotImplementedError

@LazyProperty
def id(self):
    """Returns string identifying this commit."""
    raise NotImplementedError

@LazyProperty
def raw_id(self):
    """Returns raw string identifying this commit."""
    raise NotImplementedError

@LazyProperty
def short_id(self):
    """
    Returns shortened version of ``raw_id`` attribute, as string,
    identifying this commit, useful for presentation to users.
    """
    raise NotImplementedError

@LazyProperty
def idx(self):
    """Returns integer identifying this commit."""
    raise NotImplementedError
866
866
@LazyProperty
def committer(self):
    """Returns committer for this commit"""
    raise NotImplementedError

@LazyProperty
def committer_name(self):
    """Returns committer name for this commit"""
    # parsed out of the full committer string by the shared helper
    return author_name(self.committer)

@LazyProperty
def committer_email(self):
    """Returns committer email address for this commit"""
    return author_email(self.committer)

@LazyProperty
def author(self):
    """Returns author for this commit"""
    raise NotImplementedError

@LazyProperty
def author_name(self):
    """Returns author name for this commit"""
    return author_name(self.author)

@LazyProperty
def author_email(self):
    """Returns author email address for this commit"""
    return author_email(self.author)
913
913
def get_file_mode(self, path):
    """Returns stat mode of the file at `path`."""
    raise NotImplementedError

def is_link(self, path):
    """Returns ``True`` if given `path` is a symlink"""
    raise NotImplementedError

def get_file_content(self, path):
    """Returns content of the file at the given `path`."""
    raise NotImplementedError

def get_file_size(self, path):
    """Returns size of the file at the given `path`."""
    raise NotImplementedError

def get_file_commit(self, path, pre_load=None):
    """
    Returns last commit of the file at the given `path`.

    :param pre_load: Optional. List of commit attributes to load.
    """
    # limit=1 lets the backend stop after the most recent touch of `path`
    history = self.get_file_history(path, limit=1, pre_load=pre_load)
    if not history:
        raise RepositoryError(
            'Failed to fetch history for path {}. '
            'Please check if such path exists in your repository'.format(
                path))
    return history[0]
951
951
def get_file_history(self, path, limit=None, pre_load=None):
    """
    Returns history of file as reversed list of :class:`BaseCommit`
    objects for which file at given `path` has been modified.

    :param limit: Optional. Allows to limit the size of the returned
        history. This is intended as a hint to the underlying backend, so
        that it can apply optimizations depending on the limit.
    :param pre_load: Optional. List of commit attributes to load.
    """
    raise NotImplementedError

def get_file_annotate(self, path, pre_load=None):
    """
    Returns a generator of four element tuples with
    lineno, sha, commit lazy loader and line

    :param pre_load: Optional. List of commit attributes to load.
    """
    raise NotImplementedError

def get_nodes(self, path):
    """
    Returns combined ``DirNode`` and ``FileNode`` objects list representing
    state of commit at the given ``path``.

    :raises ``CommitError``: if node at the given ``path`` is not
        instance of ``DirNode``
    """
    raise NotImplementedError

def get_node(self, path):
    """
    Returns ``Node`` object from the given ``path``.

    :raises ``NodeDoesNotExistError``: if there is no node at the given
        ``path``
    """
    raise NotImplementedError

def get_largefile_node(self, path):
    """
    Returns the path to largefile from Mercurial/Git-lfs storage.
    or None if it's not a largefile node
    """
    # base implementation: largefiles are not supported
    return None
998
998
999 def archive_repo(self, file_path, kind='tgz', subrepos=None,
999 def archive_repo(self, file_path, kind='tgz', subrepos=None,
1000 prefix=None, write_metadata=False, mtime=None):
1000 prefix=None, write_metadata=False, mtime=None):
1001 """
1001 """
1002 Creates an archive containing the contents of the repository.
1002 Creates an archive containing the contents of the repository.
1003
1003
1004 :param file_path: path to the file which to create the archive.
1004 :param file_path: path to the file which to create the archive.
1005 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1005 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1006 :param prefix: name of root directory in archive.
1006 :param prefix: name of root directory in archive.
1007 Default is repository name and commit's short_id joined with dash:
1007 Default is repository name and commit's short_id joined with dash:
1008 ``"{repo_name}-{short_id}"``.
1008 ``"{repo_name}-{short_id}"``.
1009 :param write_metadata: write a metadata file into archive.
1009 :param write_metadata: write a metadata file into archive.
1010 :param mtime: custom modification time for archive creation, defaults
1010 :param mtime: custom modification time for archive creation, defaults
1011 to time.time() if not given.
1011 to time.time() if not given.
1012
1012
1013 :raise VCSError: If prefix has a problem.
1013 :raise VCSError: If prefix has a problem.
1014 """
1014 """
1015 allowed_kinds = settings.ARCHIVE_SPECS.keys()
1015 allowed_kinds = settings.ARCHIVE_SPECS.keys()
1016 if kind not in allowed_kinds:
1016 if kind not in allowed_kinds:
1017 raise ImproperArchiveTypeError(
1017 raise ImproperArchiveTypeError(
1018 'Archive kind (%s) not supported use one of %s' %
1018 'Archive kind (%s) not supported use one of %s' %
1019 (kind, allowed_kinds))
1019 (kind, allowed_kinds))
1020
1020
1021 prefix = self._validate_archive_prefix(prefix)
1021 prefix = self._validate_archive_prefix(prefix)
1022
1022
1023 mtime = mtime or time.mktime(self.date.timetuple())
1023 mtime = mtime or time.mktime(self.date.timetuple())
1024
1024
1025 file_info = []
1025 file_info = []
1026 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1026 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1027 for _r, _d, files in cur_rev.walk('/'):
1027 for _r, _d, files in cur_rev.walk('/'):
1028 for f in files:
1028 for f in files:
1029 f_path = os.path.join(prefix, f.path)
1029 f_path = os.path.join(prefix, f.path)
1030 file_info.append(
1030 file_info.append(
1031 (f_path, f.mode, f.is_link(), f.raw_bytes))
1031 (f_path, f.mode, f.is_link(), f.raw_bytes))
1032
1032
1033 if write_metadata:
1033 if write_metadata:
1034 metadata = [
1034 metadata = [
1035 ('repo_name', self.repository.name),
1035 ('repo_name', self.repository.name),
1036 ('rev', self.raw_id),
1036 ('rev', self.raw_id),
1037 ('create_time', mtime),
1037 ('create_time', mtime),
1038 ('branch', self.branch),
1038 ('branch', self.branch),
1039 ('tags', ','.join(self.tags)),
1039 ('tags', ','.join(self.tags)),
1040 ]
1040 ]
1041 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1041 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1042 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
1042 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
1043
1043
1044 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1044 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
1045
1045
1046 def _validate_archive_prefix(self, prefix):
1046 def _validate_archive_prefix(self, prefix):
1047 if prefix is None:
1047 if prefix is None:
1048 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1048 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1049 repo_name=safe_str(self.repository.name),
1049 repo_name=safe_str(self.repository.name),
1050 short_id=self.short_id)
1050 short_id=self.short_id)
1051 elif not isinstance(prefix, str):
1051 elif not isinstance(prefix, str):
1052 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1052 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1053 elif prefix.startswith('/'):
1053 elif prefix.startswith('/'):
1054 raise VCSError("Prefix cannot start with leading slash")
1054 raise VCSError("Prefix cannot start with leading slash")
1055 elif prefix.strip() == '':
1055 elif prefix.strip() == '':
1056 raise VCSError("Prefix cannot be empty")
1056 raise VCSError("Prefix cannot be empty")
1057 return prefix
1057 return prefix
1058
1058
1059 @LazyProperty
1059 @LazyProperty
1060 def root(self):
1060 def root(self):
1061 """
1061 """
1062 Returns ``RootNode`` object for this commit.
1062 Returns ``RootNode`` object for this commit.
1063 """
1063 """
1064 return self.get_node('')
1064 return self.get_node('')
1065
1065
1066 def next(self, branch=None):
1066 def next(self, branch=None):
1067 """
1067 """
1068 Returns next commit from current, if branch is gives it will return
1068 Returns next commit from current, if branch is gives it will return
1069 next commit belonging to this branch
1069 next commit belonging to this branch
1070
1070
1071 :param branch: show commits within the given named branch
1071 :param branch: show commits within the given named branch
1072 """
1072 """
1073 indexes = xrange(self.idx + 1, self.repository.count())
1073 indexes = xrange(self.idx + 1, self.repository.count())
1074 return self._find_next(indexes, branch)
1074 return self._find_next(indexes, branch)
1075
1075
1076 def prev(self, branch=None):
1076 def prev(self, branch=None):
1077 """
1077 """
1078 Returns previous commit from current, if branch is gives it will
1078 Returns previous commit from current, if branch is gives it will
1079 return previous commit belonging to this branch
1079 return previous commit belonging to this branch
1080
1080
1081 :param branch: show commit within the given named branch
1081 :param branch: show commit within the given named branch
1082 """
1082 """
1083 indexes = xrange(self.idx - 1, -1, -1)
1083 indexes = xrange(self.idx - 1, -1, -1)
1084 return self._find_next(indexes, branch)
1084 return self._find_next(indexes, branch)
1085
1085
1086 def _find_next(self, indexes, branch=None):
1086 def _find_next(self, indexes, branch=None):
1087 if branch and self.branch != branch:
1087 if branch and self.branch != branch:
1088 raise VCSError('Branch option used on commit not belonging '
1088 raise VCSError('Branch option used on commit not belonging '
1089 'to that branch')
1089 'to that branch')
1090
1090
1091 for next_idx in indexes:
1091 for next_idx in indexes:
1092 commit = self.repository.get_commit(commit_idx=next_idx)
1092 commit = self.repository.get_commit(commit_idx=next_idx)
1093 if branch and branch != commit.branch:
1093 if branch and branch != commit.branch:
1094 continue
1094 continue
1095 return commit
1095 return commit
1096 raise CommitDoesNotExistError
1096 raise CommitDoesNotExistError
1097
1097
1098 def diff(self, ignore_whitespace=True, context=3):
1098 def diff(self, ignore_whitespace=True, context=3):
1099 """
1099 """
1100 Returns a `Diff` object representing the change made by this commit.
1100 Returns a `Diff` object representing the change made by this commit.
1101 """
1101 """
1102 parent = (
1102 parent = (
1103 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1103 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1104 diff = self.repository.get_diff(
1104 diff = self.repository.get_diff(
1105 parent, self,
1105 parent, self,
1106 ignore_whitespace=ignore_whitespace,
1106 ignore_whitespace=ignore_whitespace,
1107 context=context)
1107 context=context)
1108 return diff
1108 return diff
1109
1109
1110 @LazyProperty
1110 @LazyProperty
1111 def added(self):
1111 def added(self):
1112 """
1112 """
1113 Returns list of added ``FileNode`` objects.
1113 Returns list of added ``FileNode`` objects.
1114 """
1114 """
1115 raise NotImplementedError
1115 raise NotImplementedError
1116
1116
1117 @LazyProperty
1117 @LazyProperty
1118 def changed(self):
1118 def changed(self):
1119 """
1119 """
1120 Returns list of modified ``FileNode`` objects.
1120 Returns list of modified ``FileNode`` objects.
1121 """
1121 """
1122 raise NotImplementedError
1122 raise NotImplementedError
1123
1123
1124 @LazyProperty
1124 @LazyProperty
1125 def removed(self):
1125 def removed(self):
1126 """
1126 """
1127 Returns list of removed ``FileNode`` objects.
1127 Returns list of removed ``FileNode`` objects.
1128 """
1128 """
1129 raise NotImplementedError
1129 raise NotImplementedError
1130
1130
1131 @LazyProperty
1131 @LazyProperty
1132 def size(self):
1132 def size(self):
1133 """
1133 """
1134 Returns total number of bytes from contents of all filenodes.
1134 Returns total number of bytes from contents of all filenodes.
1135 """
1135 """
1136 return sum((node.size for node in self.get_filenodes_generator()))
1136 return sum((node.size for node in self.get_filenodes_generator()))
1137
1137
1138 def walk(self, topurl=''):
1138 def walk(self, topurl=''):
1139 """
1139 """
1140 Similar to os.walk method. Insted of filesystem it walks through
1140 Similar to os.walk method. Insted of filesystem it walks through
1141 commit starting at given ``topurl``. Returns generator of tuples
1141 commit starting at given ``topurl``. Returns generator of tuples
1142 (topnode, dirnodes, filenodes).
1142 (topnode, dirnodes, filenodes).
1143 """
1143 """
1144 topnode = self.get_node(topurl)
1144 topnode = self.get_node(topurl)
1145 if not topnode.is_dir():
1145 if not topnode.is_dir():
1146 return
1146 return
1147 yield (topnode, topnode.dirs, topnode.files)
1147 yield (topnode, topnode.dirs, topnode.files)
1148 for dirnode in topnode.dirs:
1148 for dirnode in topnode.dirs:
1149 for tup in self.walk(dirnode.path):
1149 for tup in self.walk(dirnode.path):
1150 yield tup
1150 yield tup
1151
1151
1152 def get_filenodes_generator(self):
1152 def get_filenodes_generator(self):
1153 """
1153 """
1154 Returns generator that yields *all* file nodes.
1154 Returns generator that yields *all* file nodes.
1155 """
1155 """
1156 for topnode, dirs, files in self.walk():
1156 for topnode, dirs, files in self.walk():
1157 for node in files:
1157 for node in files:
1158 yield node
1158 yield node
1159
1159
    #
    # Utilities for sub classes to support consistent behavior
    #
1163
1163
1164 def no_node_at_path(self, path):
1164 def no_node_at_path(self, path):
1165 return NodeDoesNotExistError(
1165 return NodeDoesNotExistError(
1166 u"There is no file nor directory at the given path: "
1166 u"There is no file nor directory at the given path: "
1167 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1167 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1168
1168
1169 def _fix_path(self, path):
1169 def _fix_path(self, path):
1170 """
1170 """
1171 Paths are stored without trailing slash so we need to get rid off it if
1171 Paths are stored without trailing slash so we need to get rid off it if
1172 needed.
1172 needed.
1173 """
1173 """
1174 return path.rstrip('/')
1174 return path.rstrip('/')
1175
1175
    #
    # Deprecated API based on changesets
    #
1179
1179
1180 @property
1180 @property
1181 def revision(self):
1181 def revision(self):
1182 warnings.warn("Use idx instead", DeprecationWarning)
1182 warnings.warn("Use idx instead", DeprecationWarning)
1183 return self.idx
1183 return self.idx
1184
1184
1185 @revision.setter
1185 @revision.setter
1186 def revision(self, value):
1186 def revision(self, value):
1187 warnings.warn("Use idx instead", DeprecationWarning)
1187 warnings.warn("Use idx instead", DeprecationWarning)
1188 self.idx = value
1188 self.idx = value
1189
1189
1190 def get_file_changeset(self, path):
1190 def get_file_changeset(self, path):
1191 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1191 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1192 return self.get_file_commit(path)
1192 return self.get_file_commit(path)
1193
1193
1194
1194
class BaseChangesetClass(type):
    """
    Metaclass making ``isinstance(obj, BaseChangeset)`` accept any
    ``BaseCommit`` instance, easing the changeset -> commit migration.
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1199
1199
1200
1200
class BaseChangeset(BaseCommit):
    """
    Deprecated alias of :class:`BaseCommit`; instantiating it emits a
    ``DeprecationWarning``.
    """

    __metaclass__ = BaseChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1209
1209
1210
1210
class BaseInMemoryCommit(object):
    """
    Represents differences between repository's state (most recent head) and
    changes made *in place*.

    **Attributes**

    ``repository``
        repository object for this in-memory-commit

    ``added``
        list of ``FileNode`` objects marked as *added*

    ``changed``
        list of ``FileNode`` objects marked as *changed*

    ``removed``
        list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
        *removed*

    ``parents``
        list of :class:`BaseCommit` instances representing parents of
        in-memory commit. Should always be 2-element sequence.

    """

    def __init__(self, repository):
        self.repository = repository
        # start with clean staging lists (added/changed/removed/parents)
        self.reset()

    def add(self, *filenodes):
        """
        Marks given ``FileNode`` objects as *to be committed*.

        :raises ``NodeAlreadyExistsError``: if node with same path exists at
            latest commit
        :raises ``NodeAlreadyAddedError``: if node with same path is already
            marked as *added*
        """
        # Validate all nodes first, so either all are staged or none is.
        staged_paths = set(n.path for n in self.added)
        for node in filenodes:
            if node.path in staged_paths:
                raise NodeAlreadyAddedError(
                    "Such FileNode %s is already marked for addition"
                    % node.path)
        self.added.extend(filenodes)

    def change(self, *filenodes):
        """
        Marks given ``FileNode`` objects to be *changed* in next commit.

        :raises ``EmptyRepositoryError``: if there are no commits yet
        :raises ``NodeAlreadyChangedError``: if node with same path is already
            marked to be *changed*
        :raises ``NodeAlreadyRemovedError``: if node with same path is already
            marked to be *removed*
        :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
            commit
        :raises ``NodeNotChangedError``: if node hasn't really be changed
        """
        removed_paths = set(n.path for n in self.removed)
        for node in filenodes:
            if node.path in removed_paths:
                raise NodeAlreadyRemovedError(
                    "Node at %s is already marked as removed" % node.path)
        # Changing something only makes sense on top of an existing commit.
        try:
            self.repository.get_commit()
        except EmptyRepositoryError:
            raise EmptyRepositoryError(
                "Nothing to change - try to *add* new nodes rather than "
                "changing them")
        for node in filenodes:
            # self.changed grows as we go, so duplicates inside the
            # ``filenodes`` arguments are detected as well
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node at '%s' is already marked as changed" % node.path)
            self.changed.append(node)

    def remove(self, *filenodes):
        """
        Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
        *removed* in next commit.

        :raises ``NodeAlreadyRemovedError``: if node has been already marked
            to be *removed*
        :raises ``NodeAlreadyChangedError``: if node has been already marked
            to be *changed*
        """
        changed_paths = set(n.path for n in self.changed)
        for node in filenodes:
            # check against the live list so duplicates inside the
            # ``filenodes`` arguments are caught too
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node is already marked to for removal at %s" % node.path)
            if node.path in changed_paths:
                raise NodeAlreadyChangedError(
                    "Node is already marked to be changed at %s" % node.path)
            # We only mark node as *removed* - real removal is done by
            # commit method
            self.removed.append(node)

    def reset(self):
        """
        Resets this instance to initial state (cleans ``added``, ``changed``
        and ``removed`` lists).
        """
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def get_ipaths(self):
        """
        Returns generator of paths from nodes marked as added, changed or
        removed.
        """
        all_staged = itertools.chain(self.added, self.changed, self.removed)
        for node in all_staged:
            yield node.path

    def get_paths(self):
        """
        Returns list of paths from nodes marked as added, changed or removed.
        """
        return list(self.get_ipaths())

    def check_integrity(self, parents=None):
        """
        Checks in-memory commit's integrity. Also, sets parents if not
        already set.

        :raises CommitError: if any error occurs (i.e.
            ``NodeDoesNotExistError``).
        """
        if not self.parents:
            parents = parents or []
            if not parents:
                try:
                    parents = [self.repository.get_commit(), None]
                except EmptyRepositoryError:
                    parents = [None, None]
            elif len(parents) == 1:
                parents += [None]
            self.parents = parents

        # Local parents, only if not None
        parents = [parent for parent in self.parents if parent]

        # Check nodes marked as added
        for parent in parents:
            for node in self.added:
                try:
                    parent.get_node(node.path)
                except NodeDoesNotExistError:
                    pass
                else:
                    raise NodeAlreadyExistsError(
                        "Node `%s` already exists at %s" % (node.path, parent))

        # Check nodes marked as changed
        missing = set(self.changed)
        not_changed = set(self.changed)
        if self.changed and not parents:
            raise NodeDoesNotExistError(str(self.changed[0].path))
        for parent in parents:
            for node in self.changed:
                try:
                    old = parent.get_node(node.path)
                    missing.remove(node)
                    # if content actually changed, remove node from not_changed
                    if old.content != node.content:
                        not_changed.remove(node)
                except NodeDoesNotExistError:
                    pass
        if self.changed and missing:
            # NOTE(review): ``node`` is the last node iterated above, not
            # necessarily one of the missing ones - kept as-is for behavior
            # compatibility
            raise NodeDoesNotExistError(
                "Node `%s` marked as modified but missing in parents: %s"
                % (node.path, parents))

        if self.changed and not_changed:
            raise NodeNotChangedError(
                "Node `%s` wasn't actually changed (parents: %s)"
                % (not_changed.pop().path, parents))

        # Check nodes marked as removed
        if self.removed and not parents:
            raise NodeDoesNotExistError(
                "Cannot remove node at %s as there "
                "were no parents specified" % self.removed[0].path)
        really_removed = set()
        for parent in parents:
            for node in self.removed:
                try:
                    parent.get_node(node.path)
                    really_removed.add(node)
                except CommitError:
                    pass
        not_removed = set(self.removed) - really_removed
        if not_removed:
            # TODO: johbo: This code branch does not seem to be covered
            raise NodeDoesNotExistError(
                "Cannot remove node at %s from "
                "following parents: %s" % (not_removed, parents))

    def commit(
            self, message, author, parents=None, branch=None, date=None,
            **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created :class:`BaseCommit`. Updates repository's
        attribute `commits`.

        .. note::

            While overriding this method each backend's should call
            ``self.check_integrity(parents)`` in the first place.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
            would be derived
        :param date: ``datetime.datetime`` instance. Defaults to
            ``datetime.datetime.now()``.
        :param branch: branch name, as string. If none given, default backend's
            branch would be used.

        :raises ``CommitError``: if any error occurs while committing
        """
        raise NotImplementedError
1439
1439
1440
1440
class BaseInMemoryChangesetClass(type):
    """
    Metaclass routing ``isinstance`` checks against the deprecated
    ``BaseInMemoryChangeset`` to :class:`BaseInMemoryCommit`.
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseInMemoryCommit)
1445
1445
1446
1446
class BaseInMemoryChangeset(BaseInMemoryCommit):
    """
    Deprecated alias of :class:`BaseInMemoryCommit`; instantiating it emits
    a ``DeprecationWarning``.
    """

    __metaclass__ = BaseInMemoryChangesetClass

    def __new__(cls, *args, **kwargs):
        # Fixed message: the replacement for this class is
        # BaseInMemoryCommit, not BaseCommit (consistent with the other
        # deprecated *Changeset aliases in this module).
        warnings.warn(
            "Use BaseInMemoryCommit instead of BaseInMemoryChangeset",
            DeprecationWarning)
        return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1455
1455
1456
1456
class EmptyCommit(BaseCommit):
    """
    A dummy, empty commit. A custom commit hash may be passed when
    creating an EmptyCommit.
    """

    def __init__(
            self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        self._empty_commit_id = commit_id
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self.idx = idx
        self.message = message
        self.author = author
        self.date = date or datetime.datetime.fromtimestamp(0)
        self.repository = repo
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Raw string identifying this commit, useful for web representation.
        """
        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        # default branch name of the aliased backend, when an alias is set;
        # otherwise None
        if self.alias:
            from rhodecode.lib.vcs.backends import get_backend
            return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        # first 12 characters of the raw id
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        return self.raw_id

    def get_file_commit(self, path):
        # every path resolves to this very commit
        return self

    def get_file_content(self, path):
        # an empty commit carries no file content
        return u''

    def get_file_size(self, path):
        return 0
1507
1507
1508
1508
class EmptyChangesetClass(type):
    """
    Metaclass routing ``isinstance`` checks against the deprecated
    ``EmptyChangeset`` to :class:`EmptyCommit`.
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, EmptyCommit)
1513
1513
1514
1514
class EmptyChangeset(EmptyCommit):
    """
    Deprecated alias of :class:`EmptyCommit`, kept for changeset-era API
    compatibility. Instantiating it emits a ``DeprecationWarning``.
    """

    __metaclass__ = EmptyChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # Fixed: start the MRO lookup at this class. The previous
        # ``super(EmptyCommit, cls)`` skipped a level; EmptyCommit defines
        # no __new__, so observable behavior is unchanged, but this is now
        # consistent with the other deprecated *Changeset aliases.
        return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)

    def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        super(EmptyChangeset, self).__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        """Deprecated alias for ``idx``."""
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1543
1543
1544
1544
class EmptyRepository(BaseRepository):
    """
    Minimal repository stand-in with no backing storage; all diffs are
    empty.
    """

    def __init__(self, repo_path=None, config=None, create=False, **kwargs):
        # intentionally a no-op: there is nothing to set up
        pass

    def get_diff(self, *args, **kwargs):
        """
        Return an empty git diff, regardless of arguments.
        """
        from rhodecode.lib.vcs.backends.git.diff import GitDiff
        return GitDiff('')
1552
1552
1553
1553
class CollectionGenerator(object):
    """
    Lazy sequence of commits: commit ids are turned into commit objects
    only while iterating or slicing, never up front.
    """

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
        self.repo = repo
        self.commit_ids = commit_ids
        # TODO: (oliver) this isn't currently hooked up
        self.collection_size = None
        self.pre_load = pre_load

    def __len__(self):
        if self.collection_size is not None:
            return self.collection_size
        return len(self.commit_ids)

    def __iter__(self):
        # TODO: johbo: Mercurial passes in commit indices or commit ids
        for commit_id in self.commit_ids:
            yield self._commit_factory(commit_id)

    def _commit_factory(self, commit_id):
        """
        Allows backends to override the way commits are generated.
        """
        return self.repo.get_commit(
            commit_id=commit_id, pre_load=self.pre_load)

    def __getslice__(self, i, j):
        """
        Returns an iterator of sliced repository
        """
        sliced_ids = self.commit_ids[i:j]
        return self.__class__(self.repo, sliced_ids, pre_load=self.pre_load)

    def __repr__(self):
        return '<CollectionGenerator[len:%s]>' % (self.__len__())
1590
1590
1591
1591
class Config(object):
    """
    Represents the configuration for a repository.

    The API is inspired by :class:`ConfigParser.ConfigParser` from the
    standard library. It implements only the needed subset.
    """

    def __init__(self):
        # mapping of section name -> {option: value}
        self._values = {}

    def copy(self):
        """Return a deep-enough copy (sections are copied per dict)."""
        clone = Config()
        for section_name, section_values in self._values.items():
            clone._values[section_name] = section_values.copy()
        return clone

    def __repr__(self):
        return '<Config(%s sections) at %s>' % (
            len(self._values), hex(id(self)))

    def items(self, section):
        # missing sections behave as empty
        return self._values.get(section, {}).iteritems()

    def get(self, section, option):
        # returns None for unknown section or option
        return self._values.get(section, {}).get(option)

    def set(self, section, option, value):
        self._values.setdefault(section, {})[option] = value

    def clear_section(self, section):
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        return [
            (safe_str(section), safe_str(option), safe_str(value))
            for section in self._values
            for option, value in self._values[section].items()]
1637
1637
1638
1638
class Diff(object):
    """
    Represents a diff result from a repository backend.

    Subclasses have to provide a backend specific value for
    :attr:`_header_re` and :attr:`_meta_re`.
    """
    _meta_re = None
    _header_re = None

    def __init__(self, raw_diff):
        self.raw = raw_diff

    def chunks(self):
        """
        split the diff in chunks of separate --git a/file b/file chunks
        to make diffs consistent we must prepend with \n, and make sure
        we can detect last chunk as this was also has special rule
        """
        parts = ('\n' + self.raw).split('\ndiff --git')
        header = parts[0]

        if self._meta_re:
            # NOTE(review): the match result is unused here; kept only to
            # preserve the original matching behavior — confirm if needed.
            match = self._meta_re.match(header)

        file_chunks = parts[1:]
        chunk_count = len(file_chunks)

        # lazily yield one DiffChunk per file, flagging the last one
        return (
            DiffChunk(raw_chunk, self, index == chunk_count)
            for index, raw_chunk in enumerate(file_chunks, start=1))
1671
1671
1672
1672
class DiffChunk(object):
    """
    One per-file chunk of a diff, parsed with the parent diff's
    ``_header_re`` into a header dict plus the remaining body.
    """

    def __init__(self, chunk, diff, last_chunk):
        self._diff = diff

        # since we split by \ndiff --git that part is lost from original diff
        # we need to re-apply it at the end, EXCEPT ! if it's last chunk
        raw_chunk = chunk if last_chunk else chunk + '\n'

        header_match = self._diff._header_re.match(raw_chunk)
        self.header = header_match.groupdict()
        self.diff = raw_chunk[header_match.end():]
        self.raw = raw_chunk
1687
1687
1688
1688
class BasePathPermissionChecker(object):
    """
    Base class for path-based permission checks; concrete subclasses
    decide whether a given repository path is accessible.
    """

    @staticmethod
    def create_from_patterns(includes, excludes):
        """
        Factory returning the cheapest checker for the given glob
        patterns: full access, no access, or per-pattern matching.
        """
        if includes and '*' in includes and not excludes:
            return AllPathPermissionChecker()
        elif excludes and '*' in excludes:
            return NonePathPermissionChecker()
        else:
            return PatternPathPermissionChecker(includes, excludes)

    @property
    def has_full_access(self):
        """True when every path is accessible; must be overridden."""
        # BUG FIX: was `raise NotImplemented()` — NotImplemented is a
        # constant, not callable, so that line raised TypeError instead
        # of signalling "abstract method" properly.
        raise NotImplementedError()

    def has_access(self, path):
        """Return True when *path* is accessible; must be overridden."""
        # BUG FIX: same NotImplemented -> NotImplementedError correction.
        raise NotImplementedError()
1706
1706
1707
1707
class AllPathPermissionChecker(BasePathPermissionChecker):
    """Checker that grants access to every path."""

    @property
    def has_full_access(self):
        # unrestricted: all paths are allowed
        return True

    def has_access(self, path):
        # every path is accessible, regardless of its value
        return True
1716
1716
1717
1717
class NonePathPermissionChecker(BasePathPermissionChecker):
    """Checker that denies access to every path."""

    @property
    def has_full_access(self):
        # never full access
        return False

    def has_access(self, path):
        # no path is accessible, regardless of its value
        return False
1726
1726
1727
1727
class PatternPathPermissionChecker(BasePathPermissionChecker):
    """
    Checker matching paths against include/exclude glob patterns.
    Exclude patterns take precedence over include patterns.
    """

    def __init__(self, includes, excludes):
        self.includes = includes
        self.excludes = excludes
        # pre-compile each glob into a regex once, at construction time
        self.includes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (includes or [])]
        self.excludes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (excludes or [])]

    @property
    def has_full_access(self):
        return '*' in self.includes and not self.excludes

    def has_access(self, path):
        # an exclude match denies immediately; otherwise any include
        # match grants access, and no match at all denies
        if any(regex.match(path) for regex in self.excludes_re):
            return False
        return any(regex.match(path) for regex in self.includes_re)
@@ -1,67 +1,75 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Internal settings for vcs-lib
22 Internal settings for vcs-lib
23 """
23 """
24
24
25 # list of default encoding used in safe_unicode/safe_str methods
25 # list of default encoding used in safe_unicode/safe_str methods
26 DEFAULT_ENCODINGS = ['utf8']
26 DEFAULT_ENCODINGS = ['utf8']
27
27
28 # Optional arguments to rev-filter, it has to be a list
28 # Optional arguments to rev-filter, it has to be a list
29 # It can also be ['--branches', '--tags']
29 # It can also be ['--branches', '--tags']
30 GIT_REV_FILTER = ['--all']
30 GIT_REV_FILTER = ['--all']
31
31
32 # Compatibility version when creating SVN repositories. None means newest.
32 # Compatibility version when creating SVN repositories. None means newest.
33 # Other available options are: pre-1.4-compatible, pre-1.5-compatible,
33 # Other available options are: pre-1.4-compatible, pre-1.5-compatible,
34 # pre-1.6-compatible, pre-1.8-compatible
34 # pre-1.6-compatible, pre-1.8-compatible
35 SVN_COMPATIBLE_VERSION = None
35 SVN_COMPATIBLE_VERSION = None
36
36
37 ALIASES = ['hg', 'git', 'svn']
37 ALIASES = ['hg', 'git', 'svn']
38
38
39 BACKENDS = {
39 BACKENDS = {
40 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
40 'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
41 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
41 'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
42 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository',
42 'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository',
43 }
43 }
44
44
45 # TODO: Remove once controllers/files.py is adjusted
45 # TODO: Remove once controllers/files.py is adjusted
46 ARCHIVE_SPECS = {
46 ARCHIVE_SPECS = {
47 'tbz2': ('application/x-bzip2', '.tar.bz2'),
47 'tbz2': ('application/x-bzip2', '.tar.bz2'),
48 'tgz': ('application/x-gzip', '.tar.gz'),
48 'tgz': ('application/x-gzip', '.tar.gz'),
49 'zip': ('application/zip', '.zip'),
49 'zip': ('application/zip', '.zip'),
50 }
50 }
51
51
52 HOOKS_PROTOCOL = None
52 HOOKS_PROTOCOL = None
53 HOOKS_DIRECT_CALLS = False
53 HOOKS_DIRECT_CALLS = False
54 HOOKS_HOST = '127.0.0.1'
54 HOOKS_HOST = '127.0.0.1'
55
55
56
56
57 MERGE_MESSAGE_TMPL = (
58 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}\n\n '
59 u'{pr_title}')
60 MERGE_DRY_RUN_MESSAGE = 'dry_run_merge_message_from_rhodecode'
61 MERGE_DRY_RUN_USER = 'Dry-Run User'
62 MERGE_DRY_RUN_EMAIL = 'dry-run-merge@rhodecode.com'
63
64
def available_aliases():
    """
    Mercurial is required for the system to work, so in case vcs.backends does
    not include it, we make sure it will be available internally
    TODO: anderson: refactor vcs.backends so it won't be necessary, VCS server
    should be responsible to dictate available backends.
    """
    aliases = list(ALIASES)
    if 'hg' not in aliases:
        aliases.append('hg')
    return aliases
@@ -1,1727 +1,1730 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2018 RhodeCode GmbH
3 # Copyright (C) 2012-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
26
27 import json
27 import json
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import urllib
30 import urllib
31 import collections
31 import collections
32
32
33 from pyramid.threadlocal import get_current_request
33 from pyramid.threadlocal import get_current_request
34
34
35 from rhodecode import events
35 from rhodecode import events
36 from rhodecode.translation import lazy_ugettext#, _
36 from rhodecode.translation import lazy_ugettext#, _
37 from rhodecode.lib import helpers as h, hooks_utils, diffs
37 from rhodecode.lib import helpers as h, hooks_utils, diffs
38 from rhodecode.lib import audit_logger
38 from rhodecode.lib import audit_logger
39 from rhodecode.lib.compat import OrderedDict
39 from rhodecode.lib.compat import OrderedDict
40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
40 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
41 from rhodecode.lib.markup_renderer import (
41 from rhodecode.lib.markup_renderer import (
42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
42 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
43 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
44 from rhodecode.lib.vcs.backends.base import (
44 from rhodecode.lib.vcs.backends.base import (
45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
45 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
46 from rhodecode.lib.vcs.conf import settings as vcs_settings
46 from rhodecode.lib.vcs.conf import settings as vcs_settings
47 from rhodecode.lib.vcs.exceptions import (
47 from rhodecode.lib.vcs.exceptions import (
48 CommitDoesNotExistError, EmptyRepositoryError)
48 CommitDoesNotExistError, EmptyRepositoryError)
49 from rhodecode.model import BaseModel
49 from rhodecode.model import BaseModel
50 from rhodecode.model.changeset_status import ChangesetStatusModel
50 from rhodecode.model.changeset_status import ChangesetStatusModel
51 from rhodecode.model.comment import CommentsModel
51 from rhodecode.model.comment import CommentsModel
52 from rhodecode.model.db import (
52 from rhodecode.model.db import (
53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
53 or_, PullRequest, PullRequestReviewers, ChangesetStatus,
54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
54 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule)
55 from rhodecode.model.meta import Session
55 from rhodecode.model.meta import Session
56 from rhodecode.model.notification import NotificationModel, \
56 from rhodecode.model.notification import NotificationModel, \
57 EmailNotificationModel
57 EmailNotificationModel
58 from rhodecode.model.scm import ScmModel
58 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.settings import VcsSettingsModel
59 from rhodecode.model.settings import VcsSettingsModel
60
60
61
61
62 log = logging.getLogger(__name__)
62 log = logging.getLogger(__name__)
63
63
64
64
# Data structure to hold the response data when updating commits during a pull
# request update.
# Fields: executed (bool), reason (UpdateFailureReason), new/old (commit
# references), changes, and flags telling which side moved.
UpdateResponse = collections.namedtuple('UpdateResponse', [
    'executed', 'reason', 'new', 'old', 'changes',
    'source_changed', 'target_changed'])
70
70
71
71
class PullRequestModel(BaseModel):
    """
    Business logic for pull requests: permission checks, querying,
    updating and merge-status reporting.
    """

    cls = PullRequest

    # number of context lines shown around each diff hunk
    DIFF_CONTEXT = 3

    # user-facing explanation for every possible merge failure reason
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            'This pull request cannot be merged because of an unhandled'
            ' exception.'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of merge conflicts.'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            'This pull request could not be merged because push to target'
            ' failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            'This pull request cannot be merged because the target is not a'
            ' head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            'This pull request cannot be merged because the source contains'
            ' more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            'This pull request cannot be merged because the target has'
            ' multiple heads.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            'This pull request cannot be merged because the target repository'
            ' is locked.'),
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            'This pull request cannot be merged because of the target or the '
            'source reference is missing.'),
        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be merged because the target '
            'reference is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be merged because the source '
            'reference is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of conflicts related '
            'to sub repositories.'),
    }

    # user-facing explanation for every possible update failure reason
    UPDATE_STATUS_MESSAGES = {
        UpdateFailureReason.NONE: lazy_ugettext(
            'Pull request update successful.'),
        UpdateFailureReason.UNKNOWN: lazy_ugettext(
            'Pull request update failed because of an unknown error.'),
        UpdateFailureReason.NO_CHANGE: lazy_ugettext(
            'No update needed because the source and target have not changed.'),
        UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
            'Pull request cannot be updated because the reference type is '
            'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
        UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be updated because the target '
            'reference is missing.'),
        UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be updated because the source '
            'reference is missing.'),
    }
132
132
    def __get_pull_request(self, pull_request):
        # resolve an id or instance into a PullRequest / PullRequestVersion
        # model object
        return self._get_instance((
            PullRequest, PullRequestVersion), pull_request)
136
136
137 def _check_perms(self, perms, pull_request, user, api=False):
137 def _check_perms(self, perms, pull_request, user, api=False):
138 if not api:
138 if not api:
139 return h.HasRepoPermissionAny(*perms)(
139 return h.HasRepoPermissionAny(*perms)(
140 user=user, repo_name=pull_request.target_repo.repo_name)
140 user=user, repo_name=pull_request.target_repo.repo_name)
141 else:
141 else:
142 return h.HasRepoPermissionAnyApi(*perms)(
142 return h.HasRepoPermissionAnyApi(*perms)(
143 user=user, repo_name=pull_request.target_repo.repo_name)
143 user=user, repo_name=pull_request.target_repo.repo_name)
144
144
    def check_user_read(self, pull_request, user, api=False):
        # read access requires at least repository.read on the target repo
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        return self._check_perms(_perms, pull_request, user, api)
148
148
    def check_user_merge(self, pull_request, user, api=False):
        # merging requires write access on the target repo, or admin rights
        _perms = ('repository.admin', 'repository.write', 'hg.admin',)
        return self._check_perms(_perms, pull_request, user, api)
152
152
    def check_user_update(self, pull_request, user, api=False):
        # the pull request owner may always update it, as may anyone
        # with merge permission
        owner = user.user_id == pull_request.user_id
        return self.check_user_merge(pull_request, user, api) or owner
156
156
    def check_user_delete(self, pull_request, user):
        # deletion is restricted to the owner or repository admins
        owner = user.user_id == pull_request.user_id
        _perms = ('repository.admin',)
        return self._check_perms(_perms, pull_request, user) or owner
161
161
162 def check_user_change_status(self, pull_request, user, api=False):
162 def check_user_change_status(self, pull_request, user, api=False):
163 reviewer = user.user_id in [x.user_id for x in
163 reviewer = user.user_id in [x.user_id for x in
164 pull_request.reviewers]
164 pull_request.reviewers]
165 return self.check_user_update(pull_request, user, api) or reviewer
165 return self.check_user_update(pull_request, user, api) or reviewer
166
166
    def check_user_comment(self, pull_request, user):
        # commenting is allowed for the owner and anyone with read access
        owner = user.user_id == pull_request.user_id
        return self.check_user_read(pull_request, user) or owner
170
170
    def get(self, pull_request):
        # public accessor for the id/instance resolver
        return self.__get_pull_request(pull_request)
173
173
174 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
174 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
175 opened_by=None, order_by=None,
175 opened_by=None, order_by=None,
176 order_dir='desc'):
176 order_dir='desc'):
177 repo = None
177 repo = None
178 if repo_name:
178 if repo_name:
179 repo = self._get_repo(repo_name)
179 repo = self._get_repo(repo_name)
180
180
181 q = PullRequest.query()
181 q = PullRequest.query()
182
182
183 # source or target
183 # source or target
184 if repo and source:
184 if repo and source:
185 q = q.filter(PullRequest.source_repo == repo)
185 q = q.filter(PullRequest.source_repo == repo)
186 elif repo:
186 elif repo:
187 q = q.filter(PullRequest.target_repo == repo)
187 q = q.filter(PullRequest.target_repo == repo)
188
188
189 # closed,opened
189 # closed,opened
190 if statuses:
190 if statuses:
191 q = q.filter(PullRequest.status.in_(statuses))
191 q = q.filter(PullRequest.status.in_(statuses))
192
192
193 # opened by filter
193 # opened by filter
194 if opened_by:
194 if opened_by:
195 q = q.filter(PullRequest.user_id.in_(opened_by))
195 q = q.filter(PullRequest.user_id.in_(opened_by))
196
196
197 if order_by:
197 if order_by:
198 order_map = {
198 order_map = {
199 'name_raw': PullRequest.pull_request_id,
199 'name_raw': PullRequest.pull_request_id,
200 'title': PullRequest.title,
200 'title': PullRequest.title,
201 'updated_on_raw': PullRequest.updated_on,
201 'updated_on_raw': PullRequest.updated_on,
202 'target_repo': PullRequest.target_repo_id
202 'target_repo': PullRequest.target_repo_id
203 }
203 }
204 if order_dir == 'asc':
204 if order_dir == 'asc':
205 q = q.order_by(order_map[order_by].asc())
205 q = q.order_by(order_map[order_by].asc())
206 else:
206 else:
207 q = q.order_by(order_map[order_by].desc())
207 q = q.order_by(order_map[order_by].desc())
208
208
209 return q
209 return q
210
210
    def count_all(self, repo_name, source=False, statuses=None,
                  opened_by=None):
        """
        Count the number of pull requests for a specific repository.

        :param repo_name: target or source repo
        :param source: boolean flag to specify if repo_name refers to source
        :param statuses: list of pull request statuses
        :param opened_by: author user of the pull request
        :returns: int number of pull requests
        """
        q = self._prepare_get_all_query(
            repo_name, source=source, statuses=statuses, opened_by=opened_by)

        return q.count()
226
226
227 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
227 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
228 offset=0, length=None, order_by=None, order_dir='desc'):
228 offset=0, length=None, order_by=None, order_dir='desc'):
229 """
229 """
230 Get all pull requests for a specific repository.
230 Get all pull requests for a specific repository.
231
231
232 :param repo_name: target or source repo
232 :param repo_name: target or source repo
233 :param source: boolean flag to specify if repo_name refers to source
233 :param source: boolean flag to specify if repo_name refers to source
234 :param statuses: list of pull request statuses
234 :param statuses: list of pull request statuses
235 :param opened_by: author user of the pull request
235 :param opened_by: author user of the pull request
236 :param offset: pagination offset
236 :param offset: pagination offset
237 :param length: length of returned list
237 :param length: length of returned list
238 :param order_by: order of the returned list
238 :param order_by: order of the returned list
239 :param order_dir: 'asc' or 'desc' ordering direction
239 :param order_dir: 'asc' or 'desc' ordering direction
240 :returns: list of pull requests
240 :returns: list of pull requests
241 """
241 """
242 q = self._prepare_get_all_query(
242 q = self._prepare_get_all_query(
243 repo_name, source=source, statuses=statuses, opened_by=opened_by,
243 repo_name, source=source, statuses=statuses, opened_by=opened_by,
244 order_by=order_by, order_dir=order_dir)
244 order_by=order_by, order_dir=order_dir)
245
245
246 if length:
246 if length:
247 pull_requests = q.limit(length).offset(offset).all()
247 pull_requests = q.limit(length).offset(offset).all()
248 else:
248 else:
249 pull_requests = q.all()
249 pull_requests = q.all()
250
250
251 return pull_requests
251 return pull_requests
252
252
253 def count_awaiting_review(self, repo_name, source=False, statuses=None,
253 def count_awaiting_review(self, repo_name, source=False, statuses=None,
254 opened_by=None):
254 opened_by=None):
255 """
255 """
256 Count the number of pull requests for a specific repository that are
256 Count the number of pull requests for a specific repository that are
257 awaiting review.
257 awaiting review.
258
258
259 :param repo_name: target or source repo
259 :param repo_name: target or source repo
260 :param source: boolean flag to specify if repo_name refers to source
260 :param source: boolean flag to specify if repo_name refers to source
261 :param statuses: list of pull request statuses
261 :param statuses: list of pull request statuses
262 :param opened_by: author user of the pull request
262 :param opened_by: author user of the pull request
263 :returns: int number of pull requests
263 :returns: int number of pull requests
264 """
264 """
265 pull_requests = self.get_awaiting_review(
265 pull_requests = self.get_awaiting_review(
266 repo_name, source=source, statuses=statuses, opened_by=opened_by)
266 repo_name, source=source, statuses=statuses, opened_by=opened_by)
267
267
268 return len(pull_requests)
268 return len(pull_requests)
269
269
270 def get_awaiting_review(self, repo_name, source=False, statuses=None,
270 def get_awaiting_review(self, repo_name, source=False, statuses=None,
271 opened_by=None, offset=0, length=None,
271 opened_by=None, offset=0, length=None,
272 order_by=None, order_dir='desc'):
272 order_by=None, order_dir='desc'):
273 """
273 """
274 Get all pull requests for a specific repository that are awaiting
274 Get all pull requests for a specific repository that are awaiting
275 review.
275 review.
276
276
277 :param repo_name: target or source repo
277 :param repo_name: target or source repo
278 :param source: boolean flag to specify if repo_name refers to source
278 :param source: boolean flag to specify if repo_name refers to source
279 :param statuses: list of pull request statuses
279 :param statuses: list of pull request statuses
280 :param opened_by: author user of the pull request
280 :param opened_by: author user of the pull request
281 :param offset: pagination offset
281 :param offset: pagination offset
282 :param length: length of returned list
282 :param length: length of returned list
283 :param order_by: order of the returned list
283 :param order_by: order of the returned list
284 :param order_dir: 'asc' or 'desc' ordering direction
284 :param order_dir: 'asc' or 'desc' ordering direction
285 :returns: list of pull requests
285 :returns: list of pull requests
286 """
286 """
287 pull_requests = self.get_all(
287 pull_requests = self.get_all(
288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
288 repo_name, source=source, statuses=statuses, opened_by=opened_by,
289 order_by=order_by, order_dir=order_dir)
289 order_by=order_by, order_dir=order_dir)
290
290
291 _filtered_pull_requests = []
291 _filtered_pull_requests = []
292 for pr in pull_requests:
292 for pr in pull_requests:
293 status = pr.calculated_review_status()
293 status = pr.calculated_review_status()
294 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
294 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
295 ChangesetStatus.STATUS_UNDER_REVIEW]:
295 ChangesetStatus.STATUS_UNDER_REVIEW]:
296 _filtered_pull_requests.append(pr)
296 _filtered_pull_requests.append(pr)
297 if length:
297 if length:
298 return _filtered_pull_requests[offset:offset+length]
298 return _filtered_pull_requests[offset:offset+length]
299 else:
299 else:
300 return _filtered_pull_requests
300 return _filtered_pull_requests
301
301
302 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
302 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
303 opened_by=None, user_id=None):
303 opened_by=None, user_id=None):
304 """
304 """
305 Count the number of pull requests for a specific repository that are
305 Count the number of pull requests for a specific repository that are
306 awaiting review from a specific user.
306 awaiting review from a specific user.
307
307
308 :param repo_name: target or source repo
308 :param repo_name: target or source repo
309 :param source: boolean flag to specify if repo_name refers to source
309 :param source: boolean flag to specify if repo_name refers to source
310 :param statuses: list of pull request statuses
310 :param statuses: list of pull request statuses
311 :param opened_by: author user of the pull request
311 :param opened_by: author user of the pull request
312 :param user_id: reviewer user of the pull request
312 :param user_id: reviewer user of the pull request
313 :returns: int number of pull requests
313 :returns: int number of pull requests
314 """
314 """
315 pull_requests = self.get_awaiting_my_review(
315 pull_requests = self.get_awaiting_my_review(
316 repo_name, source=source, statuses=statuses, opened_by=opened_by,
316 repo_name, source=source, statuses=statuses, opened_by=opened_by,
317 user_id=user_id)
317 user_id=user_id)
318
318
319 return len(pull_requests)
319 return len(pull_requests)
320
320
321 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
321 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
322 opened_by=None, user_id=None, offset=0,
322 opened_by=None, user_id=None, offset=0,
323 length=None, order_by=None, order_dir='desc'):
323 length=None, order_by=None, order_dir='desc'):
324 """
324 """
325 Get all pull requests for a specific repository that are awaiting
325 Get all pull requests for a specific repository that are awaiting
326 review from a specific user.
326 review from a specific user.
327
327
328 :param repo_name: target or source repo
328 :param repo_name: target or source repo
329 :param source: boolean flag to specify if repo_name refers to source
329 :param source: boolean flag to specify if repo_name refers to source
330 :param statuses: list of pull request statuses
330 :param statuses: list of pull request statuses
331 :param opened_by: author user of the pull request
331 :param opened_by: author user of the pull request
332 :param user_id: reviewer user of the pull request
332 :param user_id: reviewer user of the pull request
333 :param offset: pagination offset
333 :param offset: pagination offset
334 :param length: length of returned list
334 :param length: length of returned list
335 :param order_by: order of the returned list
335 :param order_by: order of the returned list
336 :param order_dir: 'asc' or 'desc' ordering direction
336 :param order_dir: 'asc' or 'desc' ordering direction
337 :returns: list of pull requests
337 :returns: list of pull requests
338 """
338 """
339 pull_requests = self.get_all(
339 pull_requests = self.get_all(
340 repo_name, source=source, statuses=statuses, opened_by=opened_by,
340 repo_name, source=source, statuses=statuses, opened_by=opened_by,
341 order_by=order_by, order_dir=order_dir)
341 order_by=order_by, order_dir=order_dir)
342
342
343 _my = PullRequestModel().get_not_reviewed(user_id)
343 _my = PullRequestModel().get_not_reviewed(user_id)
344 my_participation = []
344 my_participation = []
345 for pr in pull_requests:
345 for pr in pull_requests:
346 if pr in _my:
346 if pr in _my:
347 my_participation.append(pr)
347 my_participation.append(pr)
348 _filtered_pull_requests = my_participation
348 _filtered_pull_requests = my_participation
349 if length:
349 if length:
350 return _filtered_pull_requests[offset:offset+length]
350 return _filtered_pull_requests[offset:offset+length]
351 else:
351 else:
352 return _filtered_pull_requests
352 return _filtered_pull_requests
353
353
    def get_not_reviewed(self, user_id):
        """
        Return every pull request the given user is registered on as a
        reviewer, regardless of the pull request's current review state.

        :param user_id: id of the reviewer user
        :returns: list of PullRequest instances
        """
        return [
            x.pull_request for x in PullRequestReviewers.query().filter(
                PullRequestReviewers.user_id == user_id).all()
        ]
359
359
360 def _prepare_participating_query(self, user_id=None, statuses=None,
360 def _prepare_participating_query(self, user_id=None, statuses=None,
361 order_by=None, order_dir='desc'):
361 order_by=None, order_dir='desc'):
362 q = PullRequest.query()
362 q = PullRequest.query()
363 if user_id:
363 if user_id:
364 reviewers_subquery = Session().query(
364 reviewers_subquery = Session().query(
365 PullRequestReviewers.pull_request_id).filter(
365 PullRequestReviewers.pull_request_id).filter(
366 PullRequestReviewers.user_id == user_id).subquery()
366 PullRequestReviewers.user_id == user_id).subquery()
367 user_filter = or_(
367 user_filter = or_(
368 PullRequest.user_id == user_id,
368 PullRequest.user_id == user_id,
369 PullRequest.pull_request_id.in_(reviewers_subquery)
369 PullRequest.pull_request_id.in_(reviewers_subquery)
370 )
370 )
371 q = PullRequest.query().filter(user_filter)
371 q = PullRequest.query().filter(user_filter)
372
372
373 # closed,opened
373 # closed,opened
374 if statuses:
374 if statuses:
375 q = q.filter(PullRequest.status.in_(statuses))
375 q = q.filter(PullRequest.status.in_(statuses))
376
376
377 if order_by:
377 if order_by:
378 order_map = {
378 order_map = {
379 'name_raw': PullRequest.pull_request_id,
379 'name_raw': PullRequest.pull_request_id,
380 'title': PullRequest.title,
380 'title': PullRequest.title,
381 'updated_on_raw': PullRequest.updated_on,
381 'updated_on_raw': PullRequest.updated_on,
382 'target_repo': PullRequest.target_repo_id
382 'target_repo': PullRequest.target_repo_id
383 }
383 }
384 if order_dir == 'asc':
384 if order_dir == 'asc':
385 q = q.order_by(order_map[order_by].asc())
385 q = q.order_by(order_map[order_by].asc())
386 else:
386 else:
387 q = q.order_by(order_map[order_by].desc())
387 q = q.order_by(order_map[order_by].desc())
388
388
389 return q
389 return q
390
390
391 def count_im_participating_in(self, user_id=None, statuses=None):
391 def count_im_participating_in(self, user_id=None, statuses=None):
392 q = self._prepare_participating_query(user_id, statuses=statuses)
392 q = self._prepare_participating_query(user_id, statuses=statuses)
393 return q.count()
393 return q.count()
394
394
395 def get_im_participating_in(
395 def get_im_participating_in(
396 self, user_id=None, statuses=None, offset=0,
396 self, user_id=None, statuses=None, offset=0,
397 length=None, order_by=None, order_dir='desc'):
397 length=None, order_by=None, order_dir='desc'):
398 """
398 """
399 Get all Pull requests that i'm participating in, or i have opened
399 Get all Pull requests that i'm participating in, or i have opened
400 """
400 """
401
401
402 q = self._prepare_participating_query(
402 q = self._prepare_participating_query(
403 user_id, statuses=statuses, order_by=order_by,
403 user_id, statuses=statuses, order_by=order_by,
404 order_dir=order_dir)
404 order_dir=order_dir)
405
405
406 if length:
406 if length:
407 pull_requests = q.limit(length).offset(offset).all()
407 pull_requests = q.limit(length).offset(offset).all()
408 else:
408 else:
409 pull_requests = q.all()
409 pull_requests = q.all()
410
410
411 return pull_requests
411 return pull_requests
412
412
    def get_versions(self, pull_request):
        """
        Return all saved versions of the given pull request, ordered by
        version id ascending (oldest version first).

        :param pull_request: PullRequest instance to fetch versions for
        :returns: list of PullRequestVersion instances
        """
        return PullRequestVersion.query()\
            .filter(PullRequestVersion.pull_request == pull_request)\
            .order_by(PullRequestVersion.pull_request_version_id.asc())\
            .all()
421
421
    def get_pr_version(self, pull_request_id, version=None):
        """
        Resolve a pull request together with an optional version of it.

        :param pull_request_id: id of the pull request
        :param version: a PullRequestVersion id, the string 'latest', or
            None/falsy for the current (unversioned) pull request
        :returns: 4-tuple of
            (original PullRequest, object used for data access — either the
            PullRequest or a PullRequestVersion, display wrapper object,
            at_version marker)
        """
        at_version = None

        if version and version == 'latest':
            # 'latest' maps to the current pull request itself
            pull_request_ver = PullRequest.get(pull_request_id)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_obj
            at_version = 'latest'
        elif version:
            # a concrete stored version of the pull request
            pull_request_ver = PullRequestVersion.get_or_404(version)
            pull_request_obj = pull_request_ver
            _org_pull_request_obj = pull_request_ver.pull_request
            at_version = pull_request_ver.pull_request_version_id
        else:
            # no version requested: resolve the pull request itself
            _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
                pull_request_id)

        pull_request_display_obj = PullRequest.get_pr_display_object(
            pull_request_obj, _org_pull_request_obj)

        return _org_pull_request_obj, pull_request_obj, \
            pull_request_display_obj, at_version
444
444
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, title, description=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        """
        Create a new pull request together with its reviewer entries.

        Commits the session before running the merge simulation (see the
        inline comment about row locks), then fires notifications, the
        'create' hook and an audit log entry.

        :param created_by: user (or user id) creating the pull request
        :param source_repo: source repository (or repo name/id)
        :param source_ref: source ref string
        :param target_repo: target repository (or repo name/id)
        :param target_ref: target ref string
        :param revisions: list of commit ids included in the pull request
        :param reviewers: iterable of (user_id, reasons, mandatory, rules)
            tuples describing the reviewers to attach
        :param title: pull request title
        :param description: optional description text
        :param description_renderer: renderer used for the description
        :param reviewer_data: raw reviewer-rules data stored on the PR
        :param translator: optional translation function; defaults to the
            current request's translator
        :param auth_user: AuthUser performing the action (used for audit
            logging and merge validation); defaults to the creator
        :returns: the newly created PullRequest
        """
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data

        # flush so the pull request gets an id we can attach reviewers to
        Session().add(pull_request)
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # can be that user is member of more but we pick the first same,
                    # same as default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
            Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. This has to do with a fact
        # that before queries do some row-locking. And because of that
        # we need to commit and finish transaction before below validate call
        # that for large repos could be long resulting in long row locks
        Session().commit()

        # prepare workspace, and run initial merge simulation
        MergeCheck.validate(
            pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids)
        self._trigger_pull_request_hook(
            pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
549
549
550 def _trigger_pull_request_hook(self, pull_request, user, action):
550 def _trigger_pull_request_hook(self, pull_request, user, action):
551 pull_request = self.__get_pull_request(pull_request)
551 pull_request = self.__get_pull_request(pull_request)
552 target_scm = pull_request.target_repo.scm_instance()
552 target_scm = pull_request.target_repo.scm_instance()
553 if action == 'create':
553 if action == 'create':
554 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
554 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
555 elif action == 'merge':
555 elif action == 'merge':
556 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
556 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
557 elif action == 'close':
557 elif action == 'close':
558 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
558 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
559 elif action == 'review_status_change':
559 elif action == 'review_status_change':
560 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
560 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
561 elif action == 'update':
561 elif action == 'update':
562 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
562 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
563 else:
563 else:
564 return
564 return
565
565
566 trigger_hook(
566 trigger_hook(
567 username=user.username,
567 username=user.username,
568 repo_name=pull_request.target_repo.repo_name,
568 repo_name=pull_request.target_repo.repo_name,
569 repo_alias=target_scm.alias,
569 repo_alias=target_scm.alias,
570 pull_request=pull_request)
570 pull_request=pull_request)
571
571
572 def _get_commit_ids(self, pull_request):
572 def _get_commit_ids(self, pull_request):
573 """
573 """
574 Return the commit ids of the merged pull request.
574 Return the commit ids of the merged pull request.
575
575
576 This method is not dealing correctly yet with the lack of autoupdates
576 This method is not dealing correctly yet with the lack of autoupdates
577 nor with the implicit target updates.
577 nor with the implicit target updates.
578 For example: if a commit in the source repo is already in the target it
578 For example: if a commit in the source repo is already in the target it
579 will be reported anyways.
579 will be reported anyways.
580 """
580 """
581 merge_rev = pull_request.merge_rev
581 merge_rev = pull_request.merge_rev
582 if merge_rev is None:
582 if merge_rev is None:
583 raise ValueError('This pull request was not merged yet')
583 raise ValueError('This pull request was not merged yet')
584
584
585 commit_ids = list(pull_request.revisions)
585 commit_ids = list(pull_request.revisions)
586 if merge_rev not in commit_ids:
586 if merge_rev not in commit_ids:
587 commit_ids.append(merge_rev)
587 commit_ids.append(merge_rev)
588
588
589 return commit_ids
589 return commit_ids
590
590
591 def merge_repo(self, pull_request, user, extras):
591 def merge_repo(self, pull_request, user, extras):
592 log.debug("Merging pull request %s", pull_request.pull_request_id)
592 log.debug("Merging pull request %s", pull_request.pull_request_id)
593 merge_state = self._merge_pull_request(pull_request, user, extras)
593 merge_state = self._merge_pull_request(pull_request, user, extras)
594 if merge_state.executed:
594 if merge_state.executed:
595 log.debug(
595 log.debug(
596 "Merge was successful, updating the pull request comments.")
596 "Merge was successful, updating the pull request comments.")
597 self._comment_and_close_pr(pull_request, user, merge_state)
597 self._comment_and_close_pr(pull_request, user, merge_state)
598
598
599 self._log_audit_action(
599 self._log_audit_action(
600 'repo.pull_request.merge',
600 'repo.pull_request.merge',
601 {'merge_state': merge_state.__dict__},
601 {'merge_state': merge_state.__dict__},
602 user, pull_request)
602 user, pull_request)
603
603
604 else:
604 else:
605 log.warn("Merge failed, not updating the pull request.")
605 log.warn("Merge failed, not updating the pull request.")
606 return merge_state
606 return merge_state
607
607
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Run the low level vcs merge of ``pull_request`` into its target.

        :param pull_request: PullRequest instance to merge
        :param user: user performing the merge; provides the commit identity
            (``short_contact`` as name, ``email`` as email)
        :param extras: hook extras; serialized into RC_SCM_DATA for hooks
        :param merge_msg: optional merge commit message template; falls back
            to the configured ``vcs_settings.MERGE_MESSAGE_TMPL``
        :returns: merge state object returned by ``target_vcs.merge``
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        # build the merge commit message from the template; the template can
        # reference any of the keyword placeholders passed to format() below
        message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)

        # re-resolve the target reference right before merging, so the merge
        # happens on top of the current state of the target
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            user_name = user.short_contact
            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)
        return merge_state
645
648
    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        """
        After a successful merge: record the merge commit id on the pull
        request, add a closing comment, invalidate the target repository
        caches and fire the 'merge' hook.

        :param pull_request: the merged PullRequest instance
        :param user: user performing the merge (comment author)
        :param merge_state: merge state object carrying the merge ref
        :param close_msg: optional text for the closing comment
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        # the closing comment is a status-changing comment on the PR
        CommentsModel().create(
            text=safe_unicode(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self._trigger_pull_request_hook(pull_request, user, 'merge')
667
670
668 def has_valid_update_type(self, pull_request):
671 def has_valid_update_type(self, pull_request):
669 source_ref_type = pull_request.source_ref_parts.type
672 source_ref_type = pull_request.source_ref_parts.type
670 return source_ref_type in ['book', 'branch', 'tag']
673 return source_ref_type in ['book', 'branch', 'tag']
671
674
672 def update_commits(self, pull_request):
675 def update_commits(self, pull_request):
673 """
676 """
674 Get the updated list of commits for the pull request
677 Get the updated list of commits for the pull request
675 and return the new pull request version and the list
678 and return the new pull request version and the list
676 of commits processed by this update action
679 of commits processed by this update action
677 """
680 """
678 pull_request = self.__get_pull_request(pull_request)
681 pull_request = self.__get_pull_request(pull_request)
679 source_ref_type = pull_request.source_ref_parts.type
682 source_ref_type = pull_request.source_ref_parts.type
680 source_ref_name = pull_request.source_ref_parts.name
683 source_ref_name = pull_request.source_ref_parts.name
681 source_ref_id = pull_request.source_ref_parts.commit_id
684 source_ref_id = pull_request.source_ref_parts.commit_id
682
685
683 target_ref_type = pull_request.target_ref_parts.type
686 target_ref_type = pull_request.target_ref_parts.type
684 target_ref_name = pull_request.target_ref_parts.name
687 target_ref_name = pull_request.target_ref_parts.name
685 target_ref_id = pull_request.target_ref_parts.commit_id
688 target_ref_id = pull_request.target_ref_parts.commit_id
686
689
687 if not self.has_valid_update_type(pull_request):
690 if not self.has_valid_update_type(pull_request):
688 log.debug(
691 log.debug(
689 "Skipping update of pull request %s due to ref type: %s",
692 "Skipping update of pull request %s due to ref type: %s",
690 pull_request, source_ref_type)
693 pull_request, source_ref_type)
691 return UpdateResponse(
694 return UpdateResponse(
692 executed=False,
695 executed=False,
693 reason=UpdateFailureReason.WRONG_REF_TYPE,
696 reason=UpdateFailureReason.WRONG_REF_TYPE,
694 old=pull_request, new=None, changes=None,
697 old=pull_request, new=None, changes=None,
695 source_changed=False, target_changed=False)
698 source_changed=False, target_changed=False)
696
699
697 # source repo
700 # source repo
698 source_repo = pull_request.source_repo.scm_instance()
701 source_repo = pull_request.source_repo.scm_instance()
699 try:
702 try:
700 source_commit = source_repo.get_commit(commit_id=source_ref_name)
703 source_commit = source_repo.get_commit(commit_id=source_ref_name)
701 except CommitDoesNotExistError:
704 except CommitDoesNotExistError:
702 return UpdateResponse(
705 return UpdateResponse(
703 executed=False,
706 executed=False,
704 reason=UpdateFailureReason.MISSING_SOURCE_REF,
707 reason=UpdateFailureReason.MISSING_SOURCE_REF,
705 old=pull_request, new=None, changes=None,
708 old=pull_request, new=None, changes=None,
706 source_changed=False, target_changed=False)
709 source_changed=False, target_changed=False)
707
710
708 source_changed = source_ref_id != source_commit.raw_id
711 source_changed = source_ref_id != source_commit.raw_id
709
712
710 # target repo
713 # target repo
711 target_repo = pull_request.target_repo.scm_instance()
714 target_repo = pull_request.target_repo.scm_instance()
712 try:
715 try:
713 target_commit = target_repo.get_commit(commit_id=target_ref_name)
716 target_commit = target_repo.get_commit(commit_id=target_ref_name)
714 except CommitDoesNotExistError:
717 except CommitDoesNotExistError:
715 return UpdateResponse(
718 return UpdateResponse(
716 executed=False,
719 executed=False,
717 reason=UpdateFailureReason.MISSING_TARGET_REF,
720 reason=UpdateFailureReason.MISSING_TARGET_REF,
718 old=pull_request, new=None, changes=None,
721 old=pull_request, new=None, changes=None,
719 source_changed=False, target_changed=False)
722 source_changed=False, target_changed=False)
720 target_changed = target_ref_id != target_commit.raw_id
723 target_changed = target_ref_id != target_commit.raw_id
721
724
722 if not (source_changed or target_changed):
725 if not (source_changed or target_changed):
723 log.debug("Nothing changed in pull request %s", pull_request)
726 log.debug("Nothing changed in pull request %s", pull_request)
724 return UpdateResponse(
727 return UpdateResponse(
725 executed=False,
728 executed=False,
726 reason=UpdateFailureReason.NO_CHANGE,
729 reason=UpdateFailureReason.NO_CHANGE,
727 old=pull_request, new=None, changes=None,
730 old=pull_request, new=None, changes=None,
728 source_changed=target_changed, target_changed=source_changed)
731 source_changed=target_changed, target_changed=source_changed)
729
732
730 change_in_found = 'target repo' if target_changed else 'source repo'
733 change_in_found = 'target repo' if target_changed else 'source repo'
731 log.debug('Updating pull request because of change in %s detected',
734 log.debug('Updating pull request because of change in %s detected',
732 change_in_found)
735 change_in_found)
733
736
734 # Finally there is a need for an update, in case of source change
737 # Finally there is a need for an update, in case of source change
735 # we create a new version, else just an update
738 # we create a new version, else just an update
736 if source_changed:
739 if source_changed:
737 pull_request_version = self._create_version_from_snapshot(pull_request)
740 pull_request_version = self._create_version_from_snapshot(pull_request)
738 self._link_comments_to_version(pull_request_version)
741 self._link_comments_to_version(pull_request_version)
739 else:
742 else:
740 try:
743 try:
741 ver = pull_request.versions[-1]
744 ver = pull_request.versions[-1]
742 except IndexError:
745 except IndexError:
743 ver = None
746 ver = None
744
747
745 pull_request.pull_request_version_id = \
748 pull_request.pull_request_version_id = \
746 ver.pull_request_version_id if ver else None
749 ver.pull_request_version_id if ver else None
747 pull_request_version = pull_request
750 pull_request_version = pull_request
748
751
749 try:
752 try:
750 if target_ref_type in ('tag', 'branch', 'book'):
753 if target_ref_type in ('tag', 'branch', 'book'):
751 target_commit = target_repo.get_commit(target_ref_name)
754 target_commit = target_repo.get_commit(target_ref_name)
752 else:
755 else:
753 target_commit = target_repo.get_commit(target_ref_id)
756 target_commit = target_repo.get_commit(target_ref_id)
754 except CommitDoesNotExistError:
757 except CommitDoesNotExistError:
755 return UpdateResponse(
758 return UpdateResponse(
756 executed=False,
759 executed=False,
757 reason=UpdateFailureReason.MISSING_TARGET_REF,
760 reason=UpdateFailureReason.MISSING_TARGET_REF,
758 old=pull_request, new=None, changes=None,
761 old=pull_request, new=None, changes=None,
759 source_changed=source_changed, target_changed=target_changed)
762 source_changed=source_changed, target_changed=target_changed)
760
763
761 # re-compute commit ids
764 # re-compute commit ids
762 old_commit_ids = pull_request.revisions
765 old_commit_ids = pull_request.revisions
763 pre_load = ["author", "branch", "date", "message"]
766 pre_load = ["author", "branch", "date", "message"]
764 commit_ranges = target_repo.compare(
767 commit_ranges = target_repo.compare(
765 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
768 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
766 pre_load=pre_load)
769 pre_load=pre_load)
767
770
768 ancestor = target_repo.get_common_ancestor(
771 ancestor = target_repo.get_common_ancestor(
769 target_commit.raw_id, source_commit.raw_id, source_repo)
772 target_commit.raw_id, source_commit.raw_id, source_repo)
770
773
771 pull_request.source_ref = '%s:%s:%s' % (
774 pull_request.source_ref = '%s:%s:%s' % (
772 source_ref_type, source_ref_name, source_commit.raw_id)
775 source_ref_type, source_ref_name, source_commit.raw_id)
773 pull_request.target_ref = '%s:%s:%s' % (
776 pull_request.target_ref = '%s:%s:%s' % (
774 target_ref_type, target_ref_name, ancestor)
777 target_ref_type, target_ref_name, ancestor)
775
778
776 pull_request.revisions = [
779 pull_request.revisions = [
777 commit.raw_id for commit in reversed(commit_ranges)]
780 commit.raw_id for commit in reversed(commit_ranges)]
778 pull_request.updated_on = datetime.datetime.now()
781 pull_request.updated_on = datetime.datetime.now()
779 Session().add(pull_request)
782 Session().add(pull_request)
780 new_commit_ids = pull_request.revisions
783 new_commit_ids = pull_request.revisions
781
784
782 old_diff_data, new_diff_data = self._generate_update_diffs(
785 old_diff_data, new_diff_data = self._generate_update_diffs(
783 pull_request, pull_request_version)
786 pull_request, pull_request_version)
784
787
785 # calculate commit and file changes
788 # calculate commit and file changes
786 changes = self._calculate_commit_id_changes(
789 changes = self._calculate_commit_id_changes(
787 old_commit_ids, new_commit_ids)
790 old_commit_ids, new_commit_ids)
788 file_changes = self._calculate_file_changes(
791 file_changes = self._calculate_file_changes(
789 old_diff_data, new_diff_data)
792 old_diff_data, new_diff_data)
790
793
791 # set comments as outdated if DIFFS changed
794 # set comments as outdated if DIFFS changed
792 CommentsModel().outdate_comments(
795 CommentsModel().outdate_comments(
793 pull_request, old_diff_data=old_diff_data,
796 pull_request, old_diff_data=old_diff_data,
794 new_diff_data=new_diff_data)
797 new_diff_data=new_diff_data)
795
798
796 commit_changes = (changes.added or changes.removed)
799 commit_changes = (changes.added or changes.removed)
797 file_node_changes = (
800 file_node_changes = (
798 file_changes.added or file_changes.modified or file_changes.removed)
801 file_changes.added or file_changes.modified or file_changes.removed)
799 pr_has_changes = commit_changes or file_node_changes
802 pr_has_changes = commit_changes or file_node_changes
800
803
801 # Add an automatic comment to the pull request, in case
804 # Add an automatic comment to the pull request, in case
802 # anything has changed
805 # anything has changed
803 if pr_has_changes:
806 if pr_has_changes:
804 update_comment = CommentsModel().create(
807 update_comment = CommentsModel().create(
805 text=self._render_update_message(changes, file_changes),
808 text=self._render_update_message(changes, file_changes),
806 repo=pull_request.target_repo,
809 repo=pull_request.target_repo,
807 user=pull_request.author,
810 user=pull_request.author,
808 pull_request=pull_request,
811 pull_request=pull_request,
809 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
812 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
810
813
811 # Update status to "Under Review" for added commits
814 # Update status to "Under Review" for added commits
812 for commit_id in changes.added:
815 for commit_id in changes.added:
813 ChangesetStatusModel().set_status(
816 ChangesetStatusModel().set_status(
814 repo=pull_request.source_repo,
817 repo=pull_request.source_repo,
815 status=ChangesetStatus.STATUS_UNDER_REVIEW,
818 status=ChangesetStatus.STATUS_UNDER_REVIEW,
816 comment=update_comment,
819 comment=update_comment,
817 user=pull_request.author,
820 user=pull_request.author,
818 pull_request=pull_request,
821 pull_request=pull_request,
819 revision=commit_id)
822 revision=commit_id)
820
823
821 log.debug(
824 log.debug(
822 'Updated pull request %s, added_ids: %s, common_ids: %s, '
825 'Updated pull request %s, added_ids: %s, common_ids: %s, '
823 'removed_ids: %s', pull_request.pull_request_id,
826 'removed_ids: %s', pull_request.pull_request_id,
824 changes.added, changes.common, changes.removed)
827 changes.added, changes.common, changes.removed)
825 log.debug(
828 log.debug(
826 'Updated pull request with the following file changes: %s',
829 'Updated pull request with the following file changes: %s',
827 file_changes)
830 file_changes)
828
831
829 log.info(
832 log.info(
830 "Updated pull request %s from commit %s to commit %s, "
833 "Updated pull request %s from commit %s to commit %s, "
831 "stored new version %s of this pull request.",
834 "stored new version %s of this pull request.",
832 pull_request.pull_request_id, source_ref_id,
835 pull_request.pull_request_id, source_ref_id,
833 pull_request.source_ref_parts.commit_id,
836 pull_request.source_ref_parts.commit_id,
834 pull_request_version.pull_request_version_id)
837 pull_request_version.pull_request_version_id)
835 Session().commit()
838 Session().commit()
836 self._trigger_pull_request_hook(
839 self._trigger_pull_request_hook(
837 pull_request, pull_request.author, 'update')
840 pull_request, pull_request.author, 'update')
838
841
839 return UpdateResponse(
842 return UpdateResponse(
840 executed=True, reason=UpdateFailureReason.NONE,
843 executed=True, reason=UpdateFailureReason.NONE,
841 old=pull_request, new=pull_request_version, changes=changes,
844 old=pull_request, new=pull_request_version, changes=changes,
842 source_changed=source_changed, target_changed=target_changed)
845 source_changed=source_changed, target_changed=target_changed)
843
846
844 def _create_version_from_snapshot(self, pull_request):
847 def _create_version_from_snapshot(self, pull_request):
845 version = PullRequestVersion()
848 version = PullRequestVersion()
846 version.title = pull_request.title
849 version.title = pull_request.title
847 version.description = pull_request.description
850 version.description = pull_request.description
848 version.status = pull_request.status
851 version.status = pull_request.status
849 version.created_on = datetime.datetime.now()
852 version.created_on = datetime.datetime.now()
850 version.updated_on = pull_request.updated_on
853 version.updated_on = pull_request.updated_on
851 version.user_id = pull_request.user_id
854 version.user_id = pull_request.user_id
852 version.source_repo = pull_request.source_repo
855 version.source_repo = pull_request.source_repo
853 version.source_ref = pull_request.source_ref
856 version.source_ref = pull_request.source_ref
854 version.target_repo = pull_request.target_repo
857 version.target_repo = pull_request.target_repo
855 version.target_ref = pull_request.target_ref
858 version.target_ref = pull_request.target_ref
856
859
857 version._last_merge_source_rev = pull_request._last_merge_source_rev
860 version._last_merge_source_rev = pull_request._last_merge_source_rev
858 version._last_merge_target_rev = pull_request._last_merge_target_rev
861 version._last_merge_target_rev = pull_request._last_merge_target_rev
859 version.last_merge_status = pull_request.last_merge_status
862 version.last_merge_status = pull_request.last_merge_status
860 version.shadow_merge_ref = pull_request.shadow_merge_ref
863 version.shadow_merge_ref = pull_request.shadow_merge_ref
861 version.merge_rev = pull_request.merge_rev
864 version.merge_rev = pull_request.merge_rev
862 version.reviewer_data = pull_request.reviewer_data
865 version.reviewer_data = pull_request.reviewer_data
863
866
864 version.revisions = pull_request.revisions
867 version.revisions = pull_request.revisions
865 version.pull_request = pull_request
868 version.pull_request = pull_request
866 Session().add(version)
869 Session().add(version)
867 Session().flush()
870 Session().flush()
868
871
869 return version
872 return version
870
873
871 def _generate_update_diffs(self, pull_request, pull_request_version):
874 def _generate_update_diffs(self, pull_request, pull_request_version):
872
875
873 diff_context = (
876 diff_context = (
874 self.DIFF_CONTEXT +
877 self.DIFF_CONTEXT +
875 CommentsModel.needed_extra_diff_context())
878 CommentsModel.needed_extra_diff_context())
876
879
877 source_repo = pull_request_version.source_repo
880 source_repo = pull_request_version.source_repo
878 source_ref_id = pull_request_version.source_ref_parts.commit_id
881 source_ref_id = pull_request_version.source_ref_parts.commit_id
879 target_ref_id = pull_request_version.target_ref_parts.commit_id
882 target_ref_id = pull_request_version.target_ref_parts.commit_id
880 old_diff = self._get_diff_from_pr_or_version(
883 old_diff = self._get_diff_from_pr_or_version(
881 source_repo, source_ref_id, target_ref_id, context=diff_context)
884 source_repo, source_ref_id, target_ref_id, context=diff_context)
882
885
883 source_repo = pull_request.source_repo
886 source_repo = pull_request.source_repo
884 source_ref_id = pull_request.source_ref_parts.commit_id
887 source_ref_id = pull_request.source_ref_parts.commit_id
885 target_ref_id = pull_request.target_ref_parts.commit_id
888 target_ref_id = pull_request.target_ref_parts.commit_id
886
889
887 new_diff = self._get_diff_from_pr_or_version(
890 new_diff = self._get_diff_from_pr_or_version(
888 source_repo, source_ref_id, target_ref_id, context=diff_context)
891 source_repo, source_ref_id, target_ref_id, context=diff_context)
889
892
890 old_diff_data = diffs.DiffProcessor(old_diff)
893 old_diff_data = diffs.DiffProcessor(old_diff)
891 old_diff_data.prepare()
894 old_diff_data.prepare()
892 new_diff_data = diffs.DiffProcessor(new_diff)
895 new_diff_data = diffs.DiffProcessor(new_diff)
893 new_diff_data.prepare()
896 new_diff_data.prepare()
894
897
895 return old_diff_data, new_diff_data
898 return old_diff_data, new_diff_data
896
899
897 def _link_comments_to_version(self, pull_request_version):
900 def _link_comments_to_version(self, pull_request_version):
898 """
901 """
899 Link all unlinked comments of this pull request to the given version.
902 Link all unlinked comments of this pull request to the given version.
900
903
901 :param pull_request_version: The `PullRequestVersion` to which
904 :param pull_request_version: The `PullRequestVersion` to which
902 the comments shall be linked.
905 the comments shall be linked.
903
906
904 """
907 """
905 pull_request = pull_request_version.pull_request
908 pull_request = pull_request_version.pull_request
906 comments = ChangesetComment.query()\
909 comments = ChangesetComment.query()\
907 .filter(
910 .filter(
908 # TODO: johbo: Should we query for the repo at all here?
911 # TODO: johbo: Should we query for the repo at all here?
909 # Pending decision on how comments of PRs are to be related
912 # Pending decision on how comments of PRs are to be related
910 # to either the source repo, the target repo or no repo at all.
913 # to either the source repo, the target repo or no repo at all.
911 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
914 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
912 ChangesetComment.pull_request == pull_request,
915 ChangesetComment.pull_request == pull_request,
913 ChangesetComment.pull_request_version == None)\
916 ChangesetComment.pull_request_version == None)\
914 .order_by(ChangesetComment.comment_id.asc())
917 .order_by(ChangesetComment.comment_id.asc())
915
918
916 # TODO: johbo: Find out why this breaks if it is done in a bulk
919 # TODO: johbo: Find out why this breaks if it is done in a bulk
917 # operation.
920 # operation.
918 for comment in comments:
921 for comment in comments:
919 comment.pull_request_version_id = (
922 comment.pull_request_version_id = (
920 pull_request_version.pull_request_version_id)
923 pull_request_version.pull_request_version_id)
921 Session().add(comment)
924 Session().add(comment)
922
925
923 def _calculate_commit_id_changes(self, old_ids, new_ids):
926 def _calculate_commit_id_changes(self, old_ids, new_ids):
924 added = [x for x in new_ids if x not in old_ids]
927 added = [x for x in new_ids if x not in old_ids]
925 common = [x for x in new_ids if x in old_ids]
928 common = [x for x in new_ids if x in old_ids]
926 removed = [x for x in old_ids if x not in new_ids]
929 removed = [x for x in old_ids if x not in new_ids]
927 total = new_ids
930 total = new_ids
928 return ChangeTuple(added, common, removed, total)
931 return ChangeTuple(added, common, removed, total)
929
932
930 def _calculate_file_changes(self, old_diff_data, new_diff_data):
933 def _calculate_file_changes(self, old_diff_data, new_diff_data):
931
934
932 old_files = OrderedDict()
935 old_files = OrderedDict()
933 for diff_data in old_diff_data.parsed_diff:
936 for diff_data in old_diff_data.parsed_diff:
934 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
937 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
935
938
936 added_files = []
939 added_files = []
937 modified_files = []
940 modified_files = []
938 removed_files = []
941 removed_files = []
939 for diff_data in new_diff_data.parsed_diff:
942 for diff_data in new_diff_data.parsed_diff:
940 new_filename = diff_data['filename']
943 new_filename = diff_data['filename']
941 new_hash = md5_safe(diff_data['raw_diff'])
944 new_hash = md5_safe(diff_data['raw_diff'])
942
945
943 old_hash = old_files.get(new_filename)
946 old_hash = old_files.get(new_filename)
944 if not old_hash:
947 if not old_hash:
945 # file is not present in old diff, means it's added
948 # file is not present in old diff, means it's added
946 added_files.append(new_filename)
949 added_files.append(new_filename)
947 else:
950 else:
948 if new_hash != old_hash:
951 if new_hash != old_hash:
949 modified_files.append(new_filename)
952 modified_files.append(new_filename)
950 # now remove a file from old, since we have seen it already
953 # now remove a file from old, since we have seen it already
951 del old_files[new_filename]
954 del old_files[new_filename]
952
955
953 # removed files is when there are present in old, but not in NEW,
956 # removed files is when there are present in old, but not in NEW,
954 # since we remove old files that are present in new diff, left-overs
957 # since we remove old files that are present in new diff, left-overs
955 # if any should be the removed files
958 # if any should be the removed files
956 removed_files.extend(old_files.keys())
959 removed_files.extend(old_files.keys())
957
960
958 return FileChangeTuple(added_files, modified_files, removed_files)
961 return FileChangeTuple(added_files, modified_files, removed_files)
959
962
960 def _render_update_message(self, changes, file_changes):
963 def _render_update_message(self, changes, file_changes):
961 """
964 """
962 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
965 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
963 so it's always looking the same disregarding on which default
966 so it's always looking the same disregarding on which default
964 renderer system is using.
967 renderer system is using.
965
968
966 :param changes: changes named tuple
969 :param changes: changes named tuple
967 :param file_changes: file changes named tuple
970 :param file_changes: file changes named tuple
968
971
969 """
972 """
970 new_status = ChangesetStatus.get_status_lbl(
973 new_status = ChangesetStatus.get_status_lbl(
971 ChangesetStatus.STATUS_UNDER_REVIEW)
974 ChangesetStatus.STATUS_UNDER_REVIEW)
972
975
973 changed_files = (
976 changed_files = (
974 file_changes.added + file_changes.modified + file_changes.removed)
977 file_changes.added + file_changes.modified + file_changes.removed)
975
978
976 params = {
979 params = {
977 'under_review_label': new_status,
980 'under_review_label': new_status,
978 'added_commits': changes.added,
981 'added_commits': changes.added,
979 'removed_commits': changes.removed,
982 'removed_commits': changes.removed,
980 'changed_files': changed_files,
983 'changed_files': changed_files,
981 'added_files': file_changes.added,
984 'added_files': file_changes.added,
982 'modified_files': file_changes.modified,
985 'modified_files': file_changes.modified,
983 'removed_files': file_changes.removed,
986 'removed_files': file_changes.removed,
984 }
987 }
985 renderer = RstTemplateRenderer()
988 renderer = RstTemplateRenderer()
986 return renderer.render('pull_request_update.mako', **params)
989 return renderer.render('pull_request_update.mako', **params)
987
990
988 def edit(self, pull_request, title, description, description_renderer, user):
991 def edit(self, pull_request, title, description, description_renderer, user):
989 pull_request = self.__get_pull_request(pull_request)
992 pull_request = self.__get_pull_request(pull_request)
990 old_data = pull_request.get_api_data(with_merge_state=False)
993 old_data = pull_request.get_api_data(with_merge_state=False)
991 if pull_request.is_closed():
994 if pull_request.is_closed():
992 raise ValueError('This pull request is closed')
995 raise ValueError('This pull request is closed')
993 if title:
996 if title:
994 pull_request.title = title
997 pull_request.title = title
995 pull_request.description = description
998 pull_request.description = description
996 pull_request.updated_on = datetime.datetime.now()
999 pull_request.updated_on = datetime.datetime.now()
997 pull_request.description_renderer = description_renderer
1000 pull_request.description_renderer = description_renderer
998 Session().add(pull_request)
1001 Session().add(pull_request)
999 self._log_audit_action(
1002 self._log_audit_action(
1000 'repo.pull_request.edit', {'old_data': old_data},
1003 'repo.pull_request.edit', {'old_data': old_data},
1001 user, pull_request)
1004 user, pull_request)
1002
1005
1003 def update_reviewers(self, pull_request, reviewer_data, user):
1006 def update_reviewers(self, pull_request, reviewer_data, user):
1004 """
1007 """
1005 Update the reviewers in the pull request
1008 Update the reviewers in the pull request
1006
1009
1007 :param pull_request: the pr to update
1010 :param pull_request: the pr to update
1008 :param reviewer_data: list of tuples
1011 :param reviewer_data: list of tuples
1009 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1012 [(user, ['reason1', 'reason2'], mandatory_flag, [rules])]
1010 """
1013 """
1011 pull_request = self.__get_pull_request(pull_request)
1014 pull_request = self.__get_pull_request(pull_request)
1012 if pull_request.is_closed():
1015 if pull_request.is_closed():
1013 raise ValueError('This pull request is closed')
1016 raise ValueError('This pull request is closed')
1014
1017
1015 reviewers = {}
1018 reviewers = {}
1016 for user_id, reasons, mandatory, rules in reviewer_data:
1019 for user_id, reasons, mandatory, rules in reviewer_data:
1017 if isinstance(user_id, (int, basestring)):
1020 if isinstance(user_id, (int, basestring)):
1018 user_id = self._get_user(user_id).user_id
1021 user_id = self._get_user(user_id).user_id
1019 reviewers[user_id] = {
1022 reviewers[user_id] = {
1020 'reasons': reasons, 'mandatory': mandatory}
1023 'reasons': reasons, 'mandatory': mandatory}
1021
1024
1022 reviewers_ids = set(reviewers.keys())
1025 reviewers_ids = set(reviewers.keys())
1023 current_reviewers = PullRequestReviewers.query()\
1026 current_reviewers = PullRequestReviewers.query()\
1024 .filter(PullRequestReviewers.pull_request ==
1027 .filter(PullRequestReviewers.pull_request ==
1025 pull_request).all()
1028 pull_request).all()
1026 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1029 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1027
1030
1028 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1031 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1029 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1032 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1030
1033
1031 log.debug("Adding %s reviewers", ids_to_add)
1034 log.debug("Adding %s reviewers", ids_to_add)
1032 log.debug("Removing %s reviewers", ids_to_remove)
1035 log.debug("Removing %s reviewers", ids_to_remove)
1033 changed = False
1036 changed = False
1034 for uid in ids_to_add:
1037 for uid in ids_to_add:
1035 changed = True
1038 changed = True
1036 _usr = self._get_user(uid)
1039 _usr = self._get_user(uid)
1037 reviewer = PullRequestReviewers()
1040 reviewer = PullRequestReviewers()
1038 reviewer.user = _usr
1041 reviewer.user = _usr
1039 reviewer.pull_request = pull_request
1042 reviewer.pull_request = pull_request
1040 reviewer.reasons = reviewers[uid]['reasons']
1043 reviewer.reasons = reviewers[uid]['reasons']
1041 # NOTE(marcink): mandatory shouldn't be changed now
1044 # NOTE(marcink): mandatory shouldn't be changed now
1042 # reviewer.mandatory = reviewers[uid]['reasons']
1045 # reviewer.mandatory = reviewers[uid]['reasons']
1043 Session().add(reviewer)
1046 Session().add(reviewer)
1044 self._log_audit_action(
1047 self._log_audit_action(
1045 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1048 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()},
1046 user, pull_request)
1049 user, pull_request)
1047
1050
1048 for uid in ids_to_remove:
1051 for uid in ids_to_remove:
1049 changed = True
1052 changed = True
1050 reviewers = PullRequestReviewers.query()\
1053 reviewers = PullRequestReviewers.query()\
1051 .filter(PullRequestReviewers.user_id == uid,
1054 .filter(PullRequestReviewers.user_id == uid,
1052 PullRequestReviewers.pull_request == pull_request)\
1055 PullRequestReviewers.pull_request == pull_request)\
1053 .all()
1056 .all()
1054 # use .all() in case we accidentally added the same person twice
1057 # use .all() in case we accidentally added the same person twice
1055 # this CAN happen due to the lack of DB checks
1058 # this CAN happen due to the lack of DB checks
1056 for obj in reviewers:
1059 for obj in reviewers:
1057 old_data = obj.get_dict()
1060 old_data = obj.get_dict()
1058 Session().delete(obj)
1061 Session().delete(obj)
1059 self._log_audit_action(
1062 self._log_audit_action(
1060 'repo.pull_request.reviewer.delete',
1063 'repo.pull_request.reviewer.delete',
1061 {'old_data': old_data}, user, pull_request)
1064 {'old_data': old_data}, user, pull_request)
1062
1065
1063 if changed:
1066 if changed:
1064 pull_request.updated_on = datetime.datetime.now()
1067 pull_request.updated_on = datetime.datetime.now()
1065 Session().add(pull_request)
1068 Session().add(pull_request)
1066
1069
1067 self.notify_reviewers(pull_request, ids_to_add)
1070 self.notify_reviewers(pull_request, ids_to_add)
1068 return ids_to_add, ids_to_remove
1071 return ids_to_add, ids_to_remove
1069
1072
1070 def get_url(self, pull_request, request=None, permalink=False):
1073 def get_url(self, pull_request, request=None, permalink=False):
1071 if not request:
1074 if not request:
1072 request = get_current_request()
1075 request = get_current_request()
1073
1076
1074 if permalink:
1077 if permalink:
1075 return request.route_url(
1078 return request.route_url(
1076 'pull_requests_global',
1079 'pull_requests_global',
1077 pull_request_id=pull_request.pull_request_id,)
1080 pull_request_id=pull_request.pull_request_id,)
1078 else:
1081 else:
1079 return request.route_url('pullrequest_show',
1082 return request.route_url('pullrequest_show',
1080 repo_name=safe_str(pull_request.target_repo.repo_name),
1083 repo_name=safe_str(pull_request.target_repo.repo_name),
1081 pull_request_id=pull_request.pull_request_id,)
1084 pull_request_id=pull_request.pull_request_id,)
1082
1085
1083 def get_shadow_clone_url(self, pull_request, request=None):
1086 def get_shadow_clone_url(self, pull_request, request=None):
1084 """
1087 """
1085 Returns qualified url pointing to the shadow repository. If this pull
1088 Returns qualified url pointing to the shadow repository. If this pull
1086 request is closed there is no shadow repository and ``None`` will be
1089 request is closed there is no shadow repository and ``None`` will be
1087 returned.
1090 returned.
1088 """
1091 """
1089 if pull_request.is_closed():
1092 if pull_request.is_closed():
1090 return None
1093 return None
1091 else:
1094 else:
1092 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1095 pr_url = urllib.unquote(self.get_url(pull_request, request=request))
1093 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1096 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1094
1097
1095 def notify_reviewers(self, pull_request, reviewers_ids):
1098 def notify_reviewers(self, pull_request, reviewers_ids):
1096 # notification to reviewers
1099 # notification to reviewers
1097 if not reviewers_ids:
1100 if not reviewers_ids:
1098 return
1101 return
1099
1102
1100 pull_request_obj = pull_request
1103 pull_request_obj = pull_request
1101 # get the current participants of this pull request
1104 # get the current participants of this pull request
1102 recipients = reviewers_ids
1105 recipients = reviewers_ids
1103 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1106 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1104
1107
1105 pr_source_repo = pull_request_obj.source_repo
1108 pr_source_repo = pull_request_obj.source_repo
1106 pr_target_repo = pull_request_obj.target_repo
1109 pr_target_repo = pull_request_obj.target_repo
1107
1110
1108 pr_url = h.route_url('pullrequest_show',
1111 pr_url = h.route_url('pullrequest_show',
1109 repo_name=pr_target_repo.repo_name,
1112 repo_name=pr_target_repo.repo_name,
1110 pull_request_id=pull_request_obj.pull_request_id,)
1113 pull_request_id=pull_request_obj.pull_request_id,)
1111
1114
1112 # set some variables for email notification
1115 # set some variables for email notification
1113 pr_target_repo_url = h.route_url(
1116 pr_target_repo_url = h.route_url(
1114 'repo_summary', repo_name=pr_target_repo.repo_name)
1117 'repo_summary', repo_name=pr_target_repo.repo_name)
1115
1118
1116 pr_source_repo_url = h.route_url(
1119 pr_source_repo_url = h.route_url(
1117 'repo_summary', repo_name=pr_source_repo.repo_name)
1120 'repo_summary', repo_name=pr_source_repo.repo_name)
1118
1121
1119 # pull request specifics
1122 # pull request specifics
1120 pull_request_commits = [
1123 pull_request_commits = [
1121 (x.raw_id, x.message)
1124 (x.raw_id, x.message)
1122 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1125 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1123
1126
1124 kwargs = {
1127 kwargs = {
1125 'user': pull_request.author,
1128 'user': pull_request.author,
1126 'pull_request': pull_request_obj,
1129 'pull_request': pull_request_obj,
1127 'pull_request_commits': pull_request_commits,
1130 'pull_request_commits': pull_request_commits,
1128
1131
1129 'pull_request_target_repo': pr_target_repo,
1132 'pull_request_target_repo': pr_target_repo,
1130 'pull_request_target_repo_url': pr_target_repo_url,
1133 'pull_request_target_repo_url': pr_target_repo_url,
1131
1134
1132 'pull_request_source_repo': pr_source_repo,
1135 'pull_request_source_repo': pr_source_repo,
1133 'pull_request_source_repo_url': pr_source_repo_url,
1136 'pull_request_source_repo_url': pr_source_repo_url,
1134
1137
1135 'pull_request_url': pr_url,
1138 'pull_request_url': pr_url,
1136 }
1139 }
1137
1140
1138 # pre-generate the subject for notification itself
1141 # pre-generate the subject for notification itself
1139 (subject,
1142 (subject,
1140 _h, _e, # we don't care about those
1143 _h, _e, # we don't care about those
1141 body_plaintext) = EmailNotificationModel().render_email(
1144 body_plaintext) = EmailNotificationModel().render_email(
1142 notification_type, **kwargs)
1145 notification_type, **kwargs)
1143
1146
1144 # create notification objects, and emails
1147 # create notification objects, and emails
1145 NotificationModel().create(
1148 NotificationModel().create(
1146 created_by=pull_request.author,
1149 created_by=pull_request.author,
1147 notification_subject=subject,
1150 notification_subject=subject,
1148 notification_body=body_plaintext,
1151 notification_body=body_plaintext,
1149 notification_type=notification_type,
1152 notification_type=notification_type,
1150 recipients=recipients,
1153 recipients=recipients,
1151 email_kwargs=kwargs,
1154 email_kwargs=kwargs,
1152 )
1155 )
1153
1156
1154 def delete(self, pull_request, user):
1157 def delete(self, pull_request, user):
1155 pull_request = self.__get_pull_request(pull_request)
1158 pull_request = self.__get_pull_request(pull_request)
1156 old_data = pull_request.get_api_data(with_merge_state=False)
1159 old_data = pull_request.get_api_data(with_merge_state=False)
1157 self._cleanup_merge_workspace(pull_request)
1160 self._cleanup_merge_workspace(pull_request)
1158 self._log_audit_action(
1161 self._log_audit_action(
1159 'repo.pull_request.delete', {'old_data': old_data},
1162 'repo.pull_request.delete', {'old_data': old_data},
1160 user, pull_request)
1163 user, pull_request)
1161 Session().delete(pull_request)
1164 Session().delete(pull_request)
1162
1165
1163 def close_pull_request(self, pull_request, user):
1166 def close_pull_request(self, pull_request, user):
1164 pull_request = self.__get_pull_request(pull_request)
1167 pull_request = self.__get_pull_request(pull_request)
1165 self._cleanup_merge_workspace(pull_request)
1168 self._cleanup_merge_workspace(pull_request)
1166 pull_request.status = PullRequest.STATUS_CLOSED
1169 pull_request.status = PullRequest.STATUS_CLOSED
1167 pull_request.updated_on = datetime.datetime.now()
1170 pull_request.updated_on = datetime.datetime.now()
1168 Session().add(pull_request)
1171 Session().add(pull_request)
1169 self._trigger_pull_request_hook(
1172 self._trigger_pull_request_hook(
1170 pull_request, pull_request.author, 'close')
1173 pull_request, pull_request.author, 'close')
1171
1174
1172 pr_data = pull_request.get_api_data(with_merge_state=False)
1175 pr_data = pull_request.get_api_data(with_merge_state=False)
1173 self._log_audit_action(
1176 self._log_audit_action(
1174 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1177 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1175
1178
1176 def close_pull_request_with_comment(
1179 def close_pull_request_with_comment(
1177 self, pull_request, user, repo, message=None, auth_user=None):
1180 self, pull_request, user, repo, message=None, auth_user=None):
1178
1181
1179 pull_request_review_status = pull_request.calculated_review_status()
1182 pull_request_review_status = pull_request.calculated_review_status()
1180
1183
1181 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1184 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1182 # approved only if we have voting consent
1185 # approved only if we have voting consent
1183 status = ChangesetStatus.STATUS_APPROVED
1186 status = ChangesetStatus.STATUS_APPROVED
1184 else:
1187 else:
1185 status = ChangesetStatus.STATUS_REJECTED
1188 status = ChangesetStatus.STATUS_REJECTED
1186 status_lbl = ChangesetStatus.get_status_lbl(status)
1189 status_lbl = ChangesetStatus.get_status_lbl(status)
1187
1190
1188 default_message = (
1191 default_message = (
1189 'Closing with status change {transition_icon} {status}.'
1192 'Closing with status change {transition_icon} {status}.'
1190 ).format(transition_icon='>', status=status_lbl)
1193 ).format(transition_icon='>', status=status_lbl)
1191 text = message or default_message
1194 text = message or default_message
1192
1195
1193 # create a comment, and link it to new status
1196 # create a comment, and link it to new status
1194 comment = CommentsModel().create(
1197 comment = CommentsModel().create(
1195 text=text,
1198 text=text,
1196 repo=repo.repo_id,
1199 repo=repo.repo_id,
1197 user=user.user_id,
1200 user=user.user_id,
1198 pull_request=pull_request.pull_request_id,
1201 pull_request=pull_request.pull_request_id,
1199 status_change=status_lbl,
1202 status_change=status_lbl,
1200 status_change_type=status,
1203 status_change_type=status,
1201 closing_pr=True,
1204 closing_pr=True,
1202 auth_user=auth_user,
1205 auth_user=auth_user,
1203 )
1206 )
1204
1207
1205 # calculate old status before we change it
1208 # calculate old status before we change it
1206 old_calculated_status = pull_request.calculated_review_status()
1209 old_calculated_status = pull_request.calculated_review_status()
1207 ChangesetStatusModel().set_status(
1210 ChangesetStatusModel().set_status(
1208 repo.repo_id,
1211 repo.repo_id,
1209 status,
1212 status,
1210 user.user_id,
1213 user.user_id,
1211 comment=comment,
1214 comment=comment,
1212 pull_request=pull_request.pull_request_id
1215 pull_request=pull_request.pull_request_id
1213 )
1216 )
1214
1217
1215 Session().flush()
1218 Session().flush()
1216 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1219 events.trigger(events.PullRequestCommentEvent(pull_request, comment))
1217 # we now calculate the status of pull request again, and based on that
1220 # we now calculate the status of pull request again, and based on that
1218 # calculation trigger status change. This might happen in cases
1221 # calculation trigger status change. This might happen in cases
1219 # that non-reviewer admin closes a pr, which means his vote doesn't
1222 # that non-reviewer admin closes a pr, which means his vote doesn't
1220 # change the status, while if he's a reviewer this might change it.
1223 # change the status, while if he's a reviewer this might change it.
1221 calculated_status = pull_request.calculated_review_status()
1224 calculated_status = pull_request.calculated_review_status()
1222 if old_calculated_status != calculated_status:
1225 if old_calculated_status != calculated_status:
1223 self._trigger_pull_request_hook(
1226 self._trigger_pull_request_hook(
1224 pull_request, user, 'review_status_change')
1227 pull_request, user, 'review_status_change')
1225
1228
1226 # finally close the PR
1229 # finally close the PR
1227 PullRequestModel().close_pull_request(
1230 PullRequestModel().close_pull_request(
1228 pull_request.pull_request_id, user)
1231 pull_request.pull_request_id, user)
1229
1232
1230 return comment, status
1233 return comment, status
1231
1234
1232 def merge_status(self, pull_request, translator=None,
1235 def merge_status(self, pull_request, translator=None,
1233 force_shadow_repo_refresh=False):
1236 force_shadow_repo_refresh=False):
1234 _ = translator or get_current_request().translate
1237 _ = translator or get_current_request().translate
1235
1238
1236 if not self._is_merge_enabled(pull_request):
1239 if not self._is_merge_enabled(pull_request):
1237 return False, _('Server-side pull request merging is disabled.')
1240 return False, _('Server-side pull request merging is disabled.')
1238 if pull_request.is_closed():
1241 if pull_request.is_closed():
1239 return False, _('This pull request is closed.')
1242 return False, _('This pull request is closed.')
1240 merge_possible, msg = self._check_repo_requirements(
1243 merge_possible, msg = self._check_repo_requirements(
1241 target=pull_request.target_repo, source=pull_request.source_repo,
1244 target=pull_request.target_repo, source=pull_request.source_repo,
1242 translator=_)
1245 translator=_)
1243 if not merge_possible:
1246 if not merge_possible:
1244 return merge_possible, msg
1247 return merge_possible, msg
1245
1248
1246 try:
1249 try:
1247 resp = self._try_merge(
1250 resp = self._try_merge(
1248 pull_request,
1251 pull_request,
1249 force_shadow_repo_refresh=force_shadow_repo_refresh)
1252 force_shadow_repo_refresh=force_shadow_repo_refresh)
1250 log.debug("Merge response: %s", resp)
1253 log.debug("Merge response: %s", resp)
1251 status = resp.possible, self.merge_status_message(
1254 status = resp.possible, self.merge_status_message(
1252 resp.failure_reason)
1255 resp.failure_reason)
1253 except NotImplementedError:
1256 except NotImplementedError:
1254 status = False, _('Pull request merging is not supported.')
1257 status = False, _('Pull request merging is not supported.')
1255
1258
1256 return status
1259 return status
1257
1260
1258 def _check_repo_requirements(self, target, source, translator):
1261 def _check_repo_requirements(self, target, source, translator):
1259 """
1262 """
1260 Check if `target` and `source` have compatible requirements.
1263 Check if `target` and `source` have compatible requirements.
1261
1264
1262 Currently this is just checking for largefiles.
1265 Currently this is just checking for largefiles.
1263 """
1266 """
1264 _ = translator
1267 _ = translator
1265 target_has_largefiles = self._has_largefiles(target)
1268 target_has_largefiles = self._has_largefiles(target)
1266 source_has_largefiles = self._has_largefiles(source)
1269 source_has_largefiles = self._has_largefiles(source)
1267 merge_possible = True
1270 merge_possible = True
1268 message = u''
1271 message = u''
1269
1272
1270 if target_has_largefiles != source_has_largefiles:
1273 if target_has_largefiles != source_has_largefiles:
1271 merge_possible = False
1274 merge_possible = False
1272 if source_has_largefiles:
1275 if source_has_largefiles:
1273 message = _(
1276 message = _(
1274 'Target repository large files support is disabled.')
1277 'Target repository large files support is disabled.')
1275 else:
1278 else:
1276 message = _(
1279 message = _(
1277 'Source repository large files support is disabled.')
1280 'Source repository large files support is disabled.')
1278
1281
1279 return merge_possible, message
1282 return merge_possible, message
1280
1283
1281 def _has_largefiles(self, repo):
1284 def _has_largefiles(self, repo):
1282 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1285 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1283 'extensions', 'largefiles')
1286 'extensions', 'largefiles')
1284 return largefiles_ui and largefiles_ui[0].active
1287 return largefiles_ui and largefiles_ui[0].active
1285
1288
1286 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1289 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1287 """
1290 """
1288 Try to merge the pull request and return the merge status.
1291 Try to merge the pull request and return the merge status.
1289 """
1292 """
1290 log.debug(
1293 log.debug(
1291 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1294 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1292 pull_request.pull_request_id, force_shadow_repo_refresh)
1295 pull_request.pull_request_id, force_shadow_repo_refresh)
1293 target_vcs = pull_request.target_repo.scm_instance()
1296 target_vcs = pull_request.target_repo.scm_instance()
1294
1297
1295 # Refresh the target reference.
1298 # Refresh the target reference.
1296 try:
1299 try:
1297 target_ref = self._refresh_reference(
1300 target_ref = self._refresh_reference(
1298 pull_request.target_ref_parts, target_vcs)
1301 pull_request.target_ref_parts, target_vcs)
1299 except CommitDoesNotExistError:
1302 except CommitDoesNotExistError:
1300 merge_state = MergeResponse(
1303 merge_state = MergeResponse(
1301 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1304 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
1302 return merge_state
1305 return merge_state
1303
1306
1304 target_locked = pull_request.target_repo.locked
1307 target_locked = pull_request.target_repo.locked
1305 if target_locked and target_locked[0]:
1308 if target_locked and target_locked[0]:
1306 log.debug("The target repository is locked.")
1309 log.debug("The target repository is locked.")
1307 merge_state = MergeResponse(
1310 merge_state = MergeResponse(
1308 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1311 False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
1309 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1312 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1310 pull_request, target_ref):
1313 pull_request, target_ref):
1311 log.debug("Refreshing the merge status of the repository.")
1314 log.debug("Refreshing the merge status of the repository.")
1312 merge_state = self._refresh_merge_state(
1315 merge_state = self._refresh_merge_state(
1313 pull_request, target_vcs, target_ref)
1316 pull_request, target_vcs, target_ref)
1314 else:
1317 else:
1315 possible = pull_request.\
1318 possible = pull_request.\
1316 last_merge_status == MergeFailureReason.NONE
1319 last_merge_status == MergeFailureReason.NONE
1317 merge_state = MergeResponse(
1320 merge_state = MergeResponse(
1318 possible, False, None, pull_request.last_merge_status)
1321 possible, False, None, pull_request.last_merge_status)
1319
1322
1320 return merge_state
1323 return merge_state
1321
1324
1322 def _refresh_reference(self, reference, vcs_repository):
1325 def _refresh_reference(self, reference, vcs_repository):
1323 if reference.type in ('branch', 'book'):
1326 if reference.type in ('branch', 'book'):
1324 name_or_id = reference.name
1327 name_or_id = reference.name
1325 else:
1328 else:
1326 name_or_id = reference.commit_id
1329 name_or_id = reference.commit_id
1327 refreshed_commit = vcs_repository.get_commit(name_or_id)
1330 refreshed_commit = vcs_repository.get_commit(name_or_id)
1328 refreshed_reference = Reference(
1331 refreshed_reference = Reference(
1329 reference.type, reference.name, refreshed_commit.raw_id)
1332 reference.type, reference.name, refreshed_commit.raw_id)
1330 return refreshed_reference
1333 return refreshed_reference
1331
1334
1332 def _needs_merge_state_refresh(self, pull_request, target_reference):
1335 def _needs_merge_state_refresh(self, pull_request, target_reference):
1333 return not(
1336 return not(
1334 pull_request.revisions and
1337 pull_request.revisions and
1335 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1338 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1336 target_reference.commit_id == pull_request._last_merge_target_rev)
1339 target_reference.commit_id == pull_request._last_merge_target_rev)
1337
1340
1338 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1341 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1339 workspace_id = self._workspace_id(pull_request)
1342 workspace_id = self._workspace_id(pull_request)
1340 source_vcs = pull_request.source_repo.scm_instance()
1343 source_vcs = pull_request.source_repo.scm_instance()
1341 repo_id = pull_request.target_repo.repo_id
1344 repo_id = pull_request.target_repo.repo_id
1342 use_rebase = self._use_rebase_for_merging(pull_request)
1345 use_rebase = self._use_rebase_for_merging(pull_request)
1343 close_branch = self._close_branch_before_merging(pull_request)
1346 close_branch = self._close_branch_before_merging(pull_request)
1344 merge_state = target_vcs.merge(
1347 merge_state = target_vcs.merge(
1345 repo_id, workspace_id,
1348 repo_id, workspace_id,
1346 target_reference, source_vcs, pull_request.source_ref_parts,
1349 target_reference, source_vcs, pull_request.source_ref_parts,
1347 dry_run=True, use_rebase=use_rebase,
1350 dry_run=True, use_rebase=use_rebase,
1348 close_branch=close_branch)
1351 close_branch=close_branch)
1349
1352
1350 # Do not store the response if there was an unknown error.
1353 # Do not store the response if there was an unknown error.
1351 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1354 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1352 pull_request._last_merge_source_rev = \
1355 pull_request._last_merge_source_rev = \
1353 pull_request.source_ref_parts.commit_id
1356 pull_request.source_ref_parts.commit_id
1354 pull_request._last_merge_target_rev = target_reference.commit_id
1357 pull_request._last_merge_target_rev = target_reference.commit_id
1355 pull_request.last_merge_status = merge_state.failure_reason
1358 pull_request.last_merge_status = merge_state.failure_reason
1356 pull_request.shadow_merge_ref = merge_state.merge_ref
1359 pull_request.shadow_merge_ref = merge_state.merge_ref
1357 Session().add(pull_request)
1360 Session().add(pull_request)
1358 Session().commit()
1361 Session().commit()
1359
1362
1360 return merge_state
1363 return merge_state
1361
1364
1362 def _workspace_id(self, pull_request):
1365 def _workspace_id(self, pull_request):
1363 workspace_id = 'pr-%s' % pull_request.pull_request_id
1366 workspace_id = 'pr-%s' % pull_request.pull_request_id
1364 return workspace_id
1367 return workspace_id
1365
1368
1366 def merge_status_message(self, status_code):
1369 def merge_status_message(self, status_code):
1367 """
1370 """
1368 Return a human friendly error message for the given merge status code.
1371 Return a human friendly error message for the given merge status code.
1369 """
1372 """
1370 return self.MERGE_STATUS_MESSAGES[status_code]
1373 return self.MERGE_STATUS_MESSAGES[status_code]
1371
1374
1372 def generate_repo_data(self, repo, commit_id=None, branch=None,
1375 def generate_repo_data(self, repo, commit_id=None, branch=None,
1373 bookmark=None, translator=None):
1376 bookmark=None, translator=None):
1374 from rhodecode.model.repo import RepoModel
1377 from rhodecode.model.repo import RepoModel
1375
1378
1376 all_refs, selected_ref = \
1379 all_refs, selected_ref = \
1377 self._get_repo_pullrequest_sources(
1380 self._get_repo_pullrequest_sources(
1378 repo.scm_instance(), commit_id=commit_id,
1381 repo.scm_instance(), commit_id=commit_id,
1379 branch=branch, bookmark=bookmark, translator=translator)
1382 branch=branch, bookmark=bookmark, translator=translator)
1380
1383
1381 refs_select2 = []
1384 refs_select2 = []
1382 for element in all_refs:
1385 for element in all_refs:
1383 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1386 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1384 refs_select2.append({'text': element[1], 'children': children})
1387 refs_select2.append({'text': element[1], 'children': children})
1385
1388
1386 return {
1389 return {
1387 'user': {
1390 'user': {
1388 'user_id': repo.user.user_id,
1391 'user_id': repo.user.user_id,
1389 'username': repo.user.username,
1392 'username': repo.user.username,
1390 'firstname': repo.user.first_name,
1393 'firstname': repo.user.first_name,
1391 'lastname': repo.user.last_name,
1394 'lastname': repo.user.last_name,
1392 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1395 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1393 },
1396 },
1394 'name': repo.repo_name,
1397 'name': repo.repo_name,
1395 'link': RepoModel().get_url(repo),
1398 'link': RepoModel().get_url(repo),
1396 'description': h.chop_at_smart(repo.description_safe, '\n'),
1399 'description': h.chop_at_smart(repo.description_safe, '\n'),
1397 'refs': {
1400 'refs': {
1398 'all_refs': all_refs,
1401 'all_refs': all_refs,
1399 'selected_ref': selected_ref,
1402 'selected_ref': selected_ref,
1400 'select2_refs': refs_select2
1403 'select2_refs': refs_select2
1401 }
1404 }
1402 }
1405 }
1403
1406
1404 def generate_pullrequest_title(self, source, source_ref, target):
1407 def generate_pullrequest_title(self, source, source_ref, target):
1405 return u'{source}#{at_ref} to {target}'.format(
1408 return u'{source}#{at_ref} to {target}'.format(
1406 source=source,
1409 source=source,
1407 at_ref=source_ref,
1410 at_ref=source_ref,
1408 target=target,
1411 target=target,
1409 )
1412 )
1410
1413
1411 def _cleanup_merge_workspace(self, pull_request):
1414 def _cleanup_merge_workspace(self, pull_request):
1412 # Merging related cleanup
1415 # Merging related cleanup
1413 repo_id = pull_request.target_repo.repo_id
1416 repo_id = pull_request.target_repo.repo_id
1414 target_scm = pull_request.target_repo.scm_instance()
1417 target_scm = pull_request.target_repo.scm_instance()
1415 workspace_id = self._workspace_id(pull_request)
1418 workspace_id = self._workspace_id(pull_request)
1416
1419
1417 try:
1420 try:
1418 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1421 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
1419 except NotImplementedError:
1422 except NotImplementedError:
1420 pass
1423 pass
1421
1424
1422 def _get_repo_pullrequest_sources(
1425 def _get_repo_pullrequest_sources(
1423 self, repo, commit_id=None, branch=None, bookmark=None,
1426 self, repo, commit_id=None, branch=None, bookmark=None,
1424 translator=None):
1427 translator=None):
1425 """
1428 """
1426 Return a structure with repo's interesting commits, suitable for
1429 Return a structure with repo's interesting commits, suitable for
1427 the selectors in pullrequest controller
1430 the selectors in pullrequest controller
1428
1431
1429 :param commit_id: a commit that must be in the list somehow
1432 :param commit_id: a commit that must be in the list somehow
1430 and selected by default
1433 and selected by default
1431 :param branch: a branch that must be in the list and selected
1434 :param branch: a branch that must be in the list and selected
1432 by default - even if closed
1435 by default - even if closed
1433 :param bookmark: a bookmark that must be in the list and selected
1436 :param bookmark: a bookmark that must be in the list and selected
1434 """
1437 """
1435 _ = translator or get_current_request().translate
1438 _ = translator or get_current_request().translate
1436
1439
1437 commit_id = safe_str(commit_id) if commit_id else None
1440 commit_id = safe_str(commit_id) if commit_id else None
1438 branch = safe_str(branch) if branch else None
1441 branch = safe_str(branch) if branch else None
1439 bookmark = safe_str(bookmark) if bookmark else None
1442 bookmark = safe_str(bookmark) if bookmark else None
1440
1443
1441 selected = None
1444 selected = None
1442
1445
1443 # order matters: first source that has commit_id in it will be selected
1446 # order matters: first source that has commit_id in it will be selected
1444 sources = []
1447 sources = []
1445 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1448 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
1446 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1449 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
1447
1450
1448 if commit_id:
1451 if commit_id:
1449 ref_commit = (h.short_id(commit_id), commit_id)
1452 ref_commit = (h.short_id(commit_id), commit_id)
1450 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1453 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
1451
1454
1452 sources.append(
1455 sources.append(
1453 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1456 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
1454 )
1457 )
1455
1458
1456 groups = []
1459 groups = []
1457 for group_key, ref_list, group_name, match in sources:
1460 for group_key, ref_list, group_name, match in sources:
1458 group_refs = []
1461 group_refs = []
1459 for ref_name, ref_id in ref_list:
1462 for ref_name, ref_id in ref_list:
1460 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1463 ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
1461 group_refs.append((ref_key, ref_name))
1464 group_refs.append((ref_key, ref_name))
1462
1465
1463 if not selected:
1466 if not selected:
1464 if set([commit_id, match]) & set([ref_id, ref_name]):
1467 if set([commit_id, match]) & set([ref_id, ref_name]):
1465 selected = ref_key
1468 selected = ref_key
1466
1469
1467 if group_refs:
1470 if group_refs:
1468 groups.append((group_refs, group_name))
1471 groups.append((group_refs, group_name))
1469
1472
1470 if not selected:
1473 if not selected:
1471 ref = commit_id or branch or bookmark
1474 ref = commit_id or branch or bookmark
1472 if ref:
1475 if ref:
1473 raise CommitDoesNotExistError(
1476 raise CommitDoesNotExistError(
1474 'No commit refs could be found matching: %s' % ref)
1477 'No commit refs could be found matching: %s' % ref)
1475 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1478 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
1476 selected = 'branch:%s:%s' % (
1479 selected = 'branch:%s:%s' % (
1477 repo.DEFAULT_BRANCH_NAME,
1480 repo.DEFAULT_BRANCH_NAME,
1478 repo.branches[repo.DEFAULT_BRANCH_NAME]
1481 repo.branches[repo.DEFAULT_BRANCH_NAME]
1479 )
1482 )
1480 elif repo.commit_ids:
1483 elif repo.commit_ids:
1481 # make the user select in this case
1484 # make the user select in this case
1482 selected = None
1485 selected = None
1483 else:
1486 else:
1484 raise EmptyRepositoryError()
1487 raise EmptyRepositoryError()
1485 return groups, selected
1488 return groups, selected
1486
1489
1487 def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
1490 def get_diff(self, source_repo, source_ref_id, target_ref_id, context=DIFF_CONTEXT):
1488 return self._get_diff_from_pr_or_version(
1491 return self._get_diff_from_pr_or_version(
1489 source_repo, source_ref_id, target_ref_id, context=context)
1492 source_repo, source_ref_id, target_ref_id, context=context)
1490
1493
1491 def _get_diff_from_pr_or_version(
1494 def _get_diff_from_pr_or_version(
1492 self, source_repo, source_ref_id, target_ref_id, context):
1495 self, source_repo, source_ref_id, target_ref_id, context):
1493 target_commit = source_repo.get_commit(
1496 target_commit = source_repo.get_commit(
1494 commit_id=safe_str(target_ref_id))
1497 commit_id=safe_str(target_ref_id))
1495 source_commit = source_repo.get_commit(
1498 source_commit = source_repo.get_commit(
1496 commit_id=safe_str(source_ref_id))
1499 commit_id=safe_str(source_ref_id))
1497 if isinstance(source_repo, Repository):
1500 if isinstance(source_repo, Repository):
1498 vcs_repo = source_repo.scm_instance()
1501 vcs_repo = source_repo.scm_instance()
1499 else:
1502 else:
1500 vcs_repo = source_repo
1503 vcs_repo = source_repo
1501
1504
1502 # TODO: johbo: In the context of an update, we cannot reach
1505 # TODO: johbo: In the context of an update, we cannot reach
1503 # the old commit anymore with our normal mechanisms. It needs
1506 # the old commit anymore with our normal mechanisms. It needs
1504 # some sort of special support in the vcs layer to avoid this
1507 # some sort of special support in the vcs layer to avoid this
1505 # workaround.
1508 # workaround.
1506 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1509 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
1507 vcs_repo.alias == 'git'):
1510 vcs_repo.alias == 'git'):
1508 source_commit.raw_id = safe_str(source_ref_id)
1511 source_commit.raw_id = safe_str(source_ref_id)
1509
1512
1510 log.debug('calculating diff between '
1513 log.debug('calculating diff between '
1511 'source_ref:%s and target_ref:%s for repo `%s`',
1514 'source_ref:%s and target_ref:%s for repo `%s`',
1512 target_ref_id, source_ref_id,
1515 target_ref_id, source_ref_id,
1513 safe_unicode(vcs_repo.path))
1516 safe_unicode(vcs_repo.path))
1514
1517
1515 vcs_diff = vcs_repo.get_diff(
1518 vcs_diff = vcs_repo.get_diff(
1516 commit1=target_commit, commit2=source_commit, context=context)
1519 commit1=target_commit, commit2=source_commit, context=context)
1517 return vcs_diff
1520 return vcs_diff
1518
1521
1519 def _is_merge_enabled(self, pull_request):
1522 def _is_merge_enabled(self, pull_request):
1520 return self._get_general_setting(
1523 return self._get_general_setting(
1521 pull_request, 'rhodecode_pr_merge_enabled')
1524 pull_request, 'rhodecode_pr_merge_enabled')
1522
1525
1523 def _use_rebase_for_merging(self, pull_request):
1526 def _use_rebase_for_merging(self, pull_request):
1524 repo_type = pull_request.target_repo.repo_type
1527 repo_type = pull_request.target_repo.repo_type
1525 if repo_type == 'hg':
1528 if repo_type == 'hg':
1526 return self._get_general_setting(
1529 return self._get_general_setting(
1527 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1530 pull_request, 'rhodecode_hg_use_rebase_for_merging')
1528 elif repo_type == 'git':
1531 elif repo_type == 'git':
1529 return self._get_general_setting(
1532 return self._get_general_setting(
1530 pull_request, 'rhodecode_git_use_rebase_for_merging')
1533 pull_request, 'rhodecode_git_use_rebase_for_merging')
1531
1534
1532 return False
1535 return False
1533
1536
1534 def _close_branch_before_merging(self, pull_request):
1537 def _close_branch_before_merging(self, pull_request):
1535 repo_type = pull_request.target_repo.repo_type
1538 repo_type = pull_request.target_repo.repo_type
1536 if repo_type == 'hg':
1539 if repo_type == 'hg':
1537 return self._get_general_setting(
1540 return self._get_general_setting(
1538 pull_request, 'rhodecode_hg_close_branch_before_merging')
1541 pull_request, 'rhodecode_hg_close_branch_before_merging')
1539 elif repo_type == 'git':
1542 elif repo_type == 'git':
1540 return self._get_general_setting(
1543 return self._get_general_setting(
1541 pull_request, 'rhodecode_git_close_branch_before_merging')
1544 pull_request, 'rhodecode_git_close_branch_before_merging')
1542
1545
1543 return False
1546 return False
1544
1547
1545 def _get_general_setting(self, pull_request, settings_key, default=False):
1548 def _get_general_setting(self, pull_request, settings_key, default=False):
1546 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1549 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
1547 settings = settings_model.get_general_settings()
1550 settings = settings_model.get_general_settings()
1548 return settings.get(settings_key, default)
1551 return settings.get(settings_key, default)
1549
1552
1550 def _log_audit_action(self, action, action_data, user, pull_request):
1553 def _log_audit_action(self, action, action_data, user, pull_request):
1551 audit_logger.store(
1554 audit_logger.store(
1552 action=action,
1555 action=action,
1553 action_data=action_data,
1556 action_data=action_data,
1554 user=user,
1557 user=user,
1555 repo=pull_request.target_repo)
1558 repo=pull_request.target_repo)
1556
1559
1557 def get_reviewer_functions(self):
1560 def get_reviewer_functions(self):
1558 """
1561 """
1559 Fetches functions for validation and fetching default reviewers.
1562 Fetches functions for validation and fetching default reviewers.
1560 If available we use the EE package, else we fallback to CE
1563 If available we use the EE package, else we fallback to CE
1561 package functions
1564 package functions
1562 """
1565 """
1563 try:
1566 try:
1564 from rc_reviewers.utils import get_default_reviewers_data
1567 from rc_reviewers.utils import get_default_reviewers_data
1565 from rc_reviewers.utils import validate_default_reviewers
1568 from rc_reviewers.utils import validate_default_reviewers
1566 except ImportError:
1569 except ImportError:
1567 from rhodecode.apps.repository.utils import \
1570 from rhodecode.apps.repository.utils import \
1568 get_default_reviewers_data
1571 get_default_reviewers_data
1569 from rhodecode.apps.repository.utils import \
1572 from rhodecode.apps.repository.utils import \
1570 validate_default_reviewers
1573 validate_default_reviewers
1571
1574
1572 return get_default_reviewers_data, validate_default_reviewers
1575 return get_default_reviewers_data, validate_default_reviewers
1573
1576
1574
1577
1575 class MergeCheck(object):
1578 class MergeCheck(object):
1576 """
1579 """
1577 Perform Merge Checks and returns a check object which stores information
1580 Perform Merge Checks and returns a check object which stores information
1578 about merge errors, and merge conditions
1581 about merge errors, and merge conditions
1579 """
1582 """
1580 TODO_CHECK = 'todo'
1583 TODO_CHECK = 'todo'
1581 PERM_CHECK = 'perm'
1584 PERM_CHECK = 'perm'
1582 REVIEW_CHECK = 'review'
1585 REVIEW_CHECK = 'review'
1583 MERGE_CHECK = 'merge'
1586 MERGE_CHECK = 'merge'
1584
1587
1585 def __init__(self):
1588 def __init__(self):
1586 self.review_status = None
1589 self.review_status = None
1587 self.merge_possible = None
1590 self.merge_possible = None
1588 self.merge_msg = ''
1591 self.merge_msg = ''
1589 self.failed = None
1592 self.failed = None
1590 self.errors = []
1593 self.errors = []
1591 self.error_details = OrderedDict()
1594 self.error_details = OrderedDict()
1592
1595
1593 def push_error(self, error_type, message, error_key, details):
1596 def push_error(self, error_type, message, error_key, details):
1594 self.failed = True
1597 self.failed = True
1595 self.errors.append([error_type, message])
1598 self.errors.append([error_type, message])
1596 self.error_details[error_key] = dict(
1599 self.error_details[error_key] = dict(
1597 details=details,
1600 details=details,
1598 error_type=error_type,
1601 error_type=error_type,
1599 message=message
1602 message=message
1600 )
1603 )
1601
1604
1602 @classmethod
1605 @classmethod
1603 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1606 def validate(cls, pull_request, auth_user, translator, fail_early=False,
1604 force_shadow_repo_refresh=False):
1607 force_shadow_repo_refresh=False):
1605 _ = translator
1608 _ = translator
1606 merge_check = cls()
1609 merge_check = cls()
1607
1610
1608 # permissions to merge
1611 # permissions to merge
1609 user_allowed_to_merge = PullRequestModel().check_user_merge(
1612 user_allowed_to_merge = PullRequestModel().check_user_merge(
1610 pull_request, auth_user)
1613 pull_request, auth_user)
1611 if not user_allowed_to_merge:
1614 if not user_allowed_to_merge:
1612 log.debug("MergeCheck: cannot merge, approval is pending.")
1615 log.debug("MergeCheck: cannot merge, approval is pending.")
1613
1616
1614 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1617 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
1615 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1618 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1616 if fail_early:
1619 if fail_early:
1617 return merge_check
1620 return merge_check
1618
1621
1619 # permission to merge into the target branch
1622 # permission to merge into the target branch
1620 target_commit_id = pull_request.target_ref_parts.commit_id
1623 target_commit_id = pull_request.target_ref_parts.commit_id
1621 if pull_request.target_ref_parts.type == 'branch':
1624 if pull_request.target_ref_parts.type == 'branch':
1622 branch_name = pull_request.target_ref_parts.name
1625 branch_name = pull_request.target_ref_parts.name
1623 else:
1626 else:
1624 # for mercurial we can always figure out the branch from the commit
1627 # for mercurial we can always figure out the branch from the commit
1625 # in case of bookmark
1628 # in case of bookmark
1626 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1629 target_commit = pull_request.target_repo.get_commit(target_commit_id)
1627 branch_name = target_commit.branch
1630 branch_name = target_commit.branch
1628
1631
1629 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1632 rule, branch_perm = auth_user.get_rule_and_branch_permission(
1630 pull_request.target_repo.repo_name, branch_name)
1633 pull_request.target_repo.repo_name, branch_name)
1631 if branch_perm and branch_perm == 'branch.none':
1634 if branch_perm and branch_perm == 'branch.none':
1632 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1635 msg = _('Target branch `{}` changes rejected by rule {}.').format(
1633 branch_name, rule)
1636 branch_name, rule)
1634 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1637 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
1635 if fail_early:
1638 if fail_early:
1636 return merge_check
1639 return merge_check
1637
1640
1638 # review status, must be always present
1641 # review status, must be always present
1639 review_status = pull_request.calculated_review_status()
1642 review_status = pull_request.calculated_review_status()
1640 merge_check.review_status = review_status
1643 merge_check.review_status = review_status
1641
1644
1642 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1645 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
1643 if not status_approved:
1646 if not status_approved:
1644 log.debug("MergeCheck: cannot merge, approval is pending.")
1647 log.debug("MergeCheck: cannot merge, approval is pending.")
1645
1648
1646 msg = _('Pull request reviewer approval is pending.')
1649 msg = _('Pull request reviewer approval is pending.')
1647
1650
1648 merge_check.push_error(
1651 merge_check.push_error(
1649 'warning', msg, cls.REVIEW_CHECK, review_status)
1652 'warning', msg, cls.REVIEW_CHECK, review_status)
1650
1653
1651 if fail_early:
1654 if fail_early:
1652 return merge_check
1655 return merge_check
1653
1656
1654 # left over TODOs
1657 # left over TODOs
1655 todos = CommentsModel().get_unresolved_todos(pull_request)
1658 todos = CommentsModel().get_unresolved_todos(pull_request)
1656 if todos:
1659 if todos:
1657 log.debug("MergeCheck: cannot merge, {} "
1660 log.debug("MergeCheck: cannot merge, {} "
1658 "unresolved todos left.".format(len(todos)))
1661 "unresolved todos left.".format(len(todos)))
1659
1662
1660 if len(todos) == 1:
1663 if len(todos) == 1:
1661 msg = _('Cannot merge, {} TODO still not resolved.').format(
1664 msg = _('Cannot merge, {} TODO still not resolved.').format(
1662 len(todos))
1665 len(todos))
1663 else:
1666 else:
1664 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1667 msg = _('Cannot merge, {} TODOs still not resolved.').format(
1665 len(todos))
1668 len(todos))
1666
1669
1667 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1670 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
1668
1671
1669 if fail_early:
1672 if fail_early:
1670 return merge_check
1673 return merge_check
1671
1674
1672 # merge possible, here is the filesystem simulation + shadow repo
1675 # merge possible, here is the filesystem simulation + shadow repo
1673 merge_status, msg = PullRequestModel().merge_status(
1676 merge_status, msg = PullRequestModel().merge_status(
1674 pull_request, translator=translator,
1677 pull_request, translator=translator,
1675 force_shadow_repo_refresh=force_shadow_repo_refresh)
1678 force_shadow_repo_refresh=force_shadow_repo_refresh)
1676 merge_check.merge_possible = merge_status
1679 merge_check.merge_possible = merge_status
1677 merge_check.merge_msg = msg
1680 merge_check.merge_msg = msg
1678 if not merge_status:
1681 if not merge_status:
1679 log.debug(
1682 log.debug(
1680 "MergeCheck: cannot merge, pull request merge not possible.")
1683 "MergeCheck: cannot merge, pull request merge not possible.")
1681 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1684 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
1682
1685
1683 if fail_early:
1686 if fail_early:
1684 return merge_check
1687 return merge_check
1685
1688
1686 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1689 log.debug('MergeCheck: is failed: %s', merge_check.failed)
1687 return merge_check
1690 return merge_check
1688
1691
1689 @classmethod
1692 @classmethod
1690 def get_merge_conditions(cls, pull_request, translator):
1693 def get_merge_conditions(cls, pull_request, translator):
1691 _ = translator
1694 _ = translator
1692 merge_details = {}
1695 merge_details = {}
1693
1696
1694 model = PullRequestModel()
1697 model = PullRequestModel()
1695 use_rebase = model._use_rebase_for_merging(pull_request)
1698 use_rebase = model._use_rebase_for_merging(pull_request)
1696
1699
1697 if use_rebase:
1700 if use_rebase:
1698 merge_details['merge_strategy'] = dict(
1701 merge_details['merge_strategy'] = dict(
1699 details={},
1702 details={},
1700 message=_('Merge strategy: rebase')
1703 message=_('Merge strategy: rebase')
1701 )
1704 )
1702 else:
1705 else:
1703 merge_details['merge_strategy'] = dict(
1706 merge_details['merge_strategy'] = dict(
1704 details={},
1707 details={},
1705 message=_('Merge strategy: explicit merge commit')
1708 message=_('Merge strategy: explicit merge commit')
1706 )
1709 )
1707
1710
1708 close_branch = model._close_branch_before_merging(pull_request)
1711 close_branch = model._close_branch_before_merging(pull_request)
1709 if close_branch:
1712 if close_branch:
1710 repo_type = pull_request.target_repo.repo_type
1713 repo_type = pull_request.target_repo.repo_type
1711 if repo_type == 'hg':
1714 if repo_type == 'hg':
1712 close_msg = _('Source branch will be closed after merge.')
1715 close_msg = _('Source branch will be closed after merge.')
1713 elif repo_type == 'git':
1716 elif repo_type == 'git':
1714 close_msg = _('Source branch will be deleted after merge.')
1717 close_msg = _('Source branch will be deleted after merge.')
1715
1718
1716 merge_details['close_branch'] = dict(
1719 merge_details['close_branch'] = dict(
1717 details={},
1720 details={},
1718 message=close_msg
1721 message=close_msg
1719 )
1722 )
1720
1723
1721 return merge_details
1724 return merge_details
1722
1725
1723 ChangeTuple = collections.namedtuple(
1726 ChangeTuple = collections.namedtuple(
1724 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1727 'ChangeTuple', ['added', 'common', 'removed', 'total'])
1725
1728
1726 FileChangeTuple = collections.namedtuple(
1729 FileChangeTuple = collections.namedtuple(
1727 'FileChangeTuple', ['added', 'modified', 'removed'])
1730 'FileChangeTuple', ['added', 'modified', 'removed'])
@@ -1,868 +1,868 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import textwrap
23 import textwrap
24
24
25 import rhodecode
25 import rhodecode
26 from rhodecode.lib.utils2 import safe_unicode
26 from rhodecode.lib.utils2 import safe_unicode
27 from rhodecode.lib.vcs.backends import get_backend
27 from rhodecode.lib.vcs.backends import get_backend
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 MergeResponse, MergeFailureReason, Reference)
29 MergeResponse, MergeFailureReason, Reference)
30 from rhodecode.lib.vcs.exceptions import RepositoryError
30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 from rhodecode.lib.vcs.nodes import FileNode
31 from rhodecode.lib.vcs.nodes import FileNode
32 from rhodecode.model.comment import CommentsModel
32 from rhodecode.model.comment import CommentsModel
33 from rhodecode.model.db import PullRequest, Session
33 from rhodecode.model.db import PullRequest, Session
34 from rhodecode.model.pull_request import PullRequestModel
34 from rhodecode.model.pull_request import PullRequestModel
35 from rhodecode.model.user import UserModel
35 from rhodecode.model.user import UserModel
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37
37
38
38
39 pytestmark = [
39 pytestmark = [
40 pytest.mark.backends("git", "hg"),
40 pytest.mark.backends("git", "hg"),
41 ]
41 ]
42
42
43
43
44 @pytest.mark.usefixtures('config_stub')
44 @pytest.mark.usefixtures('config_stub')
45 class TestPullRequestModel(object):
45 class TestPullRequestModel(object):
46
46
47 @pytest.fixture
47 @pytest.fixture
48 def pull_request(self, request, backend, pr_util):
48 def pull_request(self, request, backend, pr_util):
49 """
49 """
50 A pull request combined with multiples patches.
50 A pull request combined with multiples patches.
51 """
51 """
52 BackendClass = get_backend(backend.alias)
52 BackendClass = get_backend(backend.alias)
53 self.merge_patcher = mock.patch.object(
53 self.merge_patcher = mock.patch.object(
54 BackendClass, 'merge', return_value=MergeResponse(
54 BackendClass, 'merge', return_value=MergeResponse(
55 False, False, None, MergeFailureReason.UNKNOWN))
55 False, False, None, MergeFailureReason.UNKNOWN))
56 self.workspace_remove_patcher = mock.patch.object(
56 self.workspace_remove_patcher = mock.patch.object(
57 BackendClass, 'cleanup_merge_workspace')
57 BackendClass, 'cleanup_merge_workspace')
58
58
59 self.workspace_remove_mock = self.workspace_remove_patcher.start()
59 self.workspace_remove_mock = self.workspace_remove_patcher.start()
60 self.merge_mock = self.merge_patcher.start()
60 self.merge_mock = self.merge_patcher.start()
61 self.comment_patcher = mock.patch(
61 self.comment_patcher = mock.patch(
62 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
62 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
63 self.comment_patcher.start()
63 self.comment_patcher.start()
64 self.notification_patcher = mock.patch(
64 self.notification_patcher = mock.patch(
65 'rhodecode.model.notification.NotificationModel.create')
65 'rhodecode.model.notification.NotificationModel.create')
66 self.notification_patcher.start()
66 self.notification_patcher.start()
67 self.helper_patcher = mock.patch(
67 self.helper_patcher = mock.patch(
68 'rhodecode.lib.helpers.route_path')
68 'rhodecode.lib.helpers.route_path')
69 self.helper_patcher.start()
69 self.helper_patcher.start()
70
70
71 self.hook_patcher = mock.patch.object(PullRequestModel,
71 self.hook_patcher = mock.patch.object(PullRequestModel,
72 '_trigger_pull_request_hook')
72 '_trigger_pull_request_hook')
73 self.hook_mock = self.hook_patcher.start()
73 self.hook_mock = self.hook_patcher.start()
74
74
75 self.invalidation_patcher = mock.patch(
75 self.invalidation_patcher = mock.patch(
76 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
76 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
77 self.invalidation_mock = self.invalidation_patcher.start()
77 self.invalidation_mock = self.invalidation_patcher.start()
78
78
79 self.pull_request = pr_util.create_pull_request(
79 self.pull_request = pr_util.create_pull_request(
80 mergeable=True, name_suffix=u'Δ…Δ‡')
80 mergeable=True, name_suffix=u'Δ…Δ‡')
81 self.source_commit = self.pull_request.source_ref_parts.commit_id
81 self.source_commit = self.pull_request.source_ref_parts.commit_id
82 self.target_commit = self.pull_request.target_ref_parts.commit_id
82 self.target_commit = self.pull_request.target_ref_parts.commit_id
83 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
83 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
84 self.repo_id = self.pull_request.target_repo.repo_id
84 self.repo_id = self.pull_request.target_repo.repo_id
85
85
86 @request.addfinalizer
86 @request.addfinalizer
87 def cleanup_pull_request():
87 def cleanup_pull_request():
88 calls = [mock.call(
88 calls = [mock.call(
89 self.pull_request, self.pull_request.author, 'create')]
89 self.pull_request, self.pull_request.author, 'create')]
90 self.hook_mock.assert_has_calls(calls)
90 self.hook_mock.assert_has_calls(calls)
91
91
92 self.workspace_remove_patcher.stop()
92 self.workspace_remove_patcher.stop()
93 self.merge_patcher.stop()
93 self.merge_patcher.stop()
94 self.comment_patcher.stop()
94 self.comment_patcher.stop()
95 self.notification_patcher.stop()
95 self.notification_patcher.stop()
96 self.helper_patcher.stop()
96 self.helper_patcher.stop()
97 self.hook_patcher.stop()
97 self.hook_patcher.stop()
98 self.invalidation_patcher.stop()
98 self.invalidation_patcher.stop()
99
99
100 return self.pull_request
100 return self.pull_request
101
101
102 def test_get_all(self, pull_request):
102 def test_get_all(self, pull_request):
103 prs = PullRequestModel().get_all(pull_request.target_repo)
103 prs = PullRequestModel().get_all(pull_request.target_repo)
104 assert isinstance(prs, list)
104 assert isinstance(prs, list)
105 assert len(prs) == 1
105 assert len(prs) == 1
106
106
107 def test_count_all(self, pull_request):
107 def test_count_all(self, pull_request):
108 pr_count = PullRequestModel().count_all(pull_request.target_repo)
108 pr_count = PullRequestModel().count_all(pull_request.target_repo)
109 assert pr_count == 1
109 assert pr_count == 1
110
110
111 def test_get_awaiting_review(self, pull_request):
111 def test_get_awaiting_review(self, pull_request):
112 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
112 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
113 assert isinstance(prs, list)
113 assert isinstance(prs, list)
114 assert len(prs) == 1
114 assert len(prs) == 1
115
115
116 def test_count_awaiting_review(self, pull_request):
116 def test_count_awaiting_review(self, pull_request):
117 pr_count = PullRequestModel().count_awaiting_review(
117 pr_count = PullRequestModel().count_awaiting_review(
118 pull_request.target_repo)
118 pull_request.target_repo)
119 assert pr_count == 1
119 assert pr_count == 1
120
120
121 def test_get_awaiting_my_review(self, pull_request):
121 def test_get_awaiting_my_review(self, pull_request):
122 PullRequestModel().update_reviewers(
122 PullRequestModel().update_reviewers(
123 pull_request, [(pull_request.author, ['author'], False, [])],
123 pull_request, [(pull_request.author, ['author'], False, [])],
124 pull_request.author)
124 pull_request.author)
125 prs = PullRequestModel().get_awaiting_my_review(
125 prs = PullRequestModel().get_awaiting_my_review(
126 pull_request.target_repo, user_id=pull_request.author.user_id)
126 pull_request.target_repo, user_id=pull_request.author.user_id)
127 assert isinstance(prs, list)
127 assert isinstance(prs, list)
128 assert len(prs) == 1
128 assert len(prs) == 1
129
129
130 def test_count_awaiting_my_review(self, pull_request):
130 def test_count_awaiting_my_review(self, pull_request):
131 PullRequestModel().update_reviewers(
131 PullRequestModel().update_reviewers(
132 pull_request, [(pull_request.author, ['author'], False, [])],
132 pull_request, [(pull_request.author, ['author'], False, [])],
133 pull_request.author)
133 pull_request.author)
134 pr_count = PullRequestModel().count_awaiting_my_review(
134 pr_count = PullRequestModel().count_awaiting_my_review(
135 pull_request.target_repo, user_id=pull_request.author.user_id)
135 pull_request.target_repo, user_id=pull_request.author.user_id)
136 assert pr_count == 1
136 assert pr_count == 1
137
137
138 def test_delete_calls_cleanup_merge(self, pull_request):
138 def test_delete_calls_cleanup_merge(self, pull_request):
139 repo_id = pull_request.target_repo.repo_id
139 repo_id = pull_request.target_repo.repo_id
140 PullRequestModel().delete(pull_request, pull_request.author)
140 PullRequestModel().delete(pull_request, pull_request.author)
141
141
142 self.workspace_remove_mock.assert_called_once_with(
142 self.workspace_remove_mock.assert_called_once_with(
143 repo_id, self.workspace_id)
143 repo_id, self.workspace_id)
144
144
145 def test_close_calls_cleanup_and_hook(self, pull_request):
145 def test_close_calls_cleanup_and_hook(self, pull_request):
146 PullRequestModel().close_pull_request(
146 PullRequestModel().close_pull_request(
147 pull_request, pull_request.author)
147 pull_request, pull_request.author)
148 repo_id = pull_request.target_repo.repo_id
148 repo_id = pull_request.target_repo.repo_id
149
149
150 self.workspace_remove_mock.assert_called_once_with(
150 self.workspace_remove_mock.assert_called_once_with(
151 repo_id, self.workspace_id)
151 repo_id, self.workspace_id)
152 self.hook_mock.assert_called_with(
152 self.hook_mock.assert_called_with(
153 self.pull_request, self.pull_request.author, 'close')
153 self.pull_request, self.pull_request.author, 'close')
154
154
155 def test_merge_status(self, pull_request):
155 def test_merge_status(self, pull_request):
156 self.merge_mock.return_value = MergeResponse(
156 self.merge_mock.return_value = MergeResponse(
157 True, False, None, MergeFailureReason.NONE)
157 True, False, None, MergeFailureReason.NONE)
158
158
159 assert pull_request._last_merge_source_rev is None
159 assert pull_request._last_merge_source_rev is None
160 assert pull_request._last_merge_target_rev is None
160 assert pull_request._last_merge_target_rev is None
161 assert pull_request.last_merge_status is None
161 assert pull_request.last_merge_status is None
162
162
163 status, msg = PullRequestModel().merge_status(pull_request)
163 status, msg = PullRequestModel().merge_status(pull_request)
164 assert status is True
164 assert status is True
165 assert msg.eval() == 'This pull request can be automatically merged.'
165 assert msg.eval() == 'This pull request can be automatically merged.'
166 self.merge_mock.assert_called_with(
166 self.merge_mock.assert_called_with(
167 self.repo_id, self.workspace_id,
167 self.repo_id, self.workspace_id,
168 pull_request.target_ref_parts,
168 pull_request.target_ref_parts,
169 pull_request.source_repo.scm_instance(),
169 pull_request.source_repo.scm_instance(),
170 pull_request.source_ref_parts, dry_run=True,
170 pull_request.source_ref_parts, dry_run=True,
171 use_rebase=False, close_branch=False)
171 use_rebase=False, close_branch=False)
172
172
173 assert pull_request._last_merge_source_rev == self.source_commit
173 assert pull_request._last_merge_source_rev == self.source_commit
174 assert pull_request._last_merge_target_rev == self.target_commit
174 assert pull_request._last_merge_target_rev == self.target_commit
175 assert pull_request.last_merge_status is MergeFailureReason.NONE
175 assert pull_request.last_merge_status is MergeFailureReason.NONE
176
176
177 self.merge_mock.reset_mock()
177 self.merge_mock.reset_mock()
178 status, msg = PullRequestModel().merge_status(pull_request)
178 status, msg = PullRequestModel().merge_status(pull_request)
179 assert status is True
179 assert status is True
180 assert msg.eval() == 'This pull request can be automatically merged.'
180 assert msg.eval() == 'This pull request can be automatically merged.'
181 assert self.merge_mock.called is False
181 assert self.merge_mock.called is False
182
182
183 def test_merge_status_known_failure(self, pull_request):
183 def test_merge_status_known_failure(self, pull_request):
184 self.merge_mock.return_value = MergeResponse(
184 self.merge_mock.return_value = MergeResponse(
185 False, False, None, MergeFailureReason.MERGE_FAILED)
185 False, False, None, MergeFailureReason.MERGE_FAILED)
186
186
187 assert pull_request._last_merge_source_rev is None
187 assert pull_request._last_merge_source_rev is None
188 assert pull_request._last_merge_target_rev is None
188 assert pull_request._last_merge_target_rev is None
189 assert pull_request.last_merge_status is None
189 assert pull_request.last_merge_status is None
190
190
191 status, msg = PullRequestModel().merge_status(pull_request)
191 status, msg = PullRequestModel().merge_status(pull_request)
192 assert status is False
192 assert status is False
193 assert (
193 assert (
194 msg.eval() ==
194 msg.eval() ==
195 'This pull request cannot be merged because of merge conflicts.')
195 'This pull request cannot be merged because of merge conflicts.')
196 self.merge_mock.assert_called_with(
196 self.merge_mock.assert_called_with(
197 self.repo_id, self.workspace_id,
197 self.repo_id, self.workspace_id,
198 pull_request.target_ref_parts,
198 pull_request.target_ref_parts,
199 pull_request.source_repo.scm_instance(),
199 pull_request.source_repo.scm_instance(),
200 pull_request.source_ref_parts, dry_run=True,
200 pull_request.source_ref_parts, dry_run=True,
201 use_rebase=False, close_branch=False)
201 use_rebase=False, close_branch=False)
202
202
203 assert pull_request._last_merge_source_rev == self.source_commit
203 assert pull_request._last_merge_source_rev == self.source_commit
204 assert pull_request._last_merge_target_rev == self.target_commit
204 assert pull_request._last_merge_target_rev == self.target_commit
205 assert (
205 assert (
206 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
206 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
207
207
208 self.merge_mock.reset_mock()
208 self.merge_mock.reset_mock()
209 status, msg = PullRequestModel().merge_status(pull_request)
209 status, msg = PullRequestModel().merge_status(pull_request)
210 assert status is False
210 assert status is False
211 assert (
211 assert (
212 msg.eval() ==
212 msg.eval() ==
213 'This pull request cannot be merged because of merge conflicts.')
213 'This pull request cannot be merged because of merge conflicts.')
214 assert self.merge_mock.called is False
214 assert self.merge_mock.called is False
215
215
216 def test_merge_status_unknown_failure(self, pull_request):
216 def test_merge_status_unknown_failure(self, pull_request):
217 self.merge_mock.return_value = MergeResponse(
217 self.merge_mock.return_value = MergeResponse(
218 False, False, None, MergeFailureReason.UNKNOWN)
218 False, False, None, MergeFailureReason.UNKNOWN)
219
219
220 assert pull_request._last_merge_source_rev is None
220 assert pull_request._last_merge_source_rev is None
221 assert pull_request._last_merge_target_rev is None
221 assert pull_request._last_merge_target_rev is None
222 assert pull_request.last_merge_status is None
222 assert pull_request.last_merge_status is None
223
223
224 status, msg = PullRequestModel().merge_status(pull_request)
224 status, msg = PullRequestModel().merge_status(pull_request)
225 assert status is False
225 assert status is False
226 assert msg.eval() == (
226 assert msg.eval() == (
227 'This pull request cannot be merged because of an unhandled'
227 'This pull request cannot be merged because of an unhandled'
228 ' exception.')
228 ' exception.')
229 self.merge_mock.assert_called_with(
229 self.merge_mock.assert_called_with(
230 self.repo_id, self.workspace_id,
230 self.repo_id, self.workspace_id,
231 pull_request.target_ref_parts,
231 pull_request.target_ref_parts,
232 pull_request.source_repo.scm_instance(),
232 pull_request.source_repo.scm_instance(),
233 pull_request.source_ref_parts, dry_run=True,
233 pull_request.source_ref_parts, dry_run=True,
234 use_rebase=False, close_branch=False)
234 use_rebase=False, close_branch=False)
235
235
236 assert pull_request._last_merge_source_rev is None
236 assert pull_request._last_merge_source_rev is None
237 assert pull_request._last_merge_target_rev is None
237 assert pull_request._last_merge_target_rev is None
238 assert pull_request.last_merge_status is None
238 assert pull_request.last_merge_status is None
239
239
240 self.merge_mock.reset_mock()
240 self.merge_mock.reset_mock()
241 status, msg = PullRequestModel().merge_status(pull_request)
241 status, msg = PullRequestModel().merge_status(pull_request)
242 assert status is False
242 assert status is False
243 assert msg.eval() == (
243 assert msg.eval() == (
244 'This pull request cannot be merged because of an unhandled'
244 'This pull request cannot be merged because of an unhandled'
245 ' exception.')
245 ' exception.')
246 assert self.merge_mock.called is True
246 assert self.merge_mock.called is True
247
247
248 def test_merge_status_when_target_is_locked(self, pull_request):
248 def test_merge_status_when_target_is_locked(self, pull_request):
249 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
249 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
250 status, msg = PullRequestModel().merge_status(pull_request)
250 status, msg = PullRequestModel().merge_status(pull_request)
251 assert status is False
251 assert status is False
252 assert msg.eval() == (
252 assert msg.eval() == (
253 'This pull request cannot be merged because the target repository'
253 'This pull request cannot be merged because the target repository'
254 ' is locked.')
254 ' is locked.')
255
255
256 def test_merge_status_requirements_check_target(self, pull_request):
256 def test_merge_status_requirements_check_target(self, pull_request):
257
257
258 def has_largefiles(self, repo):
258 def has_largefiles(self, repo):
259 return repo == pull_request.source_repo
259 return repo == pull_request.source_repo
260
260
261 patcher = mock.patch.object(
261 patcher = mock.patch.object(
262 PullRequestModel, '_has_largefiles', has_largefiles)
262 PullRequestModel, '_has_largefiles', has_largefiles)
263 with patcher:
263 with patcher:
264 status, msg = PullRequestModel().merge_status(pull_request)
264 status, msg = PullRequestModel().merge_status(pull_request)
265
265
266 assert status is False
266 assert status is False
267 assert msg == 'Target repository large files support is disabled.'
267 assert msg == 'Target repository large files support is disabled.'
268
268
269 def test_merge_status_requirements_check_source(self, pull_request):
269 def test_merge_status_requirements_check_source(self, pull_request):
270
270
271 def has_largefiles(self, repo):
271 def has_largefiles(self, repo):
272 return repo == pull_request.target_repo
272 return repo == pull_request.target_repo
273
273
274 patcher = mock.patch.object(
274 patcher = mock.patch.object(
275 PullRequestModel, '_has_largefiles', has_largefiles)
275 PullRequestModel, '_has_largefiles', has_largefiles)
276 with patcher:
276 with patcher:
277 status, msg = PullRequestModel().merge_status(pull_request)
277 status, msg = PullRequestModel().merge_status(pull_request)
278
278
279 assert status is False
279 assert status is False
280 assert msg == 'Source repository large files support is disabled.'
280 assert msg == 'Source repository large files support is disabled.'
281
281
282 def test_merge(self, pull_request, merge_extras):
282 def test_merge(self, pull_request, merge_extras):
283 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
283 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
284 merge_ref = Reference(
284 merge_ref = Reference(
285 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
285 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
286 self.merge_mock.return_value = MergeResponse(
286 self.merge_mock.return_value = MergeResponse(
287 True, True, merge_ref, MergeFailureReason.NONE)
287 True, True, merge_ref, MergeFailureReason.NONE)
288
288
289 merge_extras['repository'] = pull_request.target_repo.repo_name
289 merge_extras['repository'] = pull_request.target_repo.repo_name
290 PullRequestModel().merge_repo(
290 PullRequestModel().merge_repo(
291 pull_request, pull_request.author, extras=merge_extras)
291 pull_request, pull_request.author, extras=merge_extras)
292
292
293 message = (
293 message = (
294 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
294 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
295 u'\n\n {pr_title}'.format(
295 u'\n\n {pr_title}'.format(
296 pr_id=pull_request.pull_request_id,
296 pr_id=pull_request.pull_request_id,
297 source_repo=safe_unicode(
297 source_repo=safe_unicode(
298 pull_request.source_repo.scm_instance().name),
298 pull_request.source_repo.scm_instance().name),
299 source_ref_name=pull_request.source_ref_parts.name,
299 source_ref_name=pull_request.source_ref_parts.name,
300 pr_title=safe_unicode(pull_request.title)
300 pr_title=safe_unicode(pull_request.title)
301 )
301 )
302 )
302 )
303 self.merge_mock.assert_called_with(
303 self.merge_mock.assert_called_with(
304 self.repo_id, self.workspace_id,
304 self.repo_id, self.workspace_id,
305 pull_request.target_ref_parts,
305 pull_request.target_ref_parts,
306 pull_request.source_repo.scm_instance(),
306 pull_request.source_repo.scm_instance(),
307 pull_request.source_ref_parts,
307 pull_request.source_ref_parts,
308 user_name=user.username, user_email=user.email, message=message,
308 user_name=user.short_contact, user_email=user.email, message=message,
309 use_rebase=False, close_branch=False
309 use_rebase=False, close_branch=False
310 )
310 )
311 self.invalidation_mock.assert_called_once_with(
311 self.invalidation_mock.assert_called_once_with(
312 pull_request.target_repo.repo_name)
312 pull_request.target_repo.repo_name)
313
313
314 self.hook_mock.assert_called_with(
314 self.hook_mock.assert_called_with(
315 self.pull_request, self.pull_request.author, 'merge')
315 self.pull_request, self.pull_request.author, 'merge')
316
316
317 pull_request = PullRequest.get(pull_request.pull_request_id)
317 pull_request = PullRequest.get(pull_request.pull_request_id)
318 assert (
318 assert (
319 pull_request.merge_rev ==
319 pull_request.merge_rev ==
320 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
320 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
321
321
322 def test_merge_failed(self, pull_request, merge_extras):
322 def test_merge_failed(self, pull_request, merge_extras):
323 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
323 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
324 merge_ref = Reference(
324 merge_ref = Reference(
325 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
325 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
326 self.merge_mock.return_value = MergeResponse(
326 self.merge_mock.return_value = MergeResponse(
327 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
327 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
328
328
329 merge_extras['repository'] = pull_request.target_repo.repo_name
329 merge_extras['repository'] = pull_request.target_repo.repo_name
330 PullRequestModel().merge_repo(
330 PullRequestModel().merge_repo(
331 pull_request, pull_request.author, extras=merge_extras)
331 pull_request, pull_request.author, extras=merge_extras)
332
332
333 message = (
333 message = (
334 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
334 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
335 u'\n\n {pr_title}'.format(
335 u'\n\n {pr_title}'.format(
336 pr_id=pull_request.pull_request_id,
336 pr_id=pull_request.pull_request_id,
337 source_repo=safe_unicode(
337 source_repo=safe_unicode(
338 pull_request.source_repo.scm_instance().name),
338 pull_request.source_repo.scm_instance().name),
339 source_ref_name=pull_request.source_ref_parts.name,
339 source_ref_name=pull_request.source_ref_parts.name,
340 pr_title=safe_unicode(pull_request.title)
340 pr_title=safe_unicode(pull_request.title)
341 )
341 )
342 )
342 )
343 self.merge_mock.assert_called_with(
343 self.merge_mock.assert_called_with(
344 self.repo_id, self.workspace_id,
344 self.repo_id, self.workspace_id,
345 pull_request.target_ref_parts,
345 pull_request.target_ref_parts,
346 pull_request.source_repo.scm_instance(),
346 pull_request.source_repo.scm_instance(),
347 pull_request.source_ref_parts,
347 pull_request.source_ref_parts,
348 user_name=user.username, user_email=user.email, message=message,
348 user_name=user.short_contact, user_email=user.email, message=message,
349 use_rebase=False, close_branch=False
349 use_rebase=False, close_branch=False
350 )
350 )
351
351
352 pull_request = PullRequest.get(pull_request.pull_request_id)
352 pull_request = PullRequest.get(pull_request.pull_request_id)
353 assert self.invalidation_mock.called is False
353 assert self.invalidation_mock.called is False
354 assert pull_request.merge_rev is None
354 assert pull_request.merge_rev is None
355
355
356 def test_get_commit_ids(self, pull_request):
356 def test_get_commit_ids(self, pull_request):
357 # The PR has been not merget yet, so expect an exception
357 # The PR has been not merget yet, so expect an exception
358 with pytest.raises(ValueError):
358 with pytest.raises(ValueError):
359 PullRequestModel()._get_commit_ids(pull_request)
359 PullRequestModel()._get_commit_ids(pull_request)
360
360
361 # Merge revision is in the revisions list
361 # Merge revision is in the revisions list
362 pull_request.merge_rev = pull_request.revisions[0]
362 pull_request.merge_rev = pull_request.revisions[0]
363 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
363 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
364 assert commit_ids == pull_request.revisions
364 assert commit_ids == pull_request.revisions
365
365
366 # Merge revision is not in the revisions list
366 # Merge revision is not in the revisions list
367 pull_request.merge_rev = 'f000' * 10
367 pull_request.merge_rev = 'f000' * 10
368 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
368 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
369 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
369 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
370
370
371 def test_get_diff_from_pr_version(self, pull_request):
371 def test_get_diff_from_pr_version(self, pull_request):
372 source_repo = pull_request.source_repo
372 source_repo = pull_request.source_repo
373 source_ref_id = pull_request.source_ref_parts.commit_id
373 source_ref_id = pull_request.source_ref_parts.commit_id
374 target_ref_id = pull_request.target_ref_parts.commit_id
374 target_ref_id = pull_request.target_ref_parts.commit_id
375 diff = PullRequestModel()._get_diff_from_pr_or_version(
375 diff = PullRequestModel()._get_diff_from_pr_or_version(
376 source_repo, source_ref_id, target_ref_id, context=6)
376 source_repo, source_ref_id, target_ref_id, context=6)
377 assert 'file_1' in diff.raw
377 assert 'file_1' in diff.raw
378
378
379 def test_generate_title_returns_unicode(self):
379 def test_generate_title_returns_unicode(self):
380 title = PullRequestModel().generate_pullrequest_title(
380 title = PullRequestModel().generate_pullrequest_title(
381 source='source-dummy',
381 source='source-dummy',
382 source_ref='source-ref-dummy',
382 source_ref='source-ref-dummy',
383 target='target-dummy',
383 target='target-dummy',
384 )
384 )
385 assert type(title) == unicode
385 assert type(title) == unicode
386
386
387
387
388 @pytest.mark.usefixtures('config_stub')
388 @pytest.mark.usefixtures('config_stub')
389 class TestIntegrationMerge(object):
389 class TestIntegrationMerge(object):
390 @pytest.mark.parametrize('extra_config', (
390 @pytest.mark.parametrize('extra_config', (
391 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
391 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
392 ))
392 ))
393 def test_merge_triggers_push_hooks(
393 def test_merge_triggers_push_hooks(
394 self, pr_util, user_admin, capture_rcextensions, merge_extras,
394 self, pr_util, user_admin, capture_rcextensions, merge_extras,
395 extra_config):
395 extra_config):
396 pull_request = pr_util.create_pull_request(
396 pull_request = pr_util.create_pull_request(
397 approved=True, mergeable=True)
397 approved=True, mergeable=True)
398 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
398 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
399 merge_extras['repository'] = pull_request.target_repo.repo_name
399 merge_extras['repository'] = pull_request.target_repo.repo_name
400 Session().commit()
400 Session().commit()
401
401
402 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
402 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
403 merge_state = PullRequestModel().merge_repo(
403 merge_state = PullRequestModel().merge_repo(
404 pull_request, user_admin, extras=merge_extras)
404 pull_request, user_admin, extras=merge_extras)
405
405
406 assert merge_state.executed
406 assert merge_state.executed
407 assert 'pre_push' in capture_rcextensions
407 assert 'pre_push' in capture_rcextensions
408 assert 'post_push' in capture_rcextensions
408 assert 'post_push' in capture_rcextensions
409
409
410 def test_merge_can_be_rejected_by_pre_push_hook(
410 def test_merge_can_be_rejected_by_pre_push_hook(
411 self, pr_util, user_admin, capture_rcextensions, merge_extras):
411 self, pr_util, user_admin, capture_rcextensions, merge_extras):
412 pull_request = pr_util.create_pull_request(
412 pull_request = pr_util.create_pull_request(
413 approved=True, mergeable=True)
413 approved=True, mergeable=True)
414 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
414 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
415 merge_extras['repository'] = pull_request.target_repo.repo_name
415 merge_extras['repository'] = pull_request.target_repo.repo_name
416 Session().commit()
416 Session().commit()
417
417
418 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
418 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
419 pre_pull.side_effect = RepositoryError("Disallow push!")
419 pre_pull.side_effect = RepositoryError("Disallow push!")
420 merge_status = PullRequestModel().merge_repo(
420 merge_status = PullRequestModel().merge_repo(
421 pull_request, user_admin, extras=merge_extras)
421 pull_request, user_admin, extras=merge_extras)
422
422
423 assert not merge_status.executed
423 assert not merge_status.executed
424 assert 'pre_push' not in capture_rcextensions
424 assert 'pre_push' not in capture_rcextensions
425 assert 'post_push' not in capture_rcextensions
425 assert 'post_push' not in capture_rcextensions
426
426
427 def test_merge_fails_if_target_is_locked(
427 def test_merge_fails_if_target_is_locked(
428 self, pr_util, user_regular, merge_extras):
428 self, pr_util, user_regular, merge_extras):
429 pull_request = pr_util.create_pull_request(
429 pull_request = pr_util.create_pull_request(
430 approved=True, mergeable=True)
430 approved=True, mergeable=True)
431 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
431 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
432 pull_request.target_repo.locked = locked_by
432 pull_request.target_repo.locked = locked_by
433 # TODO: johbo: Check if this can work based on the database, currently
433 # TODO: johbo: Check if this can work based on the database, currently
434 # all data is pre-computed, that's why just updating the DB is not
434 # all data is pre-computed, that's why just updating the DB is not
435 # enough.
435 # enough.
436 merge_extras['locked_by'] = locked_by
436 merge_extras['locked_by'] = locked_by
437 merge_extras['repository'] = pull_request.target_repo.repo_name
437 merge_extras['repository'] = pull_request.target_repo.repo_name
438 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
438 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
439 Session().commit()
439 Session().commit()
440 merge_status = PullRequestModel().merge_repo(
440 merge_status = PullRequestModel().merge_repo(
441 pull_request, user_regular, extras=merge_extras)
441 pull_request, user_regular, extras=merge_extras)
442 assert not merge_status.executed
442 assert not merge_status.executed
443
443
444
444
445 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
445 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
446 (False, 1, 0),
446 (False, 1, 0),
447 (True, 0, 1),
447 (True, 0, 1),
448 ])
448 ])
449 def test_outdated_comments(
449 def test_outdated_comments(
450 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
450 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
451 pull_request = pr_util.create_pull_request()
451 pull_request = pr_util.create_pull_request()
452 pr_util.create_inline_comment(file_path='not_in_updated_diff')
452 pr_util.create_inline_comment(file_path='not_in_updated_diff')
453
453
454 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
454 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
455 pr_util.add_one_commit()
455 pr_util.add_one_commit()
456 assert_inline_comments(
456 assert_inline_comments(
457 pull_request, visible=inlines_count, outdated=outdated_count)
457 pull_request, visible=inlines_count, outdated=outdated_count)
458 outdated_comment_mock.assert_called_with(pull_request)
458 outdated_comment_mock.assert_called_with(pull_request)
459
459
460
460
461 @pytest.fixture
461 @pytest.fixture
462 def merge_extras(user_regular):
462 def merge_extras(user_regular):
463 """
463 """
464 Context for the vcs operation when running a merge.
464 Context for the vcs operation when running a merge.
465 """
465 """
466 extras = {
466 extras = {
467 'ip': '127.0.0.1',
467 'ip': '127.0.0.1',
468 'username': user_regular.username,
468 'username': user_regular.username,
469 'user_id': user_regular.user_id,
469 'user_id': user_regular.user_id,
470 'action': 'push',
470 'action': 'push',
471 'repository': 'fake_target_repo_name',
471 'repository': 'fake_target_repo_name',
472 'scm': 'git',
472 'scm': 'git',
473 'config': 'fake_config_ini_path',
473 'config': 'fake_config_ini_path',
474 'make_lock': None,
474 'make_lock': None,
475 'locked_by': [None, None, None],
475 'locked_by': [None, None, None],
476 'server_url': 'http://test.example.com:5000',
476 'server_url': 'http://test.example.com:5000',
477 'hooks': ['push', 'pull'],
477 'hooks': ['push', 'pull'],
478 'is_shadow_repo': False,
478 'is_shadow_repo': False,
479 }
479 }
480 return extras
480 return extras
481
481
482
482
483 @pytest.mark.usefixtures('config_stub')
483 @pytest.mark.usefixtures('config_stub')
484 class TestUpdateCommentHandling(object):
484 class TestUpdateCommentHandling(object):
485
485
486 @pytest.fixture(autouse=True, scope='class')
486 @pytest.fixture(autouse=True, scope='class')
487 def enable_outdated_comments(self, request, baseapp):
487 def enable_outdated_comments(self, request, baseapp):
488 config_patch = mock.patch.dict(
488 config_patch = mock.patch.dict(
489 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
489 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
490 config_patch.start()
490 config_patch.start()
491
491
492 @request.addfinalizer
492 @request.addfinalizer
493 def cleanup():
493 def cleanup():
494 config_patch.stop()
494 config_patch.stop()
495
495
496 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
496 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
497 commits = [
497 commits = [
498 {'message': 'a'},
498 {'message': 'a'},
499 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
499 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
500 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
500 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
501 ]
501 ]
502 pull_request = pr_util.create_pull_request(
502 pull_request = pr_util.create_pull_request(
503 commits=commits, target_head='a', source_head='b', revisions=['b'])
503 commits=commits, target_head='a', source_head='b', revisions=['b'])
504 pr_util.create_inline_comment(file_path='file_b')
504 pr_util.create_inline_comment(file_path='file_b')
505 pr_util.add_one_commit(head='c')
505 pr_util.add_one_commit(head='c')
506
506
507 assert_inline_comments(pull_request, visible=1, outdated=0)
507 assert_inline_comments(pull_request, visible=1, outdated=0)
508
508
509 def test_comment_stays_unflagged_on_change_above(self, pr_util):
509 def test_comment_stays_unflagged_on_change_above(self, pr_util):
510 original_content = ''.join(
510 original_content = ''.join(
511 ['line {}\n'.format(x) for x in range(1, 11)])
511 ['line {}\n'.format(x) for x in range(1, 11)])
512 updated_content = 'new_line_at_top\n' + original_content
512 updated_content = 'new_line_at_top\n' + original_content
513 commits = [
513 commits = [
514 {'message': 'a'},
514 {'message': 'a'},
515 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
515 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
516 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
516 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
517 ]
517 ]
518 pull_request = pr_util.create_pull_request(
518 pull_request = pr_util.create_pull_request(
519 commits=commits, target_head='a', source_head='b', revisions=['b'])
519 commits=commits, target_head='a', source_head='b', revisions=['b'])
520
520
521 with outdated_comments_patcher():
521 with outdated_comments_patcher():
522 comment = pr_util.create_inline_comment(
522 comment = pr_util.create_inline_comment(
523 line_no=u'n8', file_path='file_b')
523 line_no=u'n8', file_path='file_b')
524 pr_util.add_one_commit(head='c')
524 pr_util.add_one_commit(head='c')
525
525
526 assert_inline_comments(pull_request, visible=1, outdated=0)
526 assert_inline_comments(pull_request, visible=1, outdated=0)
527 assert comment.line_no == u'n9'
527 assert comment.line_no == u'n9'
528
528
529 def test_comment_stays_unflagged_on_change_below(self, pr_util):
529 def test_comment_stays_unflagged_on_change_below(self, pr_util):
530 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
530 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
531 updated_content = original_content + 'new_line_at_end\n'
531 updated_content = original_content + 'new_line_at_end\n'
532 commits = [
532 commits = [
533 {'message': 'a'},
533 {'message': 'a'},
534 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
534 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
535 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
535 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
536 ]
536 ]
537 pull_request = pr_util.create_pull_request(
537 pull_request = pr_util.create_pull_request(
538 commits=commits, target_head='a', source_head='b', revisions=['b'])
538 commits=commits, target_head='a', source_head='b', revisions=['b'])
539 pr_util.create_inline_comment(file_path='file_b')
539 pr_util.create_inline_comment(file_path='file_b')
540 pr_util.add_one_commit(head='c')
540 pr_util.add_one_commit(head='c')
541
541
542 assert_inline_comments(pull_request, visible=1, outdated=0)
542 assert_inline_comments(pull_request, visible=1, outdated=0)
543
543
544 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
544 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
545 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
545 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
546 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
546 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
547 change_lines = list(base_lines)
547 change_lines = list(base_lines)
548 change_lines.insert(6, 'line 6a added\n')
548 change_lines.insert(6, 'line 6a added\n')
549
549
550 # Changes on the last line of sight
550 # Changes on the last line of sight
551 update_lines = list(change_lines)
551 update_lines = list(change_lines)
552 update_lines[0] = 'line 1 changed\n'
552 update_lines[0] = 'line 1 changed\n'
553 update_lines[-1] = 'line 12 changed\n'
553 update_lines[-1] = 'line 12 changed\n'
554
554
555 def file_b(lines):
555 def file_b(lines):
556 return FileNode('file_b', ''.join(lines))
556 return FileNode('file_b', ''.join(lines))
557
557
558 commits = [
558 commits = [
559 {'message': 'a', 'added': [file_b(base_lines)]},
559 {'message': 'a', 'added': [file_b(base_lines)]},
560 {'message': 'b', 'changed': [file_b(change_lines)]},
560 {'message': 'b', 'changed': [file_b(change_lines)]},
561 {'message': 'c', 'changed': [file_b(update_lines)]},
561 {'message': 'c', 'changed': [file_b(update_lines)]},
562 ]
562 ]
563
563
564 pull_request = pr_util.create_pull_request(
564 pull_request = pr_util.create_pull_request(
565 commits=commits, target_head='a', source_head='b', revisions=['b'])
565 commits=commits, target_head='a', source_head='b', revisions=['b'])
566 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
566 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
567
567
568 with outdated_comments_patcher():
568 with outdated_comments_patcher():
569 pr_util.add_one_commit(head='c')
569 pr_util.add_one_commit(head='c')
570 assert_inline_comments(pull_request, visible=0, outdated=1)
570 assert_inline_comments(pull_request, visible=0, outdated=1)
571
571
572 @pytest.mark.parametrize("change, content", [
572 @pytest.mark.parametrize("change, content", [
573 ('changed', 'changed\n'),
573 ('changed', 'changed\n'),
574 ('removed', ''),
574 ('removed', ''),
575 ], ids=['changed', 'removed'])
575 ], ids=['changed', 'removed'])
576 def test_comment_flagged_on_change(self, pr_util, change, content):
576 def test_comment_flagged_on_change(self, pr_util, change, content):
577 commits = [
577 commits = [
578 {'message': 'a'},
578 {'message': 'a'},
579 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
579 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
580 {'message': 'c', change: [FileNode('file_b', content)]},
580 {'message': 'c', change: [FileNode('file_b', content)]},
581 ]
581 ]
582 pull_request = pr_util.create_pull_request(
582 pull_request = pr_util.create_pull_request(
583 commits=commits, target_head='a', source_head='b', revisions=['b'])
583 commits=commits, target_head='a', source_head='b', revisions=['b'])
584 pr_util.create_inline_comment(file_path='file_b')
584 pr_util.create_inline_comment(file_path='file_b')
585
585
586 with outdated_comments_patcher():
586 with outdated_comments_patcher():
587 pr_util.add_one_commit(head='c')
587 pr_util.add_one_commit(head='c')
588 assert_inline_comments(pull_request, visible=0, outdated=1)
588 assert_inline_comments(pull_request, visible=0, outdated=1)
589
589
590
590
591 @pytest.mark.usefixtures('config_stub')
591 @pytest.mark.usefixtures('config_stub')
592 class TestUpdateChangedFiles(object):
592 class TestUpdateChangedFiles(object):
593
593
594 def test_no_changes_on_unchanged_diff(self, pr_util):
594 def test_no_changes_on_unchanged_diff(self, pr_util):
595 commits = [
595 commits = [
596 {'message': 'a'},
596 {'message': 'a'},
597 {'message': 'b',
597 {'message': 'b',
598 'added': [FileNode('file_b', 'test_content b\n')]},
598 'added': [FileNode('file_b', 'test_content b\n')]},
599 {'message': 'c',
599 {'message': 'c',
600 'added': [FileNode('file_c', 'test_content c\n')]},
600 'added': [FileNode('file_c', 'test_content c\n')]},
601 ]
601 ]
602 # open a PR from a to b, adding file_b
602 # open a PR from a to b, adding file_b
603 pull_request = pr_util.create_pull_request(
603 pull_request = pr_util.create_pull_request(
604 commits=commits, target_head='a', source_head='b', revisions=['b'],
604 commits=commits, target_head='a', source_head='b', revisions=['b'],
605 name_suffix='per-file-review')
605 name_suffix='per-file-review')
606
606
607 # modify PR adding new file file_c
607 # modify PR adding new file file_c
608 pr_util.add_one_commit(head='c')
608 pr_util.add_one_commit(head='c')
609
609
610 assert_pr_file_changes(
610 assert_pr_file_changes(
611 pull_request,
611 pull_request,
612 added=['file_c'],
612 added=['file_c'],
613 modified=[],
613 modified=[],
614 removed=[])
614 removed=[])
615
615
616 def test_modify_and_undo_modification_diff(self, pr_util):
616 def test_modify_and_undo_modification_diff(self, pr_util):
617 commits = [
617 commits = [
618 {'message': 'a'},
618 {'message': 'a'},
619 {'message': 'b',
619 {'message': 'b',
620 'added': [FileNode('file_b', 'test_content b\n')]},
620 'added': [FileNode('file_b', 'test_content b\n')]},
621 {'message': 'c',
621 {'message': 'c',
622 'changed': [FileNode('file_b', 'test_content b modified\n')]},
622 'changed': [FileNode('file_b', 'test_content b modified\n')]},
623 {'message': 'd',
623 {'message': 'd',
624 'changed': [FileNode('file_b', 'test_content b\n')]},
624 'changed': [FileNode('file_b', 'test_content b\n')]},
625 ]
625 ]
626 # open a PR from a to b, adding file_b
626 # open a PR from a to b, adding file_b
627 pull_request = pr_util.create_pull_request(
627 pull_request = pr_util.create_pull_request(
628 commits=commits, target_head='a', source_head='b', revisions=['b'],
628 commits=commits, target_head='a', source_head='b', revisions=['b'],
629 name_suffix='per-file-review')
629 name_suffix='per-file-review')
630
630
631 # modify PR modifying file file_b
631 # modify PR modifying file file_b
632 pr_util.add_one_commit(head='c')
632 pr_util.add_one_commit(head='c')
633
633
634 assert_pr_file_changes(
634 assert_pr_file_changes(
635 pull_request,
635 pull_request,
636 added=[],
636 added=[],
637 modified=['file_b'],
637 modified=['file_b'],
638 removed=[])
638 removed=[])
639
639
640 # move the head again to d, which rollbacks change,
640 # move the head again to d, which rollbacks change,
641 # meaning we should indicate no changes
641 # meaning we should indicate no changes
642 pr_util.add_one_commit(head='d')
642 pr_util.add_one_commit(head='d')
643
643
644 assert_pr_file_changes(
644 assert_pr_file_changes(
645 pull_request,
645 pull_request,
646 added=[],
646 added=[],
647 modified=[],
647 modified=[],
648 removed=[])
648 removed=[])
649
649
650 def test_updated_all_files_in_pr(self, pr_util):
650 def test_updated_all_files_in_pr(self, pr_util):
651 commits = [
651 commits = [
652 {'message': 'a'},
652 {'message': 'a'},
653 {'message': 'b', 'added': [
653 {'message': 'b', 'added': [
654 FileNode('file_a', 'test_content a\n'),
654 FileNode('file_a', 'test_content a\n'),
655 FileNode('file_b', 'test_content b\n'),
655 FileNode('file_b', 'test_content b\n'),
656 FileNode('file_c', 'test_content c\n')]},
656 FileNode('file_c', 'test_content c\n')]},
657 {'message': 'c', 'changed': [
657 {'message': 'c', 'changed': [
658 FileNode('file_a', 'test_content a changed\n'),
658 FileNode('file_a', 'test_content a changed\n'),
659 FileNode('file_b', 'test_content b changed\n'),
659 FileNode('file_b', 'test_content b changed\n'),
660 FileNode('file_c', 'test_content c changed\n')]},
660 FileNode('file_c', 'test_content c changed\n')]},
661 ]
661 ]
662 # open a PR from a to b, changing 3 files
662 # open a PR from a to b, changing 3 files
663 pull_request = pr_util.create_pull_request(
663 pull_request = pr_util.create_pull_request(
664 commits=commits, target_head='a', source_head='b', revisions=['b'],
664 commits=commits, target_head='a', source_head='b', revisions=['b'],
665 name_suffix='per-file-review')
665 name_suffix='per-file-review')
666
666
667 pr_util.add_one_commit(head='c')
667 pr_util.add_one_commit(head='c')
668
668
669 assert_pr_file_changes(
669 assert_pr_file_changes(
670 pull_request,
670 pull_request,
671 added=[],
671 added=[],
672 modified=['file_a', 'file_b', 'file_c'],
672 modified=['file_a', 'file_b', 'file_c'],
673 removed=[])
673 removed=[])
674
674
675 def test_updated_and_removed_all_files_in_pr(self, pr_util):
675 def test_updated_and_removed_all_files_in_pr(self, pr_util):
676 commits = [
676 commits = [
677 {'message': 'a'},
677 {'message': 'a'},
678 {'message': 'b', 'added': [
678 {'message': 'b', 'added': [
679 FileNode('file_a', 'test_content a\n'),
679 FileNode('file_a', 'test_content a\n'),
680 FileNode('file_b', 'test_content b\n'),
680 FileNode('file_b', 'test_content b\n'),
681 FileNode('file_c', 'test_content c\n')]},
681 FileNode('file_c', 'test_content c\n')]},
682 {'message': 'c', 'removed': [
682 {'message': 'c', 'removed': [
683 FileNode('file_a', 'test_content a changed\n'),
683 FileNode('file_a', 'test_content a changed\n'),
684 FileNode('file_b', 'test_content b changed\n'),
684 FileNode('file_b', 'test_content b changed\n'),
685 FileNode('file_c', 'test_content c changed\n')]},
685 FileNode('file_c', 'test_content c changed\n')]},
686 ]
686 ]
687 # open a PR from a to b, removing 3 files
687 # open a PR from a to b, removing 3 files
688 pull_request = pr_util.create_pull_request(
688 pull_request = pr_util.create_pull_request(
689 commits=commits, target_head='a', source_head='b', revisions=['b'],
689 commits=commits, target_head='a', source_head='b', revisions=['b'],
690 name_suffix='per-file-review')
690 name_suffix='per-file-review')
691
691
692 pr_util.add_one_commit(head='c')
692 pr_util.add_one_commit(head='c')
693
693
694 assert_pr_file_changes(
694 assert_pr_file_changes(
695 pull_request,
695 pull_request,
696 added=[],
696 added=[],
697 modified=[],
697 modified=[],
698 removed=['file_a', 'file_b', 'file_c'])
698 removed=['file_a', 'file_b', 'file_c'])
699
699
700
700
def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
    """Updating a PR's commits must snapshot the old state as a version."""
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    # mutate the source repo so update_commits() has something to record
    pr_util.update_source_repository()

    model.update_commits(pull_request)

    # Expect that it has a version entry now
    assert len(model.get_versions(pull_request)) == 1
710
710
711
711
def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
    """update_commits() on an unchanged PR must NOT create a version."""
    pull_request = pr_util.create_pull_request()
    model = PullRequestModel()
    model.update_commits(pull_request)

    # Expect that it still has no versions
    assert len(model.get_versions(pull_request)) == 0
719
719
720
720
def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
    """Existing comments are relinked to the version created on update."""
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    comment = pr_util.create_comment()
    pr_util.update_source_repository()

    model.update_commits(pull_request)

    # Expect that the comment is linked to the pr version now
    assert comment.pull_request_version == model.get_versions(pull_request)[0]
731
731
732
732
def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
    """Updating a PR posts an auto-comment describing the commit/file delta.

    The source repo is updated twice before calling update_commits(), so the
    generated RST summary must report 1 added commit and one added file.
    """
    model = PullRequestModel()
    pull_request = pr_util.create_pull_request()
    pr_util.update_source_repository()
    pr_util.update_source_repository()

    model.update_commits(pull_request)

    # Expect to find a new comment about the change
    # NOTE: the file anchor hash (92ed3b5f07b4) is stable because pr_util
    # creates deterministic commits.
    expected_message = textwrap.dedent(
        """\
        Pull request updated. Auto status change to |under_review|

        .. role:: added
        .. role:: removed
        .. parsed-literal::

          Changed commits:
            * :added:`1 added`
            * :removed:`0 removed`

          Changed files:
            * `A file_2 <#a_c--92ed3b5f07b4>`_

        .. |under_review| replace:: *"Under Review"*"""
    )
    # the update comment is the newest one on the PR
    pull_request_comments = sorted(
        pull_request.comments, key=lambda c: c.modified_at)
    update_comment = pull_request_comments[-1]
    assert update_comment.text == expected_message
763
763
764
764
def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
    """_create_version_from_snapshot must copy every PR attribute faithfully.

    Non-default values are set on the PR first so a copied default could not
    masquerade as a correct copy; created_on is expected to differ (versions
    get a fresh timestamp) while updated_on is carried over.
    """
    pull_request = pr_util.create_pull_request()

    # Avoiding default values
    pull_request.status = PullRequest.STATUS_CLOSED
    pull_request._last_merge_source_rev = "0" * 40
    pull_request._last_merge_target_rev = "1" * 40
    pull_request.last_merge_status = 1
    pull_request.merge_rev = "2" * 40

    # Remember automatic values
    created_on = pull_request.created_on
    updated_on = pull_request.updated_on

    # Create a new version of the pull request
    version = PullRequestModel()._create_version_from_snapshot(pull_request)

    # Check attributes
    assert version.title == pr_util.create_parameters['title']
    assert version.description == pr_util.create_parameters['description']
    assert version.status == PullRequest.STATUS_CLOSED

    # versions get updated created_on
    assert version.created_on != created_on

    assert version.updated_on == updated_on
    assert version.user_id == pull_request.user_id
    assert version.revisions == pr_util.create_parameters['revisions']
    assert version.source_repo == pr_util.source_repository
    assert version.source_ref == pr_util.create_parameters['source_ref']
    assert version.target_repo == pr_util.target_repository
    assert version.target_ref == pr_util.create_parameters['target_ref']
    assert version._last_merge_source_rev == pull_request._last_merge_source_rev
    assert version._last_merge_target_rev == pull_request._last_merge_target_rev
    assert version.last_merge_status == pull_request.last_merge_status
    assert version.merge_rev == pull_request.merge_rev
    assert version.pull_request == pull_request
802
802
803
803
def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
    """_link_comments_to_version must not steal comments already linked.

    A comment pinned to version1 stays there; only the unlinked comment is
    attached to the newly created version2.
    """
    version1 = pr_util.create_version_of_pull_request()
    comment_linked = pr_util.create_comment(linked_to=version1)
    comment_unlinked = pr_util.create_comment()
    version2 = pr_util.create_version_of_pull_request()

    PullRequestModel()._link_comments_to_version(version2)

    # Expect that only the new comment is linked to version2
    assert (
        comment_unlinked.pull_request_version_id ==
        version2.pull_request_version_id)
    assert (
        comment_linked.pull_request_version_id ==
        version1.pull_request_version_id)
    assert (
        comment_unlinked.pull_request_version_id !=
        comment_linked.pull_request_version_id)
822
822
823
823
def test_calculate_commits():
    """_calculate_commit_id_changes partitions ids into added/common/removed.

    total is the full new id list (order preserved), not a set union.
    """
    old_ids = [1, 2, 3]
    new_ids = [1, 3, 4, 5]
    change = PullRequestModel()._calculate_commit_id_changes(old_ids, new_ids)
    assert change.added == [4, 5]
    assert change.common == [1, 3]
    assert change.removed == [2]
    assert change.total == [1, 3, 4, 5]
832
832
833
833
def assert_inline_comments(pull_request, visible=None, outdated=None):
    """Assert the count of visible and/or outdated inline comments on a PR.

    :param pull_request: the PullRequest to inspect
    :param visible: expected number of visible inline comments, or None to skip
    :param outdated: expected number of outdated comments, or None to skip
    """
    if visible is not None:
        inline_comments = CommentsModel().get_inline_comments(
            pull_request.target_repo.repo_id, pull_request=pull_request)
        inline_cnt = CommentsModel().get_inline_comments_count(
            inline_comments)
        assert inline_cnt == visible
    if outdated is not None:
        outdated_comments = CommentsModel().get_outdated_comments(
            pull_request.target_repo.repo_id, pull_request)
        assert len(outdated_comments) == outdated
845
845
846
846
def assert_pr_file_changes(
        pull_request, added=None, modified=None, removed=None):
    """Assert the per-file change lists computed against the PR's first version.

    Diffs the current PR state against its ORIGINAL version (index 0) and
    compares the added/modified/removed filename lists with the expected ones.
    """
    pr_versions = PullRequestModel().get_versions(pull_request)
    # always use first version, ie original PR to calculate changes
    pull_request_version = pr_versions[0]
    old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
        pull_request, pull_request_version)
    file_changes = PullRequestModel()._calculate_file_changes(
        old_diff_data, new_diff_data)

    assert added == file_changes.added, \
        'expected added:%s vs value:%s' % (added, file_changes.added)
    assert modified == file_changes.modified, \
        'expected modified:%s vs value:%s' % (modified, file_changes.modified)
    assert removed == file_changes.removed, \
        'expected removed:%s vs value:%s' % (removed, file_changes.removed)
863
863
864
864
def outdated_comments_patcher(use_outdated=True):
    """Return a patcher forcing CommentsModel.use_outdated_comments.

    :param use_outdated: value the patched property should report
    :return: a mock.patch.object context manager / decorator
    """
    return mock.patch.object(
        CommentsModel, 'use_outdated_comments',
        return_value=use_outdated)
General Comments 0
You need to be logged in to leave comments. Login now