
Compare Commits r1106:490ebeeb75af...r1108:ebe0247cd154

Author / Commit / Description

Martin Bornhold - r1106:490ebeeb75af
subrepo: Add merge failure reason code and message for subrepo merge conflicts.

Martin Bornhold - r1107:6bc055e1504d
subrepo: Add exception for subrepo merge errors.

Martin Bornhold - r1108:ebe0247cd154
subrepo: Handle subrepo merge errors.
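
These three commits add a SUBREPO_MERGE_FAILED reason to MergeFailureReason, a dedicated exception for subrepo merge errors, and handling that maps such errors onto the merge response. Only the new failure reason is visible in the diff excerpt below; the exception class itself is not part of this excerpt, so the following minimal sketch uses a hypothetical SubRepoMergeError name purely to illustrate how a backend merge implementation could translate a subrepo conflict into the new reason code:

    # Illustrative sketch only. `SubRepoMergeError` is a placeholder name; the
    # real exception added in r1107 is not shown in this excerpt.
    from rhodecode.lib.vcs.backends.base import MergeFailureReason, MergeResponse


    class SubRepoMergeError(Exception):
        """Placeholder for the subrepo merge exception introduced in r1107."""


    def merge_with_subrepo_handling(merge_callable, *args, **kwargs):
        # Run a backend merge and translate subrepo conflicts into the new
        # SUBREPO_MERGE_FAILED reason instead of reporting them as UNKNOWN.
        try:
            return merge_callable(*args, **kwargs)
        except SubRepoMergeError:
            return MergeResponse(
                False, False, None, MergeFailureReason.SUBREPO_MERGE_FAILED)
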
@@ -1,1545 +1,1549 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2016 RhodeCode GmbH
3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24
24
25 import collections
25 import collections
26 import datetime
26 import datetime
27 import itertools
27 import itertools
28 import logging
28 import logging
29 import os
29 import os
30 import time
30 import time
31 import warnings
31 import warnings
32
32
33 from zope.cachedescriptors.property import Lazy as LazyProperty
33 from zope.cachedescriptors.property import Lazy as LazyProperty
34
34
35 from rhodecode.lib.utils2 import safe_str, safe_unicode
35 from rhodecode.lib.utils2 import safe_str, safe_unicode
36 from rhodecode.lib.vcs import connection
36 from rhodecode.lib.vcs import connection
37 from rhodecode.lib.vcs.utils import author_name, author_email
37 from rhodecode.lib.vcs.utils import author_name, author_email
38 from rhodecode.lib.vcs.conf import settings
38 from rhodecode.lib.vcs.conf import settings
39 from rhodecode.lib.vcs.exceptions import (
39 from rhodecode.lib.vcs.exceptions import (
40 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
40 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
41 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
41 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
42 NodeDoesNotExistError, NodeNotChangedError, VCSError,
42 NodeDoesNotExistError, NodeNotChangedError, VCSError,
43 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
43 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
44 RepositoryError)
44 RepositoryError)
45
45
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 FILEMODE_DEFAULT = 0100644
50 FILEMODE_DEFAULT = 0100644
51 FILEMODE_EXECUTABLE = 0100755
51 FILEMODE_EXECUTABLE = 0100755
52
52
53 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
53 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
54 MergeResponse = collections.namedtuple(
54 MergeResponse = collections.namedtuple(
55 'MergeResponse',
55 'MergeResponse',
56 ('possible', 'executed', 'merge_ref', 'failure_reason'))
56 ('possible', 'executed', 'merge_ref', 'failure_reason'))
57
57
58
58
59 class MergeFailureReason(object):
59 class MergeFailureReason(object):
60 """
60 """
61 Enumeration with all the reasons why the server side merge could fail.
61 Enumeration with all the reasons why the server side merge could fail.
62
62
63 DO NOT change the numbers of the reasons, as they may be stored in the
63 DO NOT change the numbers of the reasons, as they may be stored in the
64 database.
64 database.
65
65
66 Changing the name of a reason is acceptable and encouraged to deprecate old
66 Changing the name of a reason is acceptable and encouraged to deprecate old
67 reasons.
67 reasons.
68 """
68 """
69
69
70 # Everything went well.
70 # Everything went well.
71 NONE = 0
71 NONE = 0
72
72
73 # An unexpected exception was raised. Check the logs for more details.
73 # An unexpected exception was raised. Check the logs for more details.
74 UNKNOWN = 1
74 UNKNOWN = 1
75
75
76 # The merge was not successful, there are conflicts.
76 # The merge was not successful, there are conflicts.
77 MERGE_FAILED = 2
77 MERGE_FAILED = 2
78
78
79 # The merge succeeded but we could not push it to the target repository.
79 # The merge succeeded but we could not push it to the target repository.
80 PUSH_FAILED = 3
80 PUSH_FAILED = 3
81
81
82 # The specified target is not a head in the target repository.
82 # The specified target is not a head in the target repository.
83 TARGET_IS_NOT_HEAD = 4
83 TARGET_IS_NOT_HEAD = 4
84
84
85 # The source repository contains more branches than the target. Pushing
85 # The source repository contains more branches than the target. Pushing
86 # the merge will create additional branches in the target.
86 # the merge will create additional branches in the target.
87 HG_SOURCE_HAS_MORE_BRANCHES = 5
87 HG_SOURCE_HAS_MORE_BRANCHES = 5
88
88
89 # The target reference has multiple heads. That makes it impossible to
89 # The target reference has multiple heads. That makes it impossible to
90 # correctly identify the target location. This can only happen for mercurial
90 # correctly identify the target location. This can only happen for mercurial
91 # branches.
91 # branches.
92 HG_TARGET_HAS_MULTIPLE_HEADS = 6
92 HG_TARGET_HAS_MULTIPLE_HEADS = 6
93
93
94 # The target repository is locked
94 # The target repository is locked
95 TARGET_IS_LOCKED = 7
95 TARGET_IS_LOCKED = 7
96
96
97 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
97 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
98 # An involved commit could not be found.
98 # An involved commit could not be found.
99 _DEPRECATED_MISSING_COMMIT = 8
99 _DEPRECATED_MISSING_COMMIT = 8
100
100
101 # The target repo reference is missing.
101 # The target repo reference is missing.
102 MISSING_TARGET_REF = 9
102 MISSING_TARGET_REF = 9
103
103
104 # The source repo reference is missing.
104 # The source repo reference is missing.
105 MISSING_SOURCE_REF = 10
105 MISSING_SOURCE_REF = 10
106
106
107 # The merge was not successful, there are conflicts related to sub
108 # repositories.
109 SUBREPO_MERGE_FAILED = 11
110
107
111
108 class UpdateFailureReason(object):
112 class UpdateFailureReason(object):
109 """
113 """
110 Enumeration with all the reasons why the pull request update could fail.
114 Enumeration with all the reasons why the pull request update could fail.
111
115
112 DO NOT change the numbers of the reasons, as they may be stored in the
116 DO NOT change the numbers of the reasons, as they may be stored in the
113 database.
117 database.
114
118
115 Changing the name of a reason is acceptable and encouraged to deprecate old
119 Changing the name of a reason is acceptable and encouraged to deprecate old
116 reasons.
120 reasons.
117 """
121 """
118
122
119 # Everything went well.
123 # Everything went well.
120 NONE = 0
124 NONE = 0
121
125
122 # An unexpected exception was raised. Check the logs for more details.
126 # An unexpected exception was raised. Check the logs for more details.
123 UNKNOWN = 1
127 UNKNOWN = 1
124
128
125 # The pull request is up to date.
129 # The pull request is up to date.
126 NO_CHANGE = 2
130 NO_CHANGE = 2
127
131
128 # The pull request has a reference type that is not supported for update.
132 # The pull request has a reference type that is not supported for update.
129 WRONG_REF_TPYE = 3
133 WRONG_REF_TPYE = 3
130
134
131 # Update failed because the target reference is missing.
135 # Update failed because the target reference is missing.
132 MISSING_TARGET_REF = 4
136 MISSING_TARGET_REF = 4
133
137
134 # Update failed because the source reference is missing.
138 # Update failed because the source reference is missing.
135 MISSING_SOURCE_REF = 5
139 MISSING_SOURCE_REF = 5
136
140
137
141
138 class BaseRepository(object):
142 class BaseRepository(object):
139 """
143 """
140 Base Repository for final backends
144 Base Repository for final backends
141
145
142 .. attribute:: DEFAULT_BRANCH_NAME
146 .. attribute:: DEFAULT_BRANCH_NAME
143
147
144 name of default branch (i.e. "trunk" for svn, "master" for git, etc.)
148 name of default branch (i.e. "trunk" for svn, "master" for git, etc.)
145
149
146 .. attribute:: commit_ids
150 .. attribute:: commit_ids
147
151
148 list of all available commit ids, in ascending order
152 list of all available commit ids, in ascending order
149
153
150 .. attribute:: path
154 .. attribute:: path
151
155
152 absolute path to the repository
156 absolute path to the repository
153
157
154 .. attribute:: bookmarks
158 .. attribute:: bookmarks
155
159
156 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
160 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
157 there are no bookmarks or the backend implementation does not support
161 there are no bookmarks or the backend implementation does not support
158 bookmarks.
162 bookmarks.
159
163
160 .. attribute:: tags
164 .. attribute:: tags
161
165
162 Mapping from name to :term:`Commit ID` of the tag.
166 Mapping from name to :term:`Commit ID` of the tag.
163
167
164 """
168 """
165
169
166 DEFAULT_BRANCH_NAME = None
170 DEFAULT_BRANCH_NAME = None
167 DEFAULT_CONTACT = u"Unknown"
171 DEFAULT_CONTACT = u"Unknown"
168 DEFAULT_DESCRIPTION = u"unknown"
172 DEFAULT_DESCRIPTION = u"unknown"
169 EMPTY_COMMIT_ID = '0' * 40
173 EMPTY_COMMIT_ID = '0' * 40
170
174
171 path = None
175 path = None
172
176
173 def __init__(self, repo_path, config=None, create=False, **kwargs):
177 def __init__(self, repo_path, config=None, create=False, **kwargs):
174 """
178 """
175 Initializes repository. Raises RepositoryError if the repository could
179 Initializes repository. Raises RepositoryError if the repository could
176 not be found at the given ``repo_path``, or if a directory at ``repo_path``
180 not be found at the given ``repo_path``, or if a directory at ``repo_path``
177 exists and ``create`` is set to True.
181 exists and ``create`` is set to True.
178
182
179 :param repo_path: local path of the repository
183 :param repo_path: local path of the repository
180 :param config: repository configuration
184 :param config: repository configuration
181 :param create=False: if set to True, would try to create repository.
185 :param create=False: if set to True, would try to create repository.
182 :param src_url=None: if set, should be proper url from which repository
186 :param src_url=None: if set, should be proper url from which repository
183 would be cloned; requires ``create`` parameter to be set to True -
187 would be cloned; requires ``create`` parameter to be set to True -
184 raises RepositoryError if src_url is set and create evaluates to
188 raises RepositoryError if src_url is set and create evaluates to
185 False
189 False
186 """
190 """
187 raise NotImplementedError
191 raise NotImplementedError
188
192
189 def __repr__(self):
193 def __repr__(self):
190 return '<%s at %s>' % (self.__class__.__name__, self.path)
194 return '<%s at %s>' % (self.__class__.__name__, self.path)
191
195
192 def __len__(self):
196 def __len__(self):
193 return self.count()
197 return self.count()
194
198
195 def __eq__(self, other):
199 def __eq__(self, other):
196 same_instance = isinstance(other, self.__class__)
200 same_instance = isinstance(other, self.__class__)
197 return same_instance and other.path == self.path
201 return same_instance and other.path == self.path
198
202
199 def __ne__(self, other):
203 def __ne__(self, other):
200 return not self.__eq__(other)
204 return not self.__eq__(other)
201
205
202 @LazyProperty
206 @LazyProperty
203 def EMPTY_COMMIT(self):
207 def EMPTY_COMMIT(self):
204 return EmptyCommit(self.EMPTY_COMMIT_ID)
208 return EmptyCommit(self.EMPTY_COMMIT_ID)
205
209
206 @LazyProperty
210 @LazyProperty
207 def alias(self):
211 def alias(self):
208 for k, v in settings.BACKENDS.items():
212 for k, v in settings.BACKENDS.items():
209 if v.split('.')[-1] == str(self.__class__.__name__):
213 if v.split('.')[-1] == str(self.__class__.__name__):
210 return k
214 return k
211
215
212 @LazyProperty
216 @LazyProperty
213 def name(self):
217 def name(self):
214 return safe_unicode(os.path.basename(self.path))
218 return safe_unicode(os.path.basename(self.path))
215
219
216 @LazyProperty
220 @LazyProperty
217 def description(self):
221 def description(self):
218 raise NotImplementedError
222 raise NotImplementedError
219
223
220 def refs(self):
224 def refs(self):
221 """
225 """
222 returns a `dict` with branches, bookmarks, tags, and closed_branches
226 returns a `dict` with branches, bookmarks, tags, and closed_branches
223 for this repository
227 for this repository
224 """
228 """
225 raise NotImplementedError
229 raise NotImplementedError
226
230
227 @LazyProperty
231 @LazyProperty
228 def branches(self):
232 def branches(self):
229 """
233 """
230 A `dict` which maps branch names to commit ids.
234 A `dict` which maps branch names to commit ids.
231 """
235 """
232 raise NotImplementedError
236 raise NotImplementedError
233
237
234 @LazyProperty
238 @LazyProperty
235 def size(self):
239 def size(self):
236 """
240 """
237 Returns combined size in bytes for all repository files
241 Returns combined size in bytes for all repository files
238 """
242 """
239 tip = self.get_commit()
243 tip = self.get_commit()
240 return tip.size
244 return tip.size
241
245
242 def size_at_commit(self, commit_id):
246 def size_at_commit(self, commit_id):
243 commit = self.get_commit(commit_id)
247 commit = self.get_commit(commit_id)
244 return commit.size
248 return commit.size
245
249
246 def is_empty(self):
250 def is_empty(self):
247 return not bool(self.commit_ids)
251 return not bool(self.commit_ids)
248
252
249 @staticmethod
253 @staticmethod
250 def check_url(url, config):
254 def check_url(url, config):
251 """
255 """
252 Function will check given url and try to verify if it's a valid
256 Function will check given url and try to verify if it's a valid
253 link.
257 link.
254 """
258 """
255 raise NotImplementedError
259 raise NotImplementedError
256
260
257 @staticmethod
261 @staticmethod
258 def is_valid_repository(path):
262 def is_valid_repository(path):
259 """
263 """
260 Check if given `path` contains a valid repository of this backend
264 Check if given `path` contains a valid repository of this backend
261 """
265 """
262 raise NotImplementedError
266 raise NotImplementedError
263
267
264 # ==========================================================================
268 # ==========================================================================
265 # COMMITS
269 # COMMITS
266 # ==========================================================================
270 # ==========================================================================
267
271
268 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
272 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
269 """
273 """
270 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
274 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
271 are both None, most recent commit is returned.
275 are both None, most recent commit is returned.
272
276
273 :param pre_load: Optional. List of commit attributes to load.
277 :param pre_load: Optional. List of commit attributes to load.
274
278
275 :raises ``EmptyRepositoryError``: if there are no commits
279 :raises ``EmptyRepositoryError``: if there are no commits
276 """
280 """
277 raise NotImplementedError
281 raise NotImplementedError
278
282
279 def __iter__(self):
283 def __iter__(self):
280 for commit_id in self.commit_ids:
284 for commit_id in self.commit_ids:
281 yield self.get_commit(commit_id=commit_id)
285 yield self.get_commit(commit_id=commit_id)
282
286
283 def get_commits(
287 def get_commits(
284 self, start_id=None, end_id=None, start_date=None, end_date=None,
288 self, start_id=None, end_id=None, start_date=None, end_date=None,
285 branch_name=None, pre_load=None):
289 branch_name=None, pre_load=None):
286 """
290 """
287 Returns iterator of `BaseCommit` objects from start to end
291 Returns iterator of `BaseCommit` objects from start to end
288 not inclusive. This should behave just like a list, i.e. end is not
292 not inclusive. This should behave just like a list, i.e. end is not
289 inclusive.
293 inclusive.
290
294
291 :param start_id: None or str, must be a valid commit id
295 :param start_id: None or str, must be a valid commit id
292 :param end_id: None or str, must be a valid commit id
296 :param end_id: None or str, must be a valid commit id
293 :param start_date:
297 :param start_date:
294 :param end_date:
298 :param end_date:
295 :param branch_name:
299 :param branch_name:
296 :param pre_load:
300 :param pre_load:
297 """
301 """
298 raise NotImplementedError
302 raise NotImplementedError
299
303
300 def __getitem__(self, key):
304 def __getitem__(self, key):
301 """
305 """
302 Allows index based access to the commit objects of this repository.
306 Allows index based access to the commit objects of this repository.
303 """
307 """
304 pre_load = ["author", "branch", "date", "message", "parents"]
308 pre_load = ["author", "branch", "date", "message", "parents"]
305 if isinstance(key, slice):
309 if isinstance(key, slice):
306 return self._get_range(key, pre_load)
310 return self._get_range(key, pre_load)
307 return self.get_commit(commit_idx=key, pre_load=pre_load)
311 return self.get_commit(commit_idx=key, pre_load=pre_load)
308
312
309 def _get_range(self, slice_obj, pre_load):
313 def _get_range(self, slice_obj, pre_load):
310 for commit_id in self.commit_ids.__getitem__(slice_obj):
314 for commit_id in self.commit_ids.__getitem__(slice_obj):
311 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
315 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
312
316
313 def count(self):
317 def count(self):
314 return len(self.commit_ids)
318 return len(self.commit_ids)
315
319
316 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
320 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
317 """
321 """
318 Creates and returns a tag for the given ``commit_id``.
322 Creates and returns a tag for the given ``commit_id``.
319
323
320 :param name: name for new tag
324 :param name: name for new tag
321 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
325 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
322 :param commit_id: commit id for which new tag would be created
326 :param commit_id: commit id for which new tag would be created
323 :param message: message of the tag's commit
327 :param message: message of the tag's commit
324 :param date: date of tag's commit
328 :param date: date of tag's commit
325
329
326 :raises TagAlreadyExistError: if tag with same name already exists
330 :raises TagAlreadyExistError: if tag with same name already exists
327 """
331 """
328 raise NotImplementedError
332 raise NotImplementedError
329
333
330 def remove_tag(self, name, user, message=None, date=None):
334 def remove_tag(self, name, user, message=None, date=None):
331 """
335 """
332 Removes tag with the given ``name``.
336 Removes tag with the given ``name``.
333
337
334 :param name: name of the tag to be removed
338 :param name: name of the tag to be removed
335 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
339 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
336 :param message: message of the tag's removal commit
340 :param message: message of the tag's removal commit
337 :param date: date of tag's removal commit
341 :param date: date of tag's removal commit
338
342
339 :raises TagDoesNotExistError: if tag with given name does not exist
343 :raises TagDoesNotExistError: if tag with given name does not exist
340 """
344 """
341 raise NotImplementedError
345 raise NotImplementedError
342
346
343 def get_diff(
347 def get_diff(
344 self, commit1, commit2, path=None, ignore_whitespace=False,
348 self, commit1, commit2, path=None, ignore_whitespace=False,
345 context=3, path1=None):
349 context=3, path1=None):
346 """
350 """
347 Returns (git like) *diff*, as plain text. Shows changes introduced by
351 Returns (git like) *diff*, as plain text. Shows changes introduced by
348 `commit2` since `commit1`.
352 `commit2` since `commit1`.
349
353
350 :param commit1: Entry point from which diff is shown. Can be
354 :param commit1: Entry point from which diff is shown. Can be
351 ``self.EMPTY_COMMIT`` - in this case, patch showing all
355 ``self.EMPTY_COMMIT`` - in this case, patch showing all
352 the changes since empty state of the repository until `commit2`
356 the changes since empty state of the repository until `commit2`
353 :param commit2: Until which commit changes should be shown.
357 :param commit2: Until which commit changes should be shown.
354 :param path: Can be set to a path of a file to create a diff of that
358 :param path: Can be set to a path of a file to create a diff of that
355 file. If `path1` is also set, this value is only associated to
359 file. If `path1` is also set, this value is only associated to
356 `commit2`.
360 `commit2`.
357 :param ignore_whitespace: If set to ``True``, would not show whitespace
361 :param ignore_whitespace: If set to ``True``, would not show whitespace
358 changes. Defaults to ``False``.
362 changes. Defaults to ``False``.
359 :param context: How many lines before/after changed lines should be
363 :param context: How many lines before/after changed lines should be
360 shown. Defaults to ``3``.
364 shown. Defaults to ``3``.
361 :param path1: Can be set to a path to associate with `commit1`. This
365 :param path1: Can be set to a path to associate with `commit1`. This
362 parameter works only for backends which support diff generation for
366 parameter works only for backends which support diff generation for
363 different paths. Other backends will raise a `ValueError` if `path1`
367 different paths. Other backends will raise a `ValueError` if `path1`
364 is set and has a different value than `path`.
368 is set and has a different value than `path`.
365 """
369 """
366 raise NotImplementedError
370 raise NotImplementedError
367
371
368 def strip(self, commit_id, branch=None):
372 def strip(self, commit_id, branch=None):
369 """
373 """
370 Strip given commit_id from the repository
374 Strip given commit_id from the repository
371 """
375 """
372 raise NotImplementedError
376 raise NotImplementedError
373
377
374 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
378 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
375 """
379 """
376 Return the latest common ancestor commit, if one exists, for this repo's
380 Return the latest common ancestor commit, if one exists, for this repo's
377 `commit_id1` vs `commit_id2` from `repo2`.
381 `commit_id1` vs `commit_id2` from `repo2`.
378
382
379 :param commit_id1: Commit id from this repository to use as a
383 :param commit_id1: Commit id from this repository to use as a
380 target for the comparison.
384 target for the comparison.
381 :param commit_id2: Source commit id to use for comparison.
385 :param commit_id2: Source commit id to use for comparison.
382 :param repo2: Source repository to use for comparison.
386 :param repo2: Source repository to use for comparison.
383 """
387 """
384 raise NotImplementedError
388 raise NotImplementedError
385
389
386 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
390 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
387 """
391 """
388 Compare this repository's revision `commit_id1` with `commit_id2`.
392 Compare this repository's revision `commit_id1` with `commit_id2`.
389
393
390 Returns a tuple(commits, ancestor) that would be merged from
394 Returns a tuple(commits, ancestor) that would be merged from
391 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
395 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
392 will be returned as ancestor.
396 will be returned as ancestor.
393
397
394 :param commit_id1: Commit id from this repository to use as a
398 :param commit_id1: Commit id from this repository to use as a
395 target for the comparison.
399 target for the comparison.
396 :param commit_id2: Source commit id to use for comparison.
400 :param commit_id2: Source commit id to use for comparison.
397 :param repo2: Source repository to use for comparison.
401 :param repo2: Source repository to use for comparison.
398 :param merge: If set to ``True`` will do a merge compare which also
402 :param merge: If set to ``True`` will do a merge compare which also
399 returns the common ancestor.
403 returns the common ancestor.
400 :param pre_load: Optional. List of commit attributes to load.
404 :param pre_load: Optional. List of commit attributes to load.
401 """
405 """
402 raise NotImplementedError
406 raise NotImplementedError
403
407
404 def merge(self, target_ref, source_repo, source_ref, workspace_id,
408 def merge(self, target_ref, source_repo, source_ref, workspace_id,
405 user_name='', user_email='', message='', dry_run=False,
409 user_name='', user_email='', message='', dry_run=False,
406 use_rebase=False):
410 use_rebase=False):
407 """
411 """
408 Merge the revisions specified in `source_ref` from `source_repo`
412 Merge the revisions specified in `source_ref` from `source_repo`
409 onto the `target_ref` of this repository.
413 onto the `target_ref` of this repository.
410
414
411 `source_ref` and `target_ref` are named tuples with the following
415 `source_ref` and `target_ref` are named tuples with the following
412 fields `type`, `name` and `commit_id`.
416 fields `type`, `name` and `commit_id`.
413
417
414 Returns a MergeResponse named tuple with the following fields
418 Returns a MergeResponse named tuple with the following fields
415 'possible', 'executed', 'merge_ref' and
419 'possible', 'executed', 'merge_ref' and
416 'failure_reason'.
420 'failure_reason'.
417
421
418 :param target_ref: `target_ref` points to the commit on top of which
422 :param target_ref: `target_ref` points to the commit on top of which
419 the `source_ref` should be merged.
423 the `source_ref` should be merged.
420 :param source_repo: The repository that contains the commits to be
424 :param source_repo: The repository that contains the commits to be
421 merged.
425 merged.
422 :param source_ref: `source_ref` points to the topmost commit from
426 :param source_ref: `source_ref` points to the topmost commit from
423 the `source_repo` which should be merged.
427 the `source_repo` which should be merged.
424 :param workspace_id: `workspace_id` unique identifier.
428 :param workspace_id: `workspace_id` unique identifier.
425 :param user_name: Merge commit `user_name`.
429 :param user_name: Merge commit `user_name`.
426 :param user_email: Merge commit `user_email`.
430 :param user_email: Merge commit `user_email`.
427 :param message: Merge commit `message`.
431 :param message: Merge commit `message`.
428 :param dry_run: If `True` the merge will not take place.
432 :param dry_run: If `True` the merge will not take place.
429 :param use_rebase: If `True` commits from the source will be rebased
433 :param use_rebase: If `True` commits from the source will be rebased
430 on top of the target instead of being merged.
434 on top of the target instead of being merged.
431 """
435 """
432 if dry_run:
436 if dry_run:
433 message = message or 'dry_run_merge_message'
437 message = message or 'dry_run_merge_message'
434 user_email = user_email or 'dry-run-merge@rhodecode.com'
438 user_email = user_email or 'dry-run-merge@rhodecode.com'
435 user_name = user_name or 'Dry-Run User'
439 user_name = user_name or 'Dry-Run User'
436 else:
440 else:
437 if not user_name:
441 if not user_name:
438 raise ValueError('user_name cannot be empty')
442 raise ValueError('user_name cannot be empty')
439 if not user_email:
443 if not user_email:
440 raise ValueError('user_email cannot be empty')
444 raise ValueError('user_email cannot be empty')
441 if not message:
445 if not message:
442 raise ValueError('message cannot be empty')
446 raise ValueError('message cannot be empty')
443
447
444 shadow_repository_path = self._maybe_prepare_merge_workspace(
448 shadow_repository_path = self._maybe_prepare_merge_workspace(
445 workspace_id, target_ref)
449 workspace_id, target_ref)
446
450
447 try:
451 try:
448 return self._merge_repo(
452 return self._merge_repo(
449 shadow_repository_path, target_ref, source_repo,
453 shadow_repository_path, target_ref, source_repo,
450 source_ref, message, user_name, user_email, dry_run=dry_run,
454 source_ref, message, user_name, user_email, dry_run=dry_run,
451 use_rebase=use_rebase)
455 use_rebase=use_rebase)
452 except RepositoryError:
456 except RepositoryError:
453 log.exception(
457 log.exception(
454 'Unexpected failure when running merge, dry-run=%s',
458 'Unexpected failure when running merge, dry-run=%s',
455 dry_run)
459 dry_run)
456 return MergeResponse(
460 return MergeResponse(
457 False, False, None, MergeFailureReason.UNKNOWN)
461 False, False, None, MergeFailureReason.UNKNOWN)
458
462
459 def _merge_repo(self, shadow_repository_path, target_ref,
463 def _merge_repo(self, shadow_repository_path, target_ref,
460 source_repo, source_ref, merge_message,
464 source_repo, source_ref, merge_message,
461 merger_name, merger_email, dry_run=False, use_rebase=False):
465 merger_name, merger_email, dry_run=False, use_rebase=False):
462 """Internal implementation of merge."""
466 """Internal implementation of merge."""
463 raise NotImplementedError
467 raise NotImplementedError
464
468
465 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref):
469 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref):
466 """
470 """
467 Create the merge workspace.
471 Create the merge workspace.
468
472
469 :param workspace_id: `workspace_id` unique identifier.
473 :param workspace_id: `workspace_id` unique identifier.
470 """
474 """
471 raise NotImplementedError
475 raise NotImplementedError
472
476
473 def cleanup_merge_workspace(self, workspace_id):
477 def cleanup_merge_workspace(self, workspace_id):
474 """
478 """
475 Remove merge workspace.
479 Remove merge workspace.
476
480
477 This function MUST not fail in case there is no workspace associated with
481 This function MUST not fail in case there is no workspace associated with
478 the given `workspace_id`.
482 the given `workspace_id`.
479
483
480 :param workspace_id: `workspace_id` unique identifier.
484 :param workspace_id: `workspace_id` unique identifier.
481 """
485 """
482 raise NotImplementedError
486 raise NotImplementedError
483
487
484 # ========== #
488 # ========== #
485 # COMMIT API #
489 # COMMIT API #
486 # ========== #
490 # ========== #
487
491
488 @LazyProperty
492 @LazyProperty
489 def in_memory_commit(self):
493 def in_memory_commit(self):
490 """
494 """
491 Returns :class:`InMemoryCommit` object for this repository.
495 Returns :class:`InMemoryCommit` object for this repository.
492 """
496 """
493 raise NotImplementedError
497 raise NotImplementedError
494
498
495 # ======================== #
499 # ======================== #
496 # UTILITIES FOR SUBCLASSES #
500 # UTILITIES FOR SUBCLASSES #
497 # ======================== #
501 # ======================== #
498
502
499 def _validate_diff_commits(self, commit1, commit2):
503 def _validate_diff_commits(self, commit1, commit2):
500 """
504 """
501 Validates that the given commits are related to this repository.
505 Validates that the given commits are related to this repository.
502
506
503 Intended as a utility for subclasses to have a consistent validation
507 Intended as a utility for subclasses to have a consistent validation
504 of input parameters in methods like :meth:`get_diff`.
508 of input parameters in methods like :meth:`get_diff`.
505 """
509 """
506 self._validate_commit(commit1)
510 self._validate_commit(commit1)
507 self._validate_commit(commit2)
511 self._validate_commit(commit2)
508 if (isinstance(commit1, EmptyCommit) and
512 if (isinstance(commit1, EmptyCommit) and
509 isinstance(commit2, EmptyCommit)):
513 isinstance(commit2, EmptyCommit)):
510 raise ValueError("Cannot compare two empty commits")
514 raise ValueError("Cannot compare two empty commits")
511
515
512 def _validate_commit(self, commit):
516 def _validate_commit(self, commit):
513 if not isinstance(commit, BaseCommit):
517 if not isinstance(commit, BaseCommit):
514 raise TypeError(
518 raise TypeError(
515 "%s is not of type BaseCommit" % repr(commit))
519 "%s is not of type BaseCommit" % repr(commit))
516 if commit.repository != self and not isinstance(commit, EmptyCommit):
520 if commit.repository != self and not isinstance(commit, EmptyCommit):
517 raise ValueError(
521 raise ValueError(
518 "Commit %s must be a valid commit from this repository %s, "
522 "Commit %s must be a valid commit from this repository %s, "
519 "related to this repository instead %s." %
523 "related to this repository instead %s." %
520 (commit, self, commit.repository))
524 (commit, self, commit.repository))
521
525
522 def _validate_commit_id(self, commit_id):
526 def _validate_commit_id(self, commit_id):
523 if not isinstance(commit_id, basestring):
527 if not isinstance(commit_id, basestring):
524 raise TypeError("commit_id must be a string value")
528 raise TypeError("commit_id must be a string value")
525
529
526 def _validate_commit_idx(self, commit_idx):
530 def _validate_commit_idx(self, commit_idx):
527 if not isinstance(commit_idx, (int, long)):
531 if not isinstance(commit_idx, (int, long)):
528 raise TypeError("commit_idx must be a numeric value")
532 raise TypeError("commit_idx must be a numeric value")
529
533
530 def _validate_branch_name(self, branch_name):
534 def _validate_branch_name(self, branch_name):
531 if branch_name and branch_name not in self.branches_all:
535 if branch_name and branch_name not in self.branches_all:
532 msg = ("Branch %s not found in %s" % (branch_name, self))
536 msg = ("Branch %s not found in %s" % (branch_name, self))
533 raise BranchDoesNotExistError(msg)
537 raise BranchDoesNotExistError(msg)
534
538
535 #
539 #
536 # Supporting deprecated API parts
540 # Supporting deprecated API parts
537 # TODO: johbo: consider to move this into a mixin
541 # TODO: johbo: consider to move this into a mixin
538 #
542 #
539
543
540 @property
544 @property
541 def EMPTY_CHANGESET(self):
545 def EMPTY_CHANGESET(self):
542 warnings.warn(
546 warnings.warn(
543 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
547 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
544 return self.EMPTY_COMMIT_ID
548 return self.EMPTY_COMMIT_ID
545
549
546 @property
550 @property
547 def revisions(self):
551 def revisions(self):
548 warnings.warn("Use commits attribute instead", DeprecationWarning)
552 warnings.warn("Use commits attribute instead", DeprecationWarning)
549 return self.commit_ids
553 return self.commit_ids
550
554
551 @revisions.setter
555 @revisions.setter
552 def revisions(self, value):
556 def revisions(self, value):
553 warnings.warn("Use commits attribute instead", DeprecationWarning)
557 warnings.warn("Use commits attribute instead", DeprecationWarning)
554 self.commit_ids = value
558 self.commit_ids = value
555
559
556 def get_changeset(self, revision=None, pre_load=None):
560 def get_changeset(self, revision=None, pre_load=None):
557 warnings.warn("Use get_commit instead", DeprecationWarning)
561 warnings.warn("Use get_commit instead", DeprecationWarning)
558 commit_id = None
562 commit_id = None
559 commit_idx = None
563 commit_idx = None
560 if isinstance(revision, basestring):
564 if isinstance(revision, basestring):
561 commit_id = revision
565 commit_id = revision
562 else:
566 else:
563 commit_idx = revision
567 commit_idx = revision
564 return self.get_commit(
568 return self.get_commit(
565 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
569 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
566
570
567 def get_changesets(
571 def get_changesets(
568 self, start=None, end=None, start_date=None, end_date=None,
572 self, start=None, end=None, start_date=None, end_date=None,
569 branch_name=None, pre_load=None):
573 branch_name=None, pre_load=None):
570 warnings.warn("Use get_commits instead", DeprecationWarning)
574 warnings.warn("Use get_commits instead", DeprecationWarning)
571 start_id = self._revision_to_commit(start)
575 start_id = self._revision_to_commit(start)
572 end_id = self._revision_to_commit(end)
576 end_id = self._revision_to_commit(end)
573 return self.get_commits(
577 return self.get_commits(
574 start_id=start_id, end_id=end_id, start_date=start_date,
578 start_id=start_id, end_id=end_id, start_date=start_date,
575 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
579 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
576
580
577 def _revision_to_commit(self, revision):
581 def _revision_to_commit(self, revision):
578 """
582 """
579 Translates a revision to a commit_id
583 Translates a revision to a commit_id
580
584
581 Helps to support the old changeset based API which allows to use
585 Helps to support the old changeset based API which allows to use
582 commit ids and commit indices interchangeably.
586 commit ids and commit indices interchangeably.
583 """
587 """
584 if revision is None:
588 if revision is None:
585 return revision
589 return revision
586
590
587 if isinstance(revision, basestring):
591 if isinstance(revision, basestring):
588 commit_id = revision
592 commit_id = revision
589 else:
593 else:
590 commit_id = self.commit_ids[revision]
594 commit_id = self.commit_ids[revision]
591 return commit_id
595 return commit_id
592
596
593 @property
597 @property
594 def in_memory_changeset(self):
598 def in_memory_changeset(self):
595 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
599 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
596 return self.in_memory_commit
600 return self.in_memory_commit
597
601
598
602
599 class BaseCommit(object):
603 class BaseCommit(object):
600 """
604 """
601 Each backend should implement its commit representation.
605 Each backend should implement its commit representation.
602
606
603 **Attributes**
607 **Attributes**
604
608
605 ``repository``
609 ``repository``
606 repository object within which commit exists
610 repository object within which commit exists
607
611
608 ``id``
612 ``id``
609 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
613 The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,
610 just ``tip``.
614 just ``tip``.
611
615
612 ``raw_id``
616 ``raw_id``
613 raw commit representation (i.e. full 40 length sha for git
617 raw commit representation (i.e. full 40 length sha for git
614 backend)
618 backend)
615
619
616 ``short_id``
620 ``short_id``
617 shortened (if applicable) version of ``raw_id``; it would be a simple
621 shortened (if applicable) version of ``raw_id``; it would be a simple
618 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
622 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
619 as ``raw_id`` for subversion
623 as ``raw_id`` for subversion
620
624
621 ``idx``
625 ``idx``
622 commit index
626 commit index
623
627
624 ``files``
628 ``files``
625 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
629 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
626
630
627 ``dirs``
631 ``dirs``
628 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
632 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
629
633
630 ``nodes``
634 ``nodes``
631 combined list of ``Node`` objects
635 combined list of ``Node`` objects
632
636
633 ``author``
637 ``author``
634 author of the commit, as unicode
638 author of the commit, as unicode
635
639
636 ``message``
640 ``message``
637 message of the commit, as unicode
641 message of the commit, as unicode
638
642
639 ``parents``
643 ``parents``
640 list of parent commits
644 list of parent commits
641
645
642 """
646 """
643
647
644 branch = None
648 branch = None
645 """
649 """
646 Depending on the backend this should be set to the branch name of the
650 Depending on the backend this should be set to the branch name of the
647 commit. Backends not supporting branches on commits should leave this
651 commit. Backends not supporting branches on commits should leave this
648 value as ``None``.
652 value as ``None``.
649 """
653 """
650
654
651 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
655 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
652 """
656 """
653 This template is used to generate a default prefix for repository archives
657 This template is used to generate a default prefix for repository archives
654 if no prefix has been specified.
658 if no prefix has been specified.
655 """
659 """
656
660
657 def __str__(self):
661 def __str__(self):
658 return '<%s at %s:%s>' % (
662 return '<%s at %s:%s>' % (
659 self.__class__.__name__, self.idx, self.short_id)
663 self.__class__.__name__, self.idx, self.short_id)
660
664
661 def __repr__(self):
665 def __repr__(self):
662 return self.__str__()
666 return self.__str__()
663
667
664 def __unicode__(self):
668 def __unicode__(self):
665 return u'%s:%s' % (self.idx, self.short_id)
669 return u'%s:%s' % (self.idx, self.short_id)
666
670
667 def __eq__(self, other):
671 def __eq__(self, other):
668 same_instance = isinstance(other, self.__class__)
672 same_instance = isinstance(other, self.__class__)
669 return same_instance and self.raw_id == other.raw_id
673 return same_instance and self.raw_id == other.raw_id
670
674
671 def __json__(self):
675 def __json__(self):
672 parents = []
676 parents = []
673 try:
677 try:
674 for parent in self.parents:
678 for parent in self.parents:
675 parents.append({'raw_id': parent.raw_id})
679 parents.append({'raw_id': parent.raw_id})
676 except NotImplementedError:
680 except NotImplementedError:
677 # empty commit doesn't have parents implemented
681 # empty commit doesn't have parents implemented
678 pass
682 pass
679
683
680 return {
684 return {
681 'short_id': self.short_id,
685 'short_id': self.short_id,
682 'raw_id': self.raw_id,
686 'raw_id': self.raw_id,
683 'revision': self.idx,
687 'revision': self.idx,
684 'message': self.message,
688 'message': self.message,
685 'date': self.date,
689 'date': self.date,
686 'author': self.author,
690 'author': self.author,
687 'parents': parents,
691 'parents': parents,
688 'branch': self.branch
692 'branch': self.branch
689 }
693 }
690
694
691 @LazyProperty
695 @LazyProperty
692 def last(self):
696 def last(self):
693 """
697 """
694 ``True`` if this is the last commit in the repository, ``False``
698 ``True`` if this is the last commit in the repository, ``False``
695 otherwise; trying to access this attribute while there are no
699 otherwise; trying to access this attribute while there are no
696 commits would raise `EmptyRepositoryError`
700 commits would raise `EmptyRepositoryError`
697 """
701 """
698 if self.repository is None:
702 if self.repository is None:
699 raise CommitError("Cannot check if it's most recent commit")
703 raise CommitError("Cannot check if it's most recent commit")
700 return self.raw_id == self.repository.commit_ids[-1]
704 return self.raw_id == self.repository.commit_ids[-1]
701
705
702 @LazyProperty
706 @LazyProperty
703 def parents(self):
707 def parents(self):
704 """
708 """
705 Returns list of parent commits.
709 Returns list of parent commits.
706 """
710 """
707 raise NotImplementedError
711 raise NotImplementedError
708
712
709 @property
713 @property
710 def merge(self):
714 def merge(self):
711 """
715 """
712 Returns boolean if commit is a merge.
716 Returns boolean if commit is a merge.
713 """
717 """
714 return len(self.parents) > 1
718 return len(self.parents) > 1
715
719
716 @LazyProperty
720 @LazyProperty
717 def children(self):
721 def children(self):
718 """
722 """
719 Returns list of child commits.
723 Returns list of child commits.
720 """
724 """
721 raise NotImplementedError
725 raise NotImplementedError
722
726
723 @LazyProperty
727 @LazyProperty
724 def id(self):
728 def id(self):
725 """
729 """
726 Returns string identifying this commit.
730 Returns string identifying this commit.
727 """
731 """
728 raise NotImplementedError
732 raise NotImplementedError
729
733
730 @LazyProperty
734 @LazyProperty
731 def raw_id(self):
735 def raw_id(self):
732 """
736 """
733 Returns raw string identifying this commit.
737 Returns raw string identifying this commit.
734 """
738 """
735 raise NotImplementedError
739 raise NotImplementedError
736
740
737 @LazyProperty
741 @LazyProperty
738 def short_id(self):
742 def short_id(self):
739 """
743 """
740 Returns shortened version of ``raw_id`` attribute, as string,
744 Returns shortened version of ``raw_id`` attribute, as string,
741 identifying this commit, useful for presentation to users.
745 identifying this commit, useful for presentation to users.
742 """
746 """
743 raise NotImplementedError
747 raise NotImplementedError
744
748
745 @LazyProperty
749 @LazyProperty
746 def idx(self):
750 def idx(self):
747 """
751 """
748 Returns integer identifying this commit.
752 Returns integer identifying this commit.
749 """
753 """
750 raise NotImplementedError
754 raise NotImplementedError
751
755
752 @LazyProperty
756 @LazyProperty
753 def committer(self):
757 def committer(self):
754 """
758 """
755 Returns committer for this commit
759 Returns committer for this commit
756 """
760 """
757 raise NotImplementedError
761 raise NotImplementedError
758
762
759 @LazyProperty
763 @LazyProperty
760 def committer_name(self):
764 def committer_name(self):
761 """
765 """
762 Returns committer name for this commit
766 Returns committer name for this commit
763 """
767 """
764
768
765 return author_name(self.committer)
769 return author_name(self.committer)
766
770
767 @LazyProperty
771 @LazyProperty
768 def committer_email(self):
772 def committer_email(self):
769 """
773 """
770 Returns committer email address for this commit
774 Returns committer email address for this commit
771 """
775 """
772
776
773 return author_email(self.committer)
777 return author_email(self.committer)
774
778
775 @LazyProperty
779 @LazyProperty
776 def author(self):
780 def author(self):
777 """
781 """
778 Returns author for this commit
782 Returns author for this commit
779 """
783 """
780
784
781 raise NotImplementedError
785 raise NotImplementedError
782
786
783 @LazyProperty
787 @LazyProperty
784 def author_name(self):
788 def author_name(self):
785 """
789 """
786 Returns author name for this commit
790 Returns author name for this commit
787 """
791 """
788
792
789 return author_name(self.author)
793 return author_name(self.author)
790
794
791 @LazyProperty
795 @LazyProperty
792 def author_email(self):
796 def author_email(self):
793 """
797 """
794 Returns author email address for this commit
798 Returns author email address for this commit
795 """
799 """
796
800
797 return author_email(self.author)
801 return author_email(self.author)
798
802
799 def get_file_mode(self, path):
803 def get_file_mode(self, path):
800 """
804 """
801 Returns stat mode of the file at `path`.
805 Returns stat mode of the file at `path`.
802 """
806 """
803 raise NotImplementedError
807 raise NotImplementedError
804
808
805 def is_link(self, path):
809 def is_link(self, path):
806 """
810 """
807 Returns ``True`` if given `path` is a symlink
811 Returns ``True`` if given `path` is a symlink
808 """
812 """
809 raise NotImplementedError
813 raise NotImplementedError
810
814
811 def get_file_content(self, path):
815 def get_file_content(self, path):
812 """
816 """
813 Returns content of the file at the given `path`.
817 Returns content of the file at the given `path`.
814 """
818 """
815 raise NotImplementedError
819 raise NotImplementedError
816
820
817 def get_file_size(self, path):
821 def get_file_size(self, path):
818 """
822 """
819 Returns size of the file at the given `path`.
823 Returns size of the file at the given `path`.
820 """
824 """
821 raise NotImplementedError
825 raise NotImplementedError
822
826
823 def get_file_commit(self, path, pre_load=None):
827 def get_file_commit(self, path, pre_load=None):
824 """
828 """
825 Returns last commit of the file at the given `path`.
829 Returns last commit of the file at the given `path`.
826
830
827 :param pre_load: Optional. List of commit attributes to load.
831 :param pre_load: Optional. List of commit attributes to load.
828 """
832 """
829 return self.get_file_history(path, limit=1, pre_load=pre_load)[0]
833 return self.get_file_history(path, limit=1, pre_load=pre_load)[0]
830
834
831 def get_file_history(self, path, limit=None, pre_load=None):
835 def get_file_history(self, path, limit=None, pre_load=None):
832 """
836 """
833 Returns history of file as reversed list of :class:`BaseCommit`
837 Returns history of file as reversed list of :class:`BaseCommit`
834 objects for which file at given `path` has been modified.
838 objects for which file at given `path` has been modified.
835
839
836 :param limit: Optional. Allows limiting the size of the returned
840 :param limit: Optional. Allows limiting the size of the returned
837 history. This is intended as a hint to the underlying backend, so
841 history. This is intended as a hint to the underlying backend, so
838 that it can apply optimizations depending on the limit.
842 that it can apply optimizations depending on the limit.
839 :param pre_load: Optional. List of commit attributes to load.
843 :param pre_load: Optional. List of commit attributes to load.
840 """
844 """
841 raise NotImplementedError
845 raise NotImplementedError
842
846
843 def get_file_annotate(self, path, pre_load=None):
847 def get_file_annotate(self, path, pre_load=None):
844 """
848 """
845 Returns a generator of four element tuples with
849 Returns a generator of four element tuples with
846 lineno, sha, commit lazy loader and line
850 lineno, sha, commit lazy loader and line
847
851
848 :param pre_load: Optional. List of commit attributes to load.
852 :param pre_load: Optional. List of commit attributes to load.
849 """
853 """
850 raise NotImplementedError
854 raise NotImplementedError
851
855
852 def get_nodes(self, path):
856 def get_nodes(self, path):
853 """
857 """
854 Returns combined ``DirNode`` and ``FileNode`` objects list representing
858 Returns combined ``DirNode`` and ``FileNode`` objects list representing
855 state of commit at the given ``path``.
859 state of commit at the given ``path``.
856
860
857 :raises ``CommitError``: if node at the given ``path`` is not an
861 :raises ``CommitError``: if node at the given ``path`` is not an
858 instance of ``DirNode``
862 instance of ``DirNode``
859 """
863 """
860 raise NotImplementedError
864 raise NotImplementedError
861
865
862 def get_node(self, path):
866 def get_node(self, path):
863 """
867 """
864 Returns ``Node`` object from the given ``path``.
868 Returns ``Node`` object from the given ``path``.
865
869
866 :raises ``NodeDoesNotExistError``: if there is no node at the given
870 :raises ``NodeDoesNotExistError``: if there is no node at the given
867 ``path``
871 ``path``
868 """
872 """
869 raise NotImplementedError
873 raise NotImplementedError
870
874
871 def get_largefile_node(self, path):
875 def get_largefile_node(self, path):
872 """
876 """
873 Returns the path to largefile from Mercurial storage.
877 Returns the path to largefile from Mercurial storage.
874 """
878 """
875 raise NotImplementedError
879 raise NotImplementedError
876
880
877 def archive_repo(self, file_path, kind='tgz', subrepos=None,
881 def archive_repo(self, file_path, kind='tgz', subrepos=None,
878 prefix=None, write_metadata=False, mtime=None):
882 prefix=None, write_metadata=False, mtime=None):
879 """
883 """
880 Creates an archive containing the contents of the repository.
884 Creates an archive containing the contents of the repository.
881
885
882 :param file_path: path to the file in which to create the archive.
886 :param file_path: path to the file in which to create the archive.
883 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
887 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
884 :param prefix: name of root directory in archive.
888 :param prefix: name of root directory in archive.
885 Default is repository name and commit's short_id joined with dash:
889 Default is repository name and commit's short_id joined with dash:
886 ``"{repo_name}-{short_id}"``.
890 ``"{repo_name}-{short_id}"``.
887 :param write_metadata: write a metadata file into archive.
891 :param write_metadata: write a metadata file into archive.
888 :param mtime: custom modification time for archive creation, defaults
892 :param mtime: custom modification time for archive creation, defaults
889 to time.time() if not given.
893 to time.time() if not given.
890
894
891 :raise VCSError: If prefix has a problem.
895 :raise VCSError: If prefix has a problem.
892 """
896 """
893 allowed_kinds = settings.ARCHIVE_SPECS.keys()
897 allowed_kinds = settings.ARCHIVE_SPECS.keys()
894 if kind not in allowed_kinds:
898 if kind not in allowed_kinds:
895 raise ImproperArchiveTypeError(
899 raise ImproperArchiveTypeError(
896 'Archive kind (%s) not supported use one of %s' %
900 'Archive kind (%s) not supported use one of %s' %
897 (kind, allowed_kinds))
901 (kind, allowed_kinds))
898
902
899 prefix = self._validate_archive_prefix(prefix)
903 prefix = self._validate_archive_prefix(prefix)
900
904
901 mtime = mtime or time.mktime(self.date.timetuple())
905 mtime = mtime or time.mktime(self.date.timetuple())
902
906
903 file_info = []
907 file_info = []
904 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
908 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
905 for _r, _d, files in cur_rev.walk('/'):
909 for _r, _d, files in cur_rev.walk('/'):
906 for f in files:
910 for f in files:
907 f_path = os.path.join(prefix, f.path)
911 f_path = os.path.join(prefix, f.path)
908 file_info.append(
912 file_info.append(
909 (f_path, f.mode, f.is_link(), f.raw_bytes))
913 (f_path, f.mode, f.is_link(), f.raw_bytes))
910
914
911 if write_metadata:
915 if write_metadata:
912 metadata = [
916 metadata = [
913 ('repo_name', self.repository.name),
917 ('repo_name', self.repository.name),
914 ('rev', self.raw_id),
918 ('rev', self.raw_id),
915 ('create_time', mtime),
919 ('create_time', mtime),
916 ('branch', self.branch),
920 ('branch', self.branch),
917 ('tags', ','.join(self.tags)),
921 ('tags', ','.join(self.tags)),
918 ]
922 ]
919 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
923 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
920 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
924 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
921
925
922 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
926 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
923
927
924 def _validate_archive_prefix(self, prefix):
928 def _validate_archive_prefix(self, prefix):
925 if prefix is None:
929 if prefix is None:
926 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
930 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
927 repo_name=safe_str(self.repository.name),
931 repo_name=safe_str(self.repository.name),
928 short_id=self.short_id)
932 short_id=self.short_id)
929 elif not isinstance(prefix, str):
933 elif not isinstance(prefix, str):
930 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
934 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
931 elif prefix.startswith('/'):
935 elif prefix.startswith('/'):
932 raise VCSError("Prefix cannot start with leading slash")
936 raise VCSError("Prefix cannot start with leading slash")
933 elif prefix.strip() == '':
937 elif prefix.strip() == '':
934 raise VCSError("Prefix cannot be empty")
938 raise VCSError("Prefix cannot be empty")
935 return prefix
939 return prefix
936
940
937 @LazyProperty
941 @LazyProperty
938 def root(self):
942 def root(self):
939 """
943 """
940 Returns ``RootNode`` object for this commit.
944 Returns ``RootNode`` object for this commit.
941 """
945 """
942 return self.get_node('')
946 return self.get_node('')
943
947
944 def next(self, branch=None):
948 def next(self, branch=None):
945 """
949 """
946 Returns the next commit after the current one. If ``branch`` is given, it
950 Returns the next commit after the current one. If ``branch`` is given, it
947 will return the next commit belonging to that branch.
951 will return the next commit belonging to that branch.
948
952
949 :param branch: show commits within the given named branch
953 :param branch: show commits within the given named branch
950 """
954 """
951 indexes = xrange(self.idx + 1, self.repository.count())
955 indexes = xrange(self.idx + 1, self.repository.count())
952 return self._find_next(indexes, branch)
956 return self._find_next(indexes, branch)
953
957
954 def prev(self, branch=None):
958 def prev(self, branch=None):
955 """
959 """
956 Returns the previous commit before the current one. If ``branch`` is
960 Returns the previous commit before the current one. If ``branch`` is
957 given, it will return the previous commit belonging to that branch.
961 given, it will return the previous commit belonging to that branch.
958
962
959 :param branch: show commit within the given named branch
963 :param branch: show commit within the given named branch
960 """
964 """
961 indexes = xrange(self.idx - 1, -1, -1)
965 indexes = xrange(self.idx - 1, -1, -1)
962 return self._find_next(indexes, branch)
966 return self._find_next(indexes, branch)
963
967
964 def _find_next(self, indexes, branch=None):
968 def _find_next(self, indexes, branch=None):
965 if branch and self.branch != branch:
969 if branch and self.branch != branch:
966 raise VCSError('Branch option used on commit not belonging '
970 raise VCSError('Branch option used on commit not belonging '
967 'to that branch')
971 'to that branch')
968
972
969 for next_idx in indexes:
973 for next_idx in indexes:
970 commit = self.repository.get_commit(commit_idx=next_idx)
974 commit = self.repository.get_commit(commit_idx=next_idx)
971 if branch and branch != commit.branch:
975 if branch and branch != commit.branch:
972 continue
976 continue
973 return commit
977 return commit
974 raise CommitDoesNotExistError
978 raise CommitDoesNotExistError
975
979
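# Illustrative sketch of commit navigation with ``next``/``prev``. ``repo``
# is an assumed, already-initialized repository:
#
#     tip = repo.get_commit()                   # most recent commit
#     older = tip.prev()                        # previous commit by index
#     same_branch = tip.prev(branch=tip.branch)
#     try:
#         newer = tip.next()                    # at the tip this raises
#     except CommitDoesNotExistError:
#         newer = None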
976 def diff(self, ignore_whitespace=True, context=3):
980 def diff(self, ignore_whitespace=True, context=3):
977 """
981 """
978 Returns a `Diff` object representing the change made by this commit.
982 Returns a `Diff` object representing the change made by this commit.
979 """
983 """
980 parent = (
984 parent = (
981 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
985 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
982 diff = self.repository.get_diff(
986 diff = self.repository.get_diff(
983 parent, self,
987 parent, self,
984 ignore_whitespace=ignore_whitespace,
988 ignore_whitespace=ignore_whitespace,
985 context=context)
989 context=context)
986 return diff
990 return diff
987
991
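# Illustrative sketch of ``diff()``: the change made by a single commit,
# computed against its first parent (or EMPTY_COMMIT for a root commit).
# ``repo`` is an assumed, already-initialized repository:
#
#     commit = repo.get_commit()
#     diff = commit.diff(ignore_whitespace=True, context=3)
#     raw_text = diff.raw                       # plain-text, git-like diff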
988 @LazyProperty
992 @LazyProperty
989 def added(self):
993 def added(self):
990 """
994 """
991 Returns list of added ``FileNode`` objects.
995 Returns list of added ``FileNode`` objects.
992 """
996 """
993 raise NotImplementedError
997 raise NotImplementedError
994
998
995 @LazyProperty
999 @LazyProperty
996 def changed(self):
1000 def changed(self):
997 """
1001 """
998 Returns list of modified ``FileNode`` objects.
1002 Returns list of modified ``FileNode`` objects.
999 """
1003 """
1000 raise NotImplementedError
1004 raise NotImplementedError
1001
1005
1002 @LazyProperty
1006 @LazyProperty
1003 def removed(self):
1007 def removed(self):
1004 """
1008 """
1005 Returns list of removed ``FileNode`` objects.
1009 Returns list of removed ``FileNode`` objects.
1006 """
1010 """
1007 raise NotImplementedError
1011 raise NotImplementedError
1008
1012
1009 @LazyProperty
1013 @LazyProperty
1010 def size(self):
1014 def size(self):
1011 """
1015 """
1012 Returns total number of bytes from contents of all filenodes.
1016 Returns total number of bytes from contents of all filenodes.
1013 """
1017 """
1014 return sum((node.size for node in self.get_filenodes_generator()))
1018 return sum((node.size for node in self.get_filenodes_generator()))
1015
1019
1016 def walk(self, topurl=''):
1020 def walk(self, topurl=''):
1017 """
1021 """
1018 Similar to the os.walk method. Instead of the filesystem, it walks
1022 Similar to the os.walk method. Instead of the filesystem, it walks
1019 through the commit starting at the given ``topurl``. Returns a generator of tuples
1023 through the commit starting at the given ``topurl``. Returns a generator of tuples
1020 (topnode, dirnodes, filenodes).
1024 (topnode, dirnodes, filenodes).
1021 """
1025 """
1022 topnode = self.get_node(topurl)
1026 topnode = self.get_node(topurl)
1023 if not topnode.is_dir():
1027 if not topnode.is_dir():
1024 return
1028 return
1025 yield (topnode, topnode.dirs, topnode.files)
1029 yield (topnode, topnode.dirs, topnode.files)
1026 for dirnode in topnode.dirs:
1030 for dirnode in topnode.dirs:
1027 for tup in self.walk(dirnode.path):
1031 for tup in self.walk(dirnode.path):
1028 yield tup
1032 yield tup
1029
1033
1030 def get_filenodes_generator(self):
1034 def get_filenodes_generator(self):
1031 """
1035 """
1032 Returns generator that yields *all* file nodes.
1036 Returns generator that yields *all* file nodes.
1033 """
1037 """
1034 for topnode, dirs, files in self.walk():
1038 for topnode, dirs, files in self.walk():
1035 for node in files:
1039 for node in files:
1036 yield node
1040 yield node
1037
1041
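# Illustrative sketch of walking a commit tree. ``commit`` is assumed to be
# a commit obtained from an initialized repository:
#
#     for topnode, dirnodes, filenodes in commit.walk('/'):
#         for filenode in filenodes:
#             print '%s: %d bytes' % (filenode.path, filenode.size)
#
#     # or, flat iteration over every file node:
#     total_bytes = sum(node.size for node in commit.get_filenodes_generator())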
1038 #
1042 #
1039 # Utilities for sub classes to support consistent behavior
1043 # Utilities for sub classes to support consistent behavior
1040 #
1044 #
1041
1045
1042 def no_node_at_path(self, path):
1046 def no_node_at_path(self, path):
1043 return NodeDoesNotExistError(
1047 return NodeDoesNotExistError(
1044 "There is no file nor directory at the given path: "
1048 "There is no file nor directory at the given path: "
1045 "'%s' at commit %s" % (path, self.short_id))
1049 "'%s' at commit %s" % (path, self.short_id))
1046
1050
1047 def _fix_path(self, path):
1051 def _fix_path(self, path):
1048 """
1052 """
1049 Paths are stored without a trailing slash, so we need to get rid of it if
1053 Paths are stored without a trailing slash, so we need to get rid of it if
1050 needed.
1054 needed.
1051 """
1055 """
1052 return path.rstrip('/')
1056 return path.rstrip('/')
1053
1057
1054 #
1058 #
1055 # Deprecated API based on changesets
1059 # Deprecated API based on changesets
1056 #
1060 #
1057
1061
1058 @property
1062 @property
1059 def revision(self):
1063 def revision(self):
1060 warnings.warn("Use idx instead", DeprecationWarning)
1064 warnings.warn("Use idx instead", DeprecationWarning)
1061 return self.idx
1065 return self.idx
1062
1066
1063 @revision.setter
1067 @revision.setter
1064 def revision(self, value):
1068 def revision(self, value):
1065 warnings.warn("Use idx instead", DeprecationWarning)
1069 warnings.warn("Use idx instead", DeprecationWarning)
1066 self.idx = value
1070 self.idx = value
1067
1071
1068 def get_file_changeset(self, path):
1072 def get_file_changeset(self, path):
1069 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1073 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1070 return self.get_file_commit(path)
1074 return self.get_file_commit(path)
1071
1075
1072
1076
1073 class BaseChangesetClass(type):
1077 class BaseChangesetClass(type):
1074
1078
1075 def __instancecheck__(self, instance):
1079 def __instancecheck__(self, instance):
1076 return isinstance(instance, BaseCommit)
1080 return isinstance(instance, BaseCommit)
1077
1081
1078
1082
1079 class BaseChangeset(BaseCommit):
1083 class BaseChangeset(BaseCommit):
1080
1084
1081 __metaclass__ = BaseChangesetClass
1085 __metaclass__ = BaseChangesetClass
1082
1086
1083 def __new__(cls, *args, **kwargs):
1087 def __new__(cls, *args, **kwargs):
1084 warnings.warn(
1088 warnings.warn(
1085 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1089 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1086 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1090 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1087
1091
1088
1092
1089 class BaseInMemoryCommit(object):
1093 class BaseInMemoryCommit(object):
1090 """
1094 """
1091 Represents differences between repository's state (most recent head) and
1095 Represents differences between repository's state (most recent head) and
1092 changes made *in place*.
1096 changes made *in place*.
1093
1097
1094 **Attributes**
1098 **Attributes**
1095
1099
1096 ``repository``
1100 ``repository``
1097 repository object for this in-memory-commit
1101 repository object for this in-memory-commit
1098
1102
1099 ``added``
1103 ``added``
1100 list of ``FileNode`` objects marked as *added*
1104 list of ``FileNode`` objects marked as *added*
1101
1105
1102 ``changed``
1106 ``changed``
1103 list of ``FileNode`` objects marked as *changed*
1107 list of ``FileNode`` objects marked as *changed*
1104
1108
1105 ``removed``
1109 ``removed``
1106 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1110 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1107 *removed*
1111 *removed*
1108
1112
1109 ``parents``
1113 ``parents``
1110 list of :class:`BaseCommit` instances representing parents of
1114 list of :class:`BaseCommit` instances representing parents of
1111 the in-memory commit. Should always be a 2-element sequence.
1115 the in-memory commit. Should always be a 2-element sequence.
1112
1116
1113 """
1117 """
1114
1118
1115 def __init__(self, repository):
1119 def __init__(self, repository):
1116 self.repository = repository
1120 self.repository = repository
1117 self.added = []
1121 self.added = []
1118 self.changed = []
1122 self.changed = []
1119 self.removed = []
1123 self.removed = []
1120 self.parents = []
1124 self.parents = []
1121
1125
1122 def add(self, *filenodes):
1126 def add(self, *filenodes):
1123 """
1127 """
1124 Marks given ``FileNode`` objects as *to be committed*.
1128 Marks given ``FileNode`` objects as *to be committed*.
1125
1129
1126 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1130 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1127 latest commit
1131 latest commit
1128 :raises ``NodeAlreadyAddedError``: if node with same path is already
1132 :raises ``NodeAlreadyAddedError``: if node with same path is already
1129 marked as *added*
1133 marked as *added*
1130 """
1134 """
1131 # Check if not already marked as *added* first
1135 # Check if not already marked as *added* first
1132 for node in filenodes:
1136 for node in filenodes:
1133 if node.path in (n.path for n in self.added):
1137 if node.path in (n.path for n in self.added):
1134 raise NodeAlreadyAddedError(
1138 raise NodeAlreadyAddedError(
1135 "Such FileNode %s is already marked for addition"
1139 "Such FileNode %s is already marked for addition"
1136 % node.path)
1140 % node.path)
1137 for node in filenodes:
1141 for node in filenodes:
1138 self.added.append(node)
1142 self.added.append(node)
1139
1143
1140 def change(self, *filenodes):
1144 def change(self, *filenodes):
1141 """
1145 """
1142 Marks given ``FileNode`` objects to be *changed* in next commit.
1146 Marks given ``FileNode`` objects to be *changed* in next commit.
1143
1147
1144 :raises ``EmptyRepositoryError``: if there are no commits yet
1148 :raises ``EmptyRepositoryError``: if there are no commits yet
1145 :raises ``NodeAlreadyExistsError``: if node with same path is already
1149 :raises ``NodeAlreadyExistsError``: if node with same path is already
1146 marked to be *changed*
1150 marked to be *changed*
1147 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1151 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1148 marked to be *removed*
1152 marked to be *removed*
1149 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1153 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1150 commit
1154 commit
1151 :raises ``NodeNotChangedError``: if node hasn't really been changed
1155 :raises ``NodeNotChangedError``: if node hasn't really been changed
1152 """
1156 """
1153 for node in filenodes:
1157 for node in filenodes:
1154 if node.path in (n.path for n in self.removed):
1158 if node.path in (n.path for n in self.removed):
1155 raise NodeAlreadyRemovedError(
1159 raise NodeAlreadyRemovedError(
1156 "Node at %s is already marked as removed" % node.path)
1160 "Node at %s is already marked as removed" % node.path)
1157 try:
1161 try:
1158 self.repository.get_commit()
1162 self.repository.get_commit()
1159 except EmptyRepositoryError:
1163 except EmptyRepositoryError:
1160 raise EmptyRepositoryError(
1164 raise EmptyRepositoryError(
1161 "Nothing to change - try to *add* new nodes rather than "
1165 "Nothing to change - try to *add* new nodes rather than "
1162 "changing them")
1166 "changing them")
1163 for node in filenodes:
1167 for node in filenodes:
1164 if node.path in (n.path for n in self.changed):
1168 if node.path in (n.path for n in self.changed):
1165 raise NodeAlreadyChangedError(
1169 raise NodeAlreadyChangedError(
1166 "Node at '%s' is already marked as changed" % node.path)
1170 "Node at '%s' is already marked as changed" % node.path)
1167 self.changed.append(node)
1171 self.changed.append(node)
1168
1172
1169 def remove(self, *filenodes):
1173 def remove(self, *filenodes):
1170 """
1174 """
1171 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1175 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1172 *removed* in next commit.
1176 *removed* in next commit.
1173
1177
1174 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1178 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1175 be *removed*
1179 be *removed*
1176 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1180 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1177 be *changed*
1181 be *changed*
1178 """
1182 """
1179 for node in filenodes:
1183 for node in filenodes:
1180 if node.path in (n.path for n in self.removed):
1184 if node.path in (n.path for n in self.removed):
1181 raise NodeAlreadyRemovedError(
1185 raise NodeAlreadyRemovedError(
1182 "Node is already marked to for removal at %s" % node.path)
1186 "Node is already marked to for removal at %s" % node.path)
1183 if node.path in (n.path for n in self.changed):
1187 if node.path in (n.path for n in self.changed):
1184 raise NodeAlreadyChangedError(
1188 raise NodeAlreadyChangedError(
1185 "Node is already marked to be changed at %s" % node.path)
1189 "Node is already marked to be changed at %s" % node.path)
1186 # We only mark node as *removed* - real removal is done by
1190 # We only mark node as *removed* - real removal is done by
1187 # commit method
1191 # commit method
1188 self.removed.append(node)
1192 self.removed.append(node)
1189
1193
1190 def reset(self):
1194 def reset(self):
1191 """
1195 """
1192 Resets this instance to initial state (cleans ``added``, ``changed``
1196 Resets this instance to initial state (cleans ``added``, ``changed``
1193 and ``removed`` lists).
1197 and ``removed`` lists).
1194 """
1198 """
1195 self.added = []
1199 self.added = []
1196 self.changed = []
1200 self.changed = []
1197 self.removed = []
1201 self.removed = []
1198 self.parents = []
1202 self.parents = []
1199
1203
1200 def get_ipaths(self):
1204 def get_ipaths(self):
1201 """
1205 """
1202 Returns generator of paths from nodes marked as added, changed or
1206 Returns generator of paths from nodes marked as added, changed or
1203 removed.
1207 removed.
1204 """
1208 """
1205 for node in itertools.chain(self.added, self.changed, self.removed):
1209 for node in itertools.chain(self.added, self.changed, self.removed):
1206 yield node.path
1210 yield node.path
1207
1211
1208 def get_paths(self):
1212 def get_paths(self):
1209 """
1213 """
1210 Returns list of paths from nodes marked as added, changed or removed.
1214 Returns list of paths from nodes marked as added, changed or removed.
1211 """
1215 """
1212 return list(self.get_ipaths())
1216 return list(self.get_ipaths())
1213
1217
1214 def check_integrity(self, parents=None):
1218 def check_integrity(self, parents=None):
1215 """
1219 """
1216 Checks in-memory commit's integrity. Also, sets parents if not
1220 Checks in-memory commit's integrity. Also, sets parents if not
1217 already set.
1221 already set.
1218
1222
1219 :raises CommitError: if any error occurs (i.e.
1223 :raises CommitError: if any error occurs (i.e.
1220 ``NodeDoesNotExistError``).
1224 ``NodeDoesNotExistError``).
1221 """
1225 """
1222 if not self.parents:
1226 if not self.parents:
1223 parents = parents or []
1227 parents = parents or []
1224 if len(parents) == 0:
1228 if len(parents) == 0:
1225 try:
1229 try:
1226 parents = [self.repository.get_commit(), None]
1230 parents = [self.repository.get_commit(), None]
1227 except EmptyRepositoryError:
1231 except EmptyRepositoryError:
1228 parents = [None, None]
1232 parents = [None, None]
1229 elif len(parents) == 1:
1233 elif len(parents) == 1:
1230 parents += [None]
1234 parents += [None]
1231 self.parents = parents
1235 self.parents = parents
1232
1236
1233 # Local parents, only if not None
1237 # Local parents, only if not None
1234 parents = [p for p in self.parents if p]
1238 parents = [p for p in self.parents if p]
1235
1239
1236 # Check nodes marked as added
1240 # Check nodes marked as added
1237 for p in parents:
1241 for p in parents:
1238 for node in self.added:
1242 for node in self.added:
1239 try:
1243 try:
1240 p.get_node(node.path)
1244 p.get_node(node.path)
1241 except NodeDoesNotExistError:
1245 except NodeDoesNotExistError:
1242 pass
1246 pass
1243 else:
1247 else:
1244 raise NodeAlreadyExistsError(
1248 raise NodeAlreadyExistsError(
1245 "Node `%s` already exists at %s" % (node.path, p))
1249 "Node `%s` already exists at %s" % (node.path, p))
1246
1250
1247 # Check nodes marked as changed
1251 # Check nodes marked as changed
1248 missing = set(self.changed)
1252 missing = set(self.changed)
1249 not_changed = set(self.changed)
1253 not_changed = set(self.changed)
1250 if self.changed and not parents:
1254 if self.changed and not parents:
1251 raise NodeDoesNotExistError(str(self.changed[0].path))
1255 raise NodeDoesNotExistError(str(self.changed[0].path))
1252 for p in parents:
1256 for p in parents:
1253 for node in self.changed:
1257 for node in self.changed:
1254 try:
1258 try:
1255 old = p.get_node(node.path)
1259 old = p.get_node(node.path)
1256 missing.remove(node)
1260 missing.remove(node)
1257 # if content actually changed, remove node from not_changed
1261 # if content actually changed, remove node from not_changed
1258 if old.content != node.content:
1262 if old.content != node.content:
1259 not_changed.remove(node)
1263 not_changed.remove(node)
1260 except NodeDoesNotExistError:
1264 except NodeDoesNotExistError:
1261 pass
1265 pass
1262 if self.changed and missing:
1266 if self.changed and missing:
1263 raise NodeDoesNotExistError(
1267 raise NodeDoesNotExistError(
1264 "Node `%s` marked as modified but missing in parents: %s"
1268 "Node `%s` marked as modified but missing in parents: %s"
1265 % (node.path, parents))
1269 % (node.path, parents))
1266
1270
1267 if self.changed and not_changed:
1271 if self.changed and not_changed:
1268 raise NodeNotChangedError(
1272 raise NodeNotChangedError(
1269 "Node `%s` wasn't actually changed (parents: %s)"
1273 "Node `%s` wasn't actually changed (parents: %s)"
1270 % (not_changed.pop().path, parents))
1274 % (not_changed.pop().path, parents))
1271
1275
1272 # Check nodes marked as removed
1276 # Check nodes marked as removed
1273 if self.removed and not parents:
1277 if self.removed and not parents:
1274 raise NodeDoesNotExistError(
1278 raise NodeDoesNotExistError(
1275 "Cannot remove node at %s as there "
1279 "Cannot remove node at %s as there "
1276 "were no parents specified" % self.removed[0].path)
1280 "were no parents specified" % self.removed[0].path)
1277 really_removed = set()
1281 really_removed = set()
1278 for p in parents:
1282 for p in parents:
1279 for node in self.removed:
1283 for node in self.removed:
1280 try:
1284 try:
1281 p.get_node(node.path)
1285 p.get_node(node.path)
1282 really_removed.add(node)
1286 really_removed.add(node)
1283 except CommitError:
1287 except CommitError:
1284 pass
1288 pass
1285 not_removed = set(self.removed) - really_removed
1289 not_removed = set(self.removed) - really_removed
1286 if not_removed:
1290 if not_removed:
1287 # TODO: johbo: This code branch does not seem to be covered
1291 # TODO: johbo: This code branch does not seem to be covered
1288 raise NodeDoesNotExistError(
1292 raise NodeDoesNotExistError(
1289 "Cannot remove node at %s from "
1293 "Cannot remove node at %s from "
1290 "following parents: %s" % (not_removed, parents))
1294 "following parents: %s" % (not_removed, parents))
1291
1295
1292 def commit(
1296 def commit(
1293 self, message, author, parents=None, branch=None, date=None,
1297 self, message, author, parents=None, branch=None, date=None,
1294 **kwargs):
1298 **kwargs):
1295 """
1299 """
1296 Performs in-memory commit (doesn't check workdir in any way) and
1300 Performs in-memory commit (doesn't check workdir in any way) and
1297 returns newly created :class:`BaseCommit`. Updates repository's
1301 returns newly created :class:`BaseCommit`. Updates repository's
1298 attribute `commits`.
1302 attribute `commits`.
1299
1303
1300 .. note::
1304 .. note::
1301
1305
1302 While overriding this method, each backend should call
1306 While overriding this method, each backend should call
1303 ``self.check_integrity(parents)`` first.
1307 ``self.check_integrity(parents)`` first.
1304
1308
1305 :param message: message of the commit
1309 :param message: message of the commit
1306 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1310 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1307 :param parents: single parent or sequence of parents from which commit
1311 :param parents: single parent or sequence of parents from which commit
1308 would be derived
1312 would be derived
1309 :param date: ``datetime.datetime`` instance. Defaults to
1313 :param date: ``datetime.datetime`` instance. Defaults to
1310 ``datetime.datetime.now()``.
1314 ``datetime.datetime.now()``.
1311 :param branch: branch name, as a string. If none is given, the backend's
1315 :param branch: branch name, as a string. If none is given, the backend's
1312 default branch will be used.
1316 default branch will be used.
1313
1317
1314 :raises ``CommitError``: if any error occurs while committing
1318 :raises ``CommitError``: if any error occurs while committing
1315 """
1319 """
1316 raise NotImplementedError
1320 raise NotImplementedError
1317
1321
1318
1322
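# Illustrative sketch of the in-memory commit workflow described above.
# It assumes an initialized ``repo`` and that ``FileNode`` lives in
# ``rhodecode.lib.vcs.nodes`` (import path assumed):
#
#     from rhodecode.lib.vcs.nodes import FileNode
#
#     imc = repo.in_memory_commit
#     imc.add(FileNode('docs/readme.txt', content='hello'))
#     imc.change(FileNode('setup.py', content='# updated'))
#     new_commit = imc.commit(
#         message=u'Add readme, tweak setup.py',
#         author=u'Joe Doe <joe.doe@example.com>')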
1319 class BaseInMemoryChangesetClass(type):
1323 class BaseInMemoryChangesetClass(type):
1320
1324
1321 def __instancecheck__(self, instance):
1325 def __instancecheck__(self, instance):
1322 return isinstance(instance, BaseInMemoryCommit)
1326 return isinstance(instance, BaseInMemoryCommit)
1323
1327
1324
1328
1325 class BaseInMemoryChangeset(BaseInMemoryCommit):
1329 class BaseInMemoryChangeset(BaseInMemoryCommit):
1326
1330
1327 __metaclass__ = BaseInMemoryChangesetClass
1331 __metaclass__ = BaseInMemoryChangesetClass
1328
1332
1329 def __new__(cls, *args, **kwargs):
1333 def __new__(cls, *args, **kwargs):
1330 warnings.warn(
1334 warnings.warn(
1331 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1335 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1332 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1336 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1333
1337
1334
1338
1335 class EmptyCommit(BaseCommit):
1339 class EmptyCommit(BaseCommit):
1336 """
1340 """
1337 A dummy empty commit. It's possible to pass a hash when creating
1341 A dummy empty commit. It's possible to pass a hash when creating
1338 an EmptyCommit.
1342 an EmptyCommit.
1339 """
1343 """
1340
1344
1341 def __init__(
1345 def __init__(
1342 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1346 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1343 message='', author='', date=None):
1347 message='', author='', date=None):
1344 self._empty_commit_id = commit_id
1348 self._empty_commit_id = commit_id
1345 # TODO: johbo: Solve idx parameter, default value does not make
1349 # TODO: johbo: Solve idx parameter, default value does not make
1346 # too much sense
1350 # too much sense
1347 self.idx = idx
1351 self.idx = idx
1348 self.message = message
1352 self.message = message
1349 self.author = author
1353 self.author = author
1350 self.date = date or datetime.datetime.fromtimestamp(0)
1354 self.date = date or datetime.datetime.fromtimestamp(0)
1351 self.repository = repo
1355 self.repository = repo
1352 self.alias = alias
1356 self.alias = alias
1353
1357
1354 @LazyProperty
1358 @LazyProperty
1355 def raw_id(self):
1359 def raw_id(self):
1356 """
1360 """
1357 Returns raw string identifying this commit, useful for web
1361 Returns raw string identifying this commit, useful for web
1358 representation.
1362 representation.
1359 """
1363 """
1360
1364
1361 return self._empty_commit_id
1365 return self._empty_commit_id
1362
1366
1363 @LazyProperty
1367 @LazyProperty
1364 def branch(self):
1368 def branch(self):
1365 if self.alias:
1369 if self.alias:
1366 from rhodecode.lib.vcs.backends import get_backend
1370 from rhodecode.lib.vcs.backends import get_backend
1367 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1371 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1368
1372
1369 @LazyProperty
1373 @LazyProperty
1370 def short_id(self):
1374 def short_id(self):
1371 return self.raw_id[:12]
1375 return self.raw_id[:12]
1372
1376
1373 @LazyProperty
1377 @LazyProperty
1374 def id(self):
1378 def id(self):
1375 return self.raw_id
1379 return self.raw_id
1376
1380
1377 def get_file_commit(self, path):
1381 def get_file_commit(self, path):
1378 return self
1382 return self
1379
1383
1380 def get_file_content(self, path):
1384 def get_file_content(self, path):
1381 return u''
1385 return u''
1382
1386
1383 def get_file_size(self, path):
1387 def get_file_size(self, path):
1384 return 0
1388 return 0
1385
1389
1386
1390
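# Illustrative sketch: ``EmptyCommit`` acts as a placeholder commit, e.g.
# when rendering an empty repository or diffing against "nothing". ``repo``
# is an assumed, already-initialized repository:
#
#     empty = EmptyCommit(repo=repo, alias='hg')
#     empty.raw_id                      # '0' * 40
#     empty.branch                      # backend's default branch, e.g. 'default'
#     empty.get_file_content('any/path')  # always u''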
1387 class EmptyChangesetClass(type):
1391 class EmptyChangesetClass(type):
1388
1392
1389 def __instancecheck__(self, instance):
1393 def __instancecheck__(self, instance):
1390 return isinstance(instance, EmptyCommit)
1394 return isinstance(instance, EmptyCommit)
1391
1395
1392
1396
1393 class EmptyChangeset(EmptyCommit):
1397 class EmptyChangeset(EmptyCommit):
1394
1398
1395 __metaclass__ = EmptyChangesetClass
1399 __metaclass__ = EmptyChangesetClass
1396
1400
1397 def __new__(cls, *args, **kwargs):
1401 def __new__(cls, *args, **kwargs):
1398 warnings.warn(
1402 warnings.warn(
1399 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1403 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1400 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1404 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1401
1405
1402 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1406 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1403 alias=None, revision=-1, message='', author='', date=None):
1407 alias=None, revision=-1, message='', author='', date=None):
1404 if requested_revision is not None:
1408 if requested_revision is not None:
1405 warnings.warn(
1409 warnings.warn(
1406 "Parameter requested_revision not supported anymore",
1410 "Parameter requested_revision not supported anymore",
1407 DeprecationWarning)
1411 DeprecationWarning)
1408 super(EmptyChangeset, self).__init__(
1412 super(EmptyChangeset, self).__init__(
1409 commit_id=cs, repo=repo, alias=alias, idx=revision,
1413 commit_id=cs, repo=repo, alias=alias, idx=revision,
1410 message=message, author=author, date=date)
1414 message=message, author=author, date=date)
1411
1415
1412 @property
1416 @property
1413 def revision(self):
1417 def revision(self):
1414 warnings.warn("Use idx instead", DeprecationWarning)
1418 warnings.warn("Use idx instead", DeprecationWarning)
1415 return self.idx
1419 return self.idx
1416
1420
1417 @revision.setter
1421 @revision.setter
1418 def revision(self, value):
1422 def revision(self, value):
1419 warnings.warn("Use idx instead", DeprecationWarning)
1423 warnings.warn("Use idx instead", DeprecationWarning)
1420 self.idx = value
1424 self.idx = value
1421
1425
1422
1426
1423 class CollectionGenerator(object):
1427 class CollectionGenerator(object):
1424
1428
1425 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1429 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1426 self.repo = repo
1430 self.repo = repo
1427 self.commit_ids = commit_ids
1431 self.commit_ids = commit_ids
1428 # TODO: (oliver) this isn't currently hooked up
1432 # TODO: (oliver) this isn't currently hooked up
1429 self.collection_size = None
1433 self.collection_size = None
1430 self.pre_load = pre_load
1434 self.pre_load = pre_load
1431
1435
1432 def __len__(self):
1436 def __len__(self):
1433 if self.collection_size is not None:
1437 if self.collection_size is not None:
1434 return self.collection_size
1438 return self.collection_size
1435 return self.commit_ids.__len__()
1439 return self.commit_ids.__len__()
1436
1440
1437 def __iter__(self):
1441 def __iter__(self):
1438 for commit_id in self.commit_ids:
1442 for commit_id in self.commit_ids:
1439 # TODO: johbo: Mercurial passes in commit indices or commit ids
1443 # TODO: johbo: Mercurial passes in commit indices or commit ids
1440 yield self._commit_factory(commit_id)
1444 yield self._commit_factory(commit_id)
1441
1445
1442 def _commit_factory(self, commit_id):
1446 def _commit_factory(self, commit_id):
1443 """
1447 """
1444 Allows backends to override the way commits are generated.
1448 Allows backends to override the way commits are generated.
1445 """
1449 """
1446 return self.repo.get_commit(commit_id=commit_id,
1450 return self.repo.get_commit(commit_id=commit_id,
1447 pre_load=self.pre_load)
1451 pre_load=self.pre_load)
1448
1452
1449 def __getslice__(self, i, j):
1453 def __getslice__(self, i, j):
1450 """
1454 """
1451 Returns a sliced collection generator over the repository's commits
1455 Returns a sliced collection generator over the repository's commits
1452 """
1456 """
1453 commit_ids = self.commit_ids[i:j]
1457 commit_ids = self.commit_ids[i:j]
1454 return self.__class__(
1458 return self.__class__(
1455 self.repo, commit_ids, pre_load=self.pre_load)
1459 self.repo, commit_ids, pre_load=self.pre_load)
1456
1460
1457 def __repr__(self):
1461 def __repr__(self):
1458 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1462 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1459
1463
1460
1464
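# Illustrative sketch of ``CollectionGenerator``: a lazy, sliceable view
# over commit ids that only materializes commits during iteration. ``repo``
# is an assumed, already-initialized repository:
#
#     commits = CollectionGenerator(repo, repo.commit_ids)
#     len(commits)                      # number of commit ids
#     first_five = commits[:5]          # slicing returns another generator
#     for commit in first_five:         # commits are created here, lazily
#         print commit.short_id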
1461 class Config(object):
1465 class Config(object):
1462 """
1466 """
1463 Represents the configuration for a repository.
1467 Represents the configuration for a repository.
1464
1468
1465 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1469 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1466 standard library. It implements only the needed subset.
1470 standard library. It implements only the needed subset.
1467 """
1471 """
1468
1472
1469 def __init__(self):
1473 def __init__(self):
1470 self._values = {}
1474 self._values = {}
1471
1475
1472 def copy(self):
1476 def copy(self):
1473 clone = Config()
1477 clone = Config()
1474 for section, values in self._values.items():
1478 for section, values in self._values.items():
1475 clone._values[section] = values.copy()
1479 clone._values[section] = values.copy()
1476 return clone
1480 return clone
1477
1481
1478 def __repr__(self):
1482 def __repr__(self):
1479 return '<Config(%s sections) at %s>' % (
1483 return '<Config(%s sections) at %s>' % (
1480 len(self._values), hex(id(self)))
1484 len(self._values), hex(id(self)))
1481
1485
1482 def items(self, section):
1486 def items(self, section):
1483 return self._values.get(section, {}).iteritems()
1487 return self._values.get(section, {}).iteritems()
1484
1488
1485 def get(self, section, option):
1489 def get(self, section, option):
1486 return self._values.get(section, {}).get(option)
1490 return self._values.get(section, {}).get(option)
1487
1491
1488 def set(self, section, option, value):
1492 def set(self, section, option, value):
1489 section_values = self._values.setdefault(section, {})
1493 section_values = self._values.setdefault(section, {})
1490 section_values[option] = value
1494 section_values[option] = value
1491
1495
1492 def clear_section(self, section):
1496 def clear_section(self, section):
1493 self._values[section] = {}
1497 self._values[section] = {}
1494
1498
1495 def serialize(self):
1499 def serialize(self):
1496 """
1500 """
1497 Creates a list of 3-tuples (section, key, value) representing
1501 Creates a list of 3-tuples (section, key, value) representing
1498 this config object.
1502 this config object.
1499 """
1503 """
1500 items = []
1504 items = []
1501 for section in self._values:
1505 for section in self._values:
1502 for option, value in self._values[section].items():
1506 for option, value in self._values[section].items():
1503 items.append(
1507 items.append(
1504 (safe_str(section), safe_str(option), safe_str(value)))
1508 (safe_str(section), safe_str(option), safe_str(value)))
1505 return items
1509 return items
1506
1510
1507
1511
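# Illustrative sketch of the ``Config`` object (values are example data):
#
#     config = Config()
#     config.set('ui', 'username', 'Joe Doe <joe.doe@example.com>')
#     config.get('ui', 'username')      # 'Joe Doe <joe.doe@example.com>'
#     config.serialize()                # [('ui', 'username', 'Joe Doe <...>')]
#     clone = config.copy()             # per-section copies of the value dicts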
1508 class Diff(object):
1512 class Diff(object):
1509 """
1513 """
1510 Represents a diff result from a repository backend.
1514 Represents a diff result from a repository backend.
1511
1515
1512 Subclasses have to provide a backend specific value for :attr:`_header_re`.
1516 Subclasses have to provide a backend specific value for :attr:`_header_re`.
1513 """
1517 """
1514
1518
1515 _header_re = None
1519 _header_re = None
1516
1520
1517 def __init__(self, raw_diff):
1521 def __init__(self, raw_diff):
1518 self.raw = raw_diff
1522 self.raw = raw_diff
1519
1523
1520 def chunks(self):
1524 def chunks(self):
1521 """
1525 """
1522 Splits the diff into chunks of separate ``diff --git a/file b/file`` sections.
1526 Splits the diff into chunks of separate ``diff --git a/file b/file`` sections.
1523 To keep diffs consistent we must prepend each chunk with \n and make sure
1527 To keep diffs consistent we must prepend each chunk with \n and make sure
1524 we can detect the last chunk, as it also has a special rule.
1528 we can detect the last chunk, as it also has a special rule.
1525 """
1529 """
1526 chunks = ('\n' + self.raw).split('\ndiff --git')[1:]
1530 chunks = ('\n' + self.raw).split('\ndiff --git')[1:]
1527 total_chunks = len(chunks)
1531 total_chunks = len(chunks)
1528 return (DiffChunk(chunk, self, cur_chunk == total_chunks)
1532 return (DiffChunk(chunk, self, cur_chunk == total_chunks)
1529 for cur_chunk, chunk in enumerate(chunks, start=1))
1533 for cur_chunk, chunk in enumerate(chunks, start=1))
1530
1534
1531
1535
1532 class DiffChunk(object):
1536 class DiffChunk(object):
1533
1537
1534 def __init__(self, chunk, diff, last_chunk):
1538 def __init__(self, chunk, diff, last_chunk):
1535 self._diff = diff
1539 self._diff = diff
1536
1540
1537 # since we split by \ndiff --git that part is lost from original diff
1541 # since we split by \ndiff --git that part is lost from original diff
1538 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1542 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1539 if not last_chunk:
1543 if not last_chunk:
1540 chunk += '\n'
1544 chunk += '\n'
1541
1545
1542 match = self._diff._header_re.match(chunk)
1546 match = self._diff._header_re.match(chunk)
1543 self.header = match.groupdict()
1547 self.header = match.groupdict()
1544 self.diff = chunk[match.end():]
1548 self.diff = chunk[match.end():]
1545 self.raw = chunk
1549 self.raw = chunk
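# Illustrative sketch of splitting a raw diff into per-file chunks. A
# backend-specific subclass (e.g. ``MercurialDiff``) supplies ``_header_re``;
# ``raw_diff_text`` is an assumed example input:
#
#     diff = MercurialDiff(raw_diff_text)
#     for chunk in diff.chunks():
#         print chunk.header            # dict parsed from the "diff --git" header
#         print chunk.diff              # body of the chunk, without the header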
@@ -1,803 +1,808 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2016 RhodeCode GmbH
3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import os
27 import os
28 import shutil
28 import shutil
29 import urllib
29 import urllib
30
30
31 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import Lazy as LazyProperty
32
32
33 from rhodecode.lib.compat import OrderedDict
33 from rhodecode.lib.compat import OrderedDict
34 from rhodecode.lib.datelib import (
34 from rhodecode.lib.datelib import (
35 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate,
35 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate,
36 date_astimestamp)
36 date_astimestamp)
37 from rhodecode.lib.utils import safe_unicode, safe_str
37 from rhodecode.lib.utils import safe_unicode, safe_str
38 from rhodecode.lib.vcs import connection
38 from rhodecode.lib.vcs import connection
39 from rhodecode.lib.vcs.backends.base import (
39 from rhodecode.lib.vcs.backends.base import (
40 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 BaseRepository, CollectionGenerator, Config, MergeResponse,
41 MergeFailureReason, Reference)
41 MergeFailureReason, Reference)
42 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
43 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
44 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
45 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
46 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
47 TagDoesNotExistError, CommitDoesNotExistError)
47 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
48
48
49 hexlify = binascii.hexlify
49 hexlify = binascii.hexlify
50 nullid = "\0" * 20
50 nullid = "\0" * 20
51
51
52 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
53
53
54
54
55 class MercurialRepository(BaseRepository):
55 class MercurialRepository(BaseRepository):
56 """
56 """
57 Mercurial repository backend
57 Mercurial repository backend
58 """
58 """
59 DEFAULT_BRANCH_NAME = 'default'
59 DEFAULT_BRANCH_NAME = 'default'
60
60
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 update_after_clone=False, with_wire=None):
62 update_after_clone=False, with_wire=None):
63 """
63 """
64 Raises RepositoryError if the repository could not be found at the given
64 Raises RepositoryError if the repository could not be found at the given
65 ``repo_path``.
65 ``repo_path``.
66
66
67 :param repo_path: local path of the repository
67 :param repo_path: local path of the repository
68 :param config: config object containing the repo configuration
68 :param config: config object containing the repo configuration
69 :param create=False: if set to True, would try to create repository if
69 :param create=False: if set to True, would try to create repository if
70 it does not exist rather than raising exception
70 it does not exist rather than raising exception
71 :param src_url=None: would try to clone repository from given location
71 :param src_url=None: would try to clone repository from given location
72 :param update_after_clone=False: sets update of working copy after
72 :param update_after_clone=False: sets update of working copy after
73 making a clone
73 making a clone
74 """
74 """
75 self.path = safe_str(os.path.abspath(repo_path))
75 self.path = safe_str(os.path.abspath(repo_path))
76 self.config = config if config else Config()
76 self.config = config if config else Config()
77 self._remote = connection.Hg(
77 self._remote = connection.Hg(
78 self.path, self.config, with_wire=with_wire)
78 self.path, self.config, with_wire=with_wire)
79
79
80 self._init_repo(create, src_url, update_after_clone)
80 self._init_repo(create, src_url, update_after_clone)
81
81
82 # caches
82 # caches
83 self._commit_ids = {}
83 self._commit_ids = {}
84
84
85 @LazyProperty
85 @LazyProperty
86 def commit_ids(self):
86 def commit_ids(self):
87 """
87 """
88 Returns a list of commit ids, in ascending order. Being a lazy
88 Returns a list of commit ids, in ascending order. Being a lazy
89 attribute allows external tools to inject shas from a cache.
89 attribute allows external tools to inject shas from a cache.
90 """
90 """
91 commit_ids = self._get_all_commit_ids()
91 commit_ids = self._get_all_commit_ids()
92 self._rebuild_cache(commit_ids)
92 self._rebuild_cache(commit_ids)
93 return commit_ids
93 return commit_ids
94
94
95 def _rebuild_cache(self, commit_ids):
95 def _rebuild_cache(self, commit_ids):
96 self._commit_ids = dict((commit_id, index)
96 self._commit_ids = dict((commit_id, index)
97 for index, commit_id in enumerate(commit_ids))
97 for index, commit_id in enumerate(commit_ids))
98
98
99 @LazyProperty
99 @LazyProperty
100 def branches(self):
100 def branches(self):
101 return self._get_branches()
101 return self._get_branches()
102
102
103 @LazyProperty
103 @LazyProperty
104 def branches_closed(self):
104 def branches_closed(self):
105 return self._get_branches(active=False, closed=True)
105 return self._get_branches(active=False, closed=True)
106
106
107 @LazyProperty
107 @LazyProperty
108 def branches_all(self):
108 def branches_all(self):
109 all_branches = {}
109 all_branches = {}
110 all_branches.update(self.branches)
110 all_branches.update(self.branches)
111 all_branches.update(self.branches_closed)
111 all_branches.update(self.branches_closed)
112 return all_branches
112 return all_branches
113
113
114 def _get_branches(self, active=True, closed=False):
114 def _get_branches(self, active=True, closed=False):
115 """
115 """
116 Gets branches for this repository.
116 Gets branches for this repository.
117 Returns only active, not closed branches by default.
117 Returns only active, not closed branches by default.
118
118
119 :param active: return also active branches
119 :param active: return also active branches
120 :param closed: return also closed branches
120 :param closed: return also closed branches
121
121
122 """
122 """
123 if self.is_empty():
123 if self.is_empty():
124 return {}
124 return {}
125
125
126 def get_name(ctx):
126 def get_name(ctx):
127 return ctx[0]
127 return ctx[0]
128
128
129 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
129 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
130 self._remote.branches(active, closed).items()]
130 self._remote.branches(active, closed).items()]
131
131
132 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
132 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
133
133
134 @LazyProperty
134 @LazyProperty
135 def tags(self):
135 def tags(self):
136 """
136 """
137 Gets tags for this repository
137 Gets tags for this repository
138 """
138 """
139 return self._get_tags()
139 return self._get_tags()
140
140
141 def _get_tags(self):
141 def _get_tags(self):
142 if self.is_empty():
142 if self.is_empty():
143 return {}
143 return {}
144
144
145 def get_name(ctx):
145 def get_name(ctx):
146 return ctx[0]
146 return ctx[0]
147
147
148 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
148 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
149 self._remote.tags().items()]
149 self._remote.tags().items()]
150
150
151 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
151 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
152
152
153 def tag(self, name, user, commit_id=None, message=None, date=None,
153 def tag(self, name, user, commit_id=None, message=None, date=None,
154 **kwargs):
154 **kwargs):
155 """
155 """
156 Creates and returns a tag for the given ``commit_id``.
156 Creates and returns a tag for the given ``commit_id``.
157
157
158 :param name: name for new tag
158 :param name: name for new tag
159 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
159 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
160 :param commit_id: commit id for which new tag would be created
160 :param commit_id: commit id for which new tag would be created
161 :param message: message of the tag's commit
161 :param message: message of the tag's commit
162 :param date: date of tag's commit
162 :param date: date of tag's commit
163
163
164 :raises TagAlreadyExistError: if tag with same name already exists
164 :raises TagAlreadyExistError: if tag with same name already exists
165 """
165 """
166 if name in self.tags:
166 if name in self.tags:
167 raise TagAlreadyExistError("Tag %s already exists" % name)
167 raise TagAlreadyExistError("Tag %s already exists" % name)
168 commit = self.get_commit(commit_id=commit_id)
168 commit = self.get_commit(commit_id=commit_id)
169 local = kwargs.setdefault('local', False)
169 local = kwargs.setdefault('local', False)
170
170
171 if message is None:
171 if message is None:
172 message = "Added tag %s for commit %s" % (name, commit.short_id)
172 message = "Added tag %s for commit %s" % (name, commit.short_id)
173
173
174 date, tz = date_to_timestamp_plus_offset(date)
174 date, tz = date_to_timestamp_plus_offset(date)
175
175
176 self._remote.tag(
176 self._remote.tag(
177 name, commit.raw_id, message, local, user, date, tz)
177 name, commit.raw_id, message, local, user, date, tz)
178 self._remote.invalidate_vcs_cache()
178 self._remote.invalidate_vcs_cache()
179
179
180 # Reinitialize tags
180 # Reinitialize tags
181 self.tags = self._get_tags()
181 self.tags = self._get_tags()
182 tag_id = self.tags[name]
182 tag_id = self.tags[name]
183
183
184 return self.get_commit(commit_id=tag_id)
184 return self.get_commit(commit_id=tag_id)
185
185
186 def remove_tag(self, name, user, message=None, date=None):
186 def remove_tag(self, name, user, message=None, date=None):
187 """
187 """
188 Removes tag with the given `name`.
188 Removes tag with the given `name`.
189
189
190 :param name: name of the tag to be removed
190 :param name: name of the tag to be removed
191 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
191 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
192 :param message: message of the tag's removal commit
192 :param message: message of the tag's removal commit
193 :param date: date of tag's removal commit
193 :param date: date of tag's removal commit
194
194
195 :raises TagDoesNotExistError: if tag with given name does not exist
195 :raises TagDoesNotExistError: if tag with given name does not exist
196 """
196 """
197 if name not in self.tags:
197 if name not in self.tags:
198 raise TagDoesNotExistError("Tag %s does not exist" % name)
198 raise TagDoesNotExistError("Tag %s does not exist" % name)
199 if message is None:
199 if message is None:
200 message = "Removed tag %s" % name
200 message = "Removed tag %s" % name
201 local = False
201 local = False
202
202
203 date, tz = date_to_timestamp_plus_offset(date)
203 date, tz = date_to_timestamp_plus_offset(date)
204
204
205 self._remote.tag(name, nullid, message, local, user, date, tz)
205 self._remote.tag(name, nullid, message, local, user, date, tz)
206 self._remote.invalidate_vcs_cache()
206 self._remote.invalidate_vcs_cache()
207 self.tags = self._get_tags()
207 self.tags = self._get_tags()
208
208
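# Illustrative sketch of tagging with the API above. ``repo`` is an assumed
# MercurialRepository instance; the tag name and user are example values:
#
#     tagged = repo.tag(
#         'v1.0.0', u'Joe Doe <joe.doe@example.com>',
#         commit_id=repo.get_commit().raw_id,
#         message='Release 1.0.0')
#     'v1.0.0' in repo.tags             # True, tags are re-read after tagging
#     repo.remove_tag('v1.0.0', u'Joe Doe <joe.doe@example.com>')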
209 @LazyProperty
209 @LazyProperty
210 def bookmarks(self):
210 def bookmarks(self):
211 """
211 """
212 Gets bookmarks for this repository
212 Gets bookmarks for this repository
213 """
213 """
214 return self._get_bookmarks()
214 return self._get_bookmarks()
215
215
216 def _get_bookmarks(self):
216 def _get_bookmarks(self):
217 if self.is_empty():
217 if self.is_empty():
218 return {}
218 return {}
219
219
220 def get_name(ctx):
220 def get_name(ctx):
221 return ctx[0]
221 return ctx[0]
222
222
223 _bookmarks = [
223 _bookmarks = [
224 (safe_unicode(n), hexlify(h)) for n, h in
224 (safe_unicode(n), hexlify(h)) for n, h in
225 self._remote.bookmarks().items()]
225 self._remote.bookmarks().items()]
226
226
227 return OrderedDict(sorted(_bookmarks, key=get_name))
227 return OrderedDict(sorted(_bookmarks, key=get_name))
228
228
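# Illustrative sketch of the lazily computed ref collections above; each is
# an OrderedDict mapping a unicode name to a hex commit id. ``repo`` is an
# assumed MercurialRepository instance:
#
#     repo.branches            # open (active) branches only
#     repo.branches_closed     # closed branches only
#     repo.branches_all        # union of the two
#     repo.tags                # tag name -> commit id
#     repo.bookmarks           # bookmark name -> commit id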
229 def _get_all_commit_ids(self):
229 def _get_all_commit_ids(self):
230 return self._remote.get_all_commit_ids('visible')
230 return self._remote.get_all_commit_ids('visible')
231
231
232 def get_diff(
232 def get_diff(
233 self, commit1, commit2, path='', ignore_whitespace=False,
233 self, commit1, commit2, path='', ignore_whitespace=False,
234 context=3, path1=None):
234 context=3, path1=None):
235 """
235 """
236 Returns (git like) *diff*, as plain text. Shows changes introduced by
236 Returns (git like) *diff*, as plain text. Shows changes introduced by
237 `commit2` since `commit1`.
237 `commit2` since `commit1`.
238
238
239 :param commit1: Entry point from which diff is shown. Can be
239 :param commit1: Entry point from which diff is shown. Can be
240 ``self.EMPTY_COMMIT`` - in this case, a patch showing all
240 ``self.EMPTY_COMMIT`` - in this case, a patch showing all
241 the changes since the empty state of the repository up to `commit2`
241 the changes since the empty state of the repository up to `commit2`
242 :param commit2: Until which commit changes should be shown.
242 :param commit2: Until which commit changes should be shown.
243 :param ignore_whitespace: If set to ``True``, would not show whitespace
243 :param ignore_whitespace: If set to ``True``, would not show whitespace
244 changes. Defaults to ``False``.
244 changes. Defaults to ``False``.
245 :param context: How many lines before/after changed lines should be
245 :param context: How many lines before/after changed lines should be
246 shown. Defaults to ``3``.
246 shown. Defaults to ``3``.
247 """
247 """
248 self._validate_diff_commits(commit1, commit2)
248 self._validate_diff_commits(commit1, commit2)
249 if path1 is not None and path1 != path:
249 if path1 is not None and path1 != path:
250 raise ValueError("Diff of two different paths not supported.")
250 raise ValueError("Diff of two different paths not supported.")
251
251
252 if path:
252 if path:
253 file_filter = [self.path, path]
253 file_filter = [self.path, path]
254 else:
254 else:
255 file_filter = None
255 file_filter = None
256
256
257 diff = self._remote.diff(
257 diff = self._remote.diff(
258 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
258 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
259 opt_git=True, opt_ignorews=ignore_whitespace,
259 opt_git=True, opt_ignorews=ignore_whitespace,
260 context=context)
260 context=context)
261 return MercurialDiff(diff)
261 return MercurialDiff(diff)
262
262
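# Illustrative sketch of ``get_diff`` between two commits; ``repo`` is an
# assumed MercurialRepository instance:
#
#     old = repo.get_commit(commit_idx=0)
#     new = repo.get_commit()                      # tip
#     diff = repo.get_diff(old, new, ignore_whitespace=False, context=3)
#     isinstance(diff, MercurialDiff)              # True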
263 def strip(self, commit_id, branch=None):
263 def strip(self, commit_id, branch=None):
264 self._remote.strip(commit_id, update=False, backup="none")
264 self._remote.strip(commit_id, update=False, backup="none")
265
265
266 self._remote.invalidate_vcs_cache()
266 self._remote.invalidate_vcs_cache()
267 self.commit_ids = self._get_all_commit_ids()
267 self.commit_ids = self._get_all_commit_ids()
268 self._rebuild_cache(self.commit_ids)
268 self._rebuild_cache(self.commit_ids)
269
269
270 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
270 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
271 if commit_id1 == commit_id2:
271 if commit_id1 == commit_id2:
272 return commit_id1
272 return commit_id1
273
273
274 ancestors = self._remote.revs_from_revspec(
274 ancestors = self._remote.revs_from_revspec(
275 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
275 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
276 other_path=repo2.path)
276 other_path=repo2.path)
277 return repo2[ancestors[0]].raw_id if ancestors else None
277 return repo2[ancestors[0]].raw_id if ancestors else None
278
278
279 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
279 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
280 if commit_id1 == commit_id2:
280 if commit_id1 == commit_id2:
281 commits = []
281 commits = []
282 else:
282 else:
283 if merge:
283 if merge:
284 indexes = self._remote.revs_from_revspec(
284 indexes = self._remote.revs_from_revspec(
285 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
285 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
286 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
286 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
287 else:
287 else:
288 indexes = self._remote.revs_from_revspec(
288 indexes = self._remote.revs_from_revspec(
289 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
289 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
290 commit_id1, other_path=repo2.path)
290 commit_id1, other_path=repo2.path)
291
291
292 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
292 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
293 for idx in indexes]
293 for idx in indexes]
294
294
295 return commits
295 return commits
296
296
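# Illustrative sketch of cross-repository comparison (e.g. for a pull
# request). ``repo`` and ``other_repo`` are assumed MercurialRepository
# instances; ``target_id`` and ``source_id`` are example commit ids:
#
#     ancestor = repo.get_common_ancestor(target_id, source_id, other_repo)
#     incoming = repo.compare(target_id, source_id, other_repo, merge=True)
#     # ``incoming`` lists commits from ``other_repo`` that are reachable
#     # from ``source_id`` but not from ``target_id``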
297 @staticmethod
297 @staticmethod
298 def check_url(url, config):
298 def check_url(url, config):
299 """
299 """
300 Checks the given url and tries to verify that it's a valid
300 Checks the given url and tries to verify that it's a valid
301 link. Sometimes it may happen that mercurial issues a basic
301 link. Sometimes it may happen that mercurial issues a basic
302 auth request that can cause the whole API to hang when used from python
302 auth request that can cause the whole API to hang when used from python
303 or other external calls.
303 or other external calls.
304
304
305 On failure it raises urllib2.HTTPError; the exception is also raised
305 On failure it raises urllib2.HTTPError; the exception is also raised
306 when the return code is not 200.
306 when the return code is not 200.
307 """
307 """
308 # check first if it's not a local url
308 # check first if it's not a local url
309 if os.path.isdir(url) or url.startswith('file:'):
309 if os.path.isdir(url) or url.startswith('file:'):
310 return True
310 return True
311
311
312 # Request the _remote to verify the url
312 # Request the _remote to verify the url
313 return connection.Hg.check_url(url, config.serialize())
313 return connection.Hg.check_url(url, config.serialize())
314
314
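A small usage sketch of the check above, with hypothetical values; `config` is assumed to be the same configuration object the repository was created with, and the module-level `log` is assumed to exist as used elsewhere in this file.

    # Hypothetical sketch: the url is an example, `config` is assumed to exist.
    url = 'https://code.example.com/hg/some-repo'
    try:
        MercurialRepository.check_url(url, config)
    except Exception:
        # remote urls are verified through the vcsserver and may raise,
        # e.g. on basic-auth prompts or non-200 responses
        log.warning('Cannot verify url %s', url)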
315 @staticmethod
315 @staticmethod
316 def is_valid_repository(path):
316 def is_valid_repository(path):
317 return os.path.isdir(os.path.join(path, '.hg'))
317 return os.path.isdir(os.path.join(path, '.hg'))
318
318
319 def _init_repo(self, create, src_url=None, update_after_clone=False):
319 def _init_repo(self, create, src_url=None, update_after_clone=False):
320 """
320 """
321 Checks for a mercurial repository in the given path. If there
321 Checks for a mercurial repository in the given path. If there
322 is no repository in that path it raises an exception, unless the
322 is no repository in that path it raises an exception, unless the
323 `create` parameter is set to True - in that case the repository is
323 `create` parameter is set to True - in that case the repository is
324 created.
324 created.
325
325
326 If `src_url` is given, the repository is cloned from that
326 If `src_url` is given, the repository is cloned from that
327 location. Additionally the working copy is updated
327 location. Additionally the working copy is updated
328 according to the `update_after_clone` flag.
328 according to the `update_after_clone` flag.
329 """
329 """
330 if create and os.path.exists(self.path):
330 if create and os.path.exists(self.path):
331 raise RepositoryError(
331 raise RepositoryError(
332 "Cannot create repository at %s, location already exist"
332 "Cannot create repository at %s, location already exist"
333 % self.path)
333 % self.path)
334
334
335 if src_url:
335 if src_url:
336 url = str(self._get_url(src_url))
336 url = str(self._get_url(src_url))
337 MercurialRepository.check_url(url, self.config)
337 MercurialRepository.check_url(url, self.config)
338
338
339 self._remote.clone(url, self.path, update_after_clone)
339 self._remote.clone(url, self.path, update_after_clone)
340
340
341 # Don't try to create if we've already cloned repo
341 # Don't try to create if we've already cloned repo
342 create = False
342 create = False
343
343
344 if create:
344 if create:
345 os.makedirs(self.path, mode=0755)
345 os.makedirs(self.path, mode=0755)
346
346
347 self._remote.localrepository(create)
347 self._remote.localrepository(create)
348
348
349 @LazyProperty
349 @LazyProperty
350 def in_memory_commit(self):
350 def in_memory_commit(self):
351 return MercurialInMemoryCommit(self)
351 return MercurialInMemoryCommit(self)
352
352
353 @LazyProperty
353 @LazyProperty
354 def description(self):
354 def description(self):
355 description = self._remote.get_config_value(
355 description = self._remote.get_config_value(
356 'web', 'description', untrusted=True)
356 'web', 'description', untrusted=True)
357 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
357 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
358
358
359 @LazyProperty
359 @LazyProperty
360 def contact(self):
360 def contact(self):
361 contact = (
361 contact = (
362 self._remote.get_config_value("web", "contact") or
362 self._remote.get_config_value("web", "contact") or
363 self._remote.get_config_value("ui", "username"))
363 self._remote.get_config_value("ui", "username"))
364 return safe_unicode(contact or self.DEFAULT_CONTACT)
364 return safe_unicode(contact or self.DEFAULT_CONTACT)
365
365
366 @LazyProperty
366 @LazyProperty
367 def last_change(self):
367 def last_change(self):
368 """
368 """
369 Returns the last change made on this repository as a
369 Returns the last change made on this repository as a
370 `datetime.datetime` object
370 `datetime.datetime` object
371 """
371 """
372 return utcdate_fromtimestamp(self._get_mtime(), makedate()[1])
372 return utcdate_fromtimestamp(self._get_mtime(), makedate()[1])
373
373
374 def _get_mtime(self):
374 def _get_mtime(self):
375 try:
375 try:
376 return date_astimestamp(self.get_commit().date)
376 return date_astimestamp(self.get_commit().date)
377 except RepositoryError:
377 except RepositoryError:
378 # fallback to filesystem
378 # fallback to filesystem
379 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
379 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
380 st_path = os.path.join(self.path, '.hg', "store")
380 st_path = os.path.join(self.path, '.hg', "store")
381 if os.path.exists(cl_path):
381 if os.path.exists(cl_path):
382 return os.stat(cl_path).st_mtime
382 return os.stat(cl_path).st_mtime
383 else:
383 else:
384 return os.stat(st_path).st_mtime
384 return os.stat(st_path).st_mtime
385
385
386 def _sanitize_commit_idx(self, idx):
386 def _sanitize_commit_idx(self, idx):
387 # Note: Mercurial reserves ``int(-1)`` as the id_or_idx of a
387 # Note: Mercurial reserves ``int(-1)`` as the id_or_idx of a
388 # non-existing commit. A `long` is treated correctly though, so we
388 # non-existing commit. A `long` is treated correctly though, so we
389 # convert `int` to `long` here to make sure it is handled correctly.
389 # convert `int` to `long` here to make sure it is handled correctly.
390 if isinstance(idx, int):
390 if isinstance(idx, int):
391 return long(idx)
391 return long(idx)
392 return idx
392 return idx
393
393
394 def _get_url(self, url):
394 def _get_url(self, url):
395 """
395 """
396 Returns the normalized url. If no scheme is given, it falls back
396 Returns the normalized url. If no scheme is given, it falls back
397 to the filesystem
397 to the filesystem
398 (``file:///``) scheme.
398 (``file:///``) scheme.
399 """
399 """
400 url = url.encode('utf8')
400 url = url.encode('utf8')
401 if url != 'default' and '://' not in url:
401 if url != 'default' and '://' not in url:
402 url = "file:" + urllib.pathname2url(url)
402 url = "file:" + urllib.pathname2url(url)
403 return url
403 return url
404
404
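To make the normalization concrete, a few illustrative inputs and outputs (example values only, not taken from the source or tests):

    #   'https://host/repo'  -> 'https://host/repo'   (already has a scheme)
    #   'default'            -> 'default'             (resolved later via hgrc)
    #   '/srv/repos/foo'     -> 'file:' + urllib.pathname2url('/srv/repos/foo')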
405 def get_hook_location(self):
405 def get_hook_location(self):
406 """
406 """
407 Returns the absolute path to the location where hooks are stored.
407 Returns the absolute path to the location where hooks are stored.
408 """
408 """
409 return os.path.join(self.path, '.hg', '.hgrc')
409 return os.path.join(self.path, '.hg', '.hgrc')
410
410
411 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
411 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
412 """
412 """
413 Returns ``MercurialCommit`` object representing repository's
413 Returns ``MercurialCommit`` object representing repository's
414 commit at the given `commit_id` or `commit_idx`.
414 commit at the given `commit_id` or `commit_idx`.
415 """
415 """
416 if self.is_empty():
416 if self.is_empty():
417 raise EmptyRepositoryError("There are no commits yet")
417 raise EmptyRepositoryError("There are no commits yet")
418
418
419 if commit_id is not None:
419 if commit_id is not None:
420 self._validate_commit_id(commit_id)
420 self._validate_commit_id(commit_id)
421 try:
421 try:
422 idx = self._commit_ids[commit_id]
422 idx = self._commit_ids[commit_id]
423 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
423 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
424 except KeyError:
424 except KeyError:
425 pass
425 pass
426 elif commit_idx is not None:
426 elif commit_idx is not None:
427 self._validate_commit_idx(commit_idx)
427 self._validate_commit_idx(commit_idx)
428 commit_idx = self._sanitize_commit_idx(commit_idx)
428 commit_idx = self._sanitize_commit_idx(commit_idx)
429 try:
429 try:
430 id_ = self.commit_ids[commit_idx]
430 id_ = self.commit_ids[commit_idx]
431 if commit_idx < 0:
431 if commit_idx < 0:
432 commit_idx += len(self.commit_ids)
432 commit_idx += len(self.commit_ids)
433 return MercurialCommit(
433 return MercurialCommit(
434 self, id_, commit_idx, pre_load=pre_load)
434 self, id_, commit_idx, pre_load=pre_load)
435 except IndexError:
435 except IndexError:
436 commit_id = commit_idx
436 commit_id = commit_idx
437 else:
437 else:
438 commit_id = "tip"
438 commit_id = "tip"
439
439
440 # TODO Paris: Ugly hack to "serialize" long for msgpack
440 # TODO Paris: Ugly hack to "serialize" long for msgpack
441 if isinstance(commit_id, long):
441 if isinstance(commit_id, long):
442 commit_id = float(commit_id)
442 commit_id = float(commit_id)
443
443
444 if isinstance(commit_id, unicode):
444 if isinstance(commit_id, unicode):
445 commit_id = safe_str(commit_id)
445 commit_id = safe_str(commit_id)
446
446
447 raw_id, idx = self._remote.lookup(commit_id, both=True)
447 raw_id, idx = self._remote.lookup(commit_id, both=True)
448
448
449 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
449 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
450
450
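A hypothetical usage sketch of the lookup variants above; `repo` is assumed to be an existing MercurialRepository instance, the hash is made up, and the `pre_load` keys are examples.

    tip = repo.get_commit()                   # no arguments -> "tip"
    first = repo.get_commit(commit_idx=0)     # lookup by index
    last = repo.get_commit(commit_idx=-1)     # negative indexes are allowed
    some = repo.get_commit(commit_id='f00dbabe1234',
                           pre_load=['author', 'date', 'message'])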
451 def get_commits(
451 def get_commits(
452 self, start_id=None, end_id=None, start_date=None, end_date=None,
452 self, start_id=None, end_id=None, start_date=None, end_date=None,
453 branch_name=None, pre_load=None):
453 branch_name=None, pre_load=None):
454 """
454 """
455 Returns a generator of ``MercurialCommit`` objects from start to end
455 Returns a generator of ``MercurialCommit`` objects from start to end
456 (both inclusive).
456 (both inclusive).
457
457
458 :param start_id: None, str(commit_id)
458 :param start_id: None, str(commit_id)
459 :param end_id: None, str(commit_id)
459 :param end_id: None, str(commit_id)
460 :param start_date: if specified, commits with commit date less than
460 :param start_date: if specified, commits with commit date less than
461 ``start_date`` would be filtered out from returned set
461 ``start_date`` would be filtered out from returned set
462 :param end_date: if specified, commits with commit date greater than
462 :param end_date: if specified, commits with commit date greater than
463 ``end_date`` would be filtered out from returned set
463 ``end_date`` would be filtered out from returned set
464 :param branch_name: if specified, commits not reachable from given
464 :param branch_name: if specified, commits not reachable from given
465 branch would be filtered out from returned set
465 branch would be filtered out from returned set
466
466
467 :raise BranchDoesNotExistError: If given ``branch_name`` does not
467 :raise BranchDoesNotExistError: If given ``branch_name`` does not
468 exist.
468 exist.
469 :raise CommitDoesNotExistError: If commit for given ``start`` or
469 :raise CommitDoesNotExistError: If commit for given ``start`` or
470 ``end`` could not be found.
470 ``end`` could not be found.
471 """
471 """
472 # actually we should check now if it's not an empty repo
472 # actually we should check now if it's not an empty repo
473 branch_ancestors = False
473 branch_ancestors = False
474 if self.is_empty():
474 if self.is_empty():
475 raise EmptyRepositoryError("There are no commits yet")
475 raise EmptyRepositoryError("There are no commits yet")
476 self._validate_branch_name(branch_name)
476 self._validate_branch_name(branch_name)
477
477
478 if start_id is not None:
478 if start_id is not None:
479 self._validate_commit_id(start_id)
479 self._validate_commit_id(start_id)
480 c_start = self.get_commit(commit_id=start_id)
480 c_start = self.get_commit(commit_id=start_id)
481 start_pos = self._commit_ids[c_start.raw_id]
481 start_pos = self._commit_ids[c_start.raw_id]
482 else:
482 else:
483 start_pos = None
483 start_pos = None
484
484
485 if end_id is not None:
485 if end_id is not None:
486 self._validate_commit_id(end_id)
486 self._validate_commit_id(end_id)
487 c_end = self.get_commit(commit_id=end_id)
487 c_end = self.get_commit(commit_id=end_id)
488 end_pos = max(0, self._commit_ids[c_end.raw_id])
488 end_pos = max(0, self._commit_ids[c_end.raw_id])
489 else:
489 else:
490 end_pos = None
490 end_pos = None
491
491
492 if None not in [start_id, end_id] and start_pos > end_pos:
492 if None not in [start_id, end_id] and start_pos > end_pos:
493 raise RepositoryError(
493 raise RepositoryError(
494 "Start commit '%s' cannot be after end commit '%s'" %
494 "Start commit '%s' cannot be after end commit '%s'" %
495 (start_id, end_id))
495 (start_id, end_id))
496
496
497 if end_pos is not None:
497 if end_pos is not None:
498 end_pos += 1
498 end_pos += 1
499
499
500 commit_filter = []
500 commit_filter = []
501 if branch_name and not branch_ancestors:
501 if branch_name and not branch_ancestors:
502 commit_filter.append('branch("%s")' % branch_name)
502 commit_filter.append('branch("%s")' % branch_name)
503 elif branch_name and branch_ancestors:
503 elif branch_name and branch_ancestors:
504 commit_filter.append('ancestors(branch("%s"))' % branch_name)
504 commit_filter.append('ancestors(branch("%s"))' % branch_name)
505 if start_date and not end_date:
505 if start_date and not end_date:
506 commit_filter.append('date(">%s")' % start_date)
506 commit_filter.append('date(">%s")' % start_date)
507 if end_date and not start_date:
507 if end_date and not start_date:
508 commit_filter.append('date("<%s")' % end_date)
508 commit_filter.append('date("<%s")' % end_date)
509 if start_date and end_date:
509 if start_date and end_date:
510 commit_filter.append(
510 commit_filter.append(
511 'date(">%s") and date("<%s")' % (start_date, end_date))
511 'date(">%s") and date("<%s")' % (start_date, end_date))
512
512
513 # TODO: johbo: Figure out a simpler way for this solution
513 # TODO: johbo: Figure out a simpler way for this solution
514 collection_generator = CollectionGenerator
514 collection_generator = CollectionGenerator
515 if commit_filter:
515 if commit_filter:
516 commit_filter = map(safe_str, commit_filter)
516 commit_filter = map(safe_str, commit_filter)
517 revisions = self._remote.rev_range(commit_filter)
517 revisions = self._remote.rev_range(commit_filter)
518 collection_generator = MercurialIndexBasedCollectionGenerator
518 collection_generator = MercurialIndexBasedCollectionGenerator
519 else:
519 else:
520 revisions = self.commit_ids
520 revisions = self.commit_ids
521
521
522 if start_pos or end_pos:
522 if start_pos or end_pos:
523 revisions = revisions[start_pos:end_pos]
523 revisions = revisions[start_pos:end_pos]
524
524
525 return collection_generator(self, revisions, pre_load=pre_load)
525 return collection_generator(self, revisions, pre_load=pre_load)
526
526
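A hypothetical sketch of the filters above; the branch name, dates and `pre_load` keys are examples, and `repo` and the module-level `log` are assumed to exist.

    commits = repo.get_commits(
        branch_name='default',
        start_date='2016-01-01', end_date='2016-06-30',
        pre_load=['author', 'message'])
    for commit in commits:
        log.debug('commit in range: %s', commit.raw_id)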
527 def pull(self, url, commit_ids=None):
527 def pull(self, url, commit_ids=None):
528 """
528 """
529 Tries to pull changes from an external location.
529 Tries to pull changes from an external location.
530
530
531 :param commit_ids: Optional. Can be set to a list of commit ids
531 :param commit_ids: Optional. Can be set to a list of commit ids
532 which shall be pulled from the other repository.
532 which shall be pulled from the other repository.
533 """
533 """
534 url = self._get_url(url)
534 url = self._get_url(url)
535 self._remote.pull(url, commit_ids=commit_ids)
535 self._remote.pull(url, commit_ids=commit_ids)
536 self._remote.invalidate_vcs_cache()
536 self._remote.invalidate_vcs_cache()
537
537
538 def _local_clone(self, clone_path):
538 def _local_clone(self, clone_path):
539 """
539 """
540 Create a local clone of the current repo.
540 Create a local clone of the current repo.
541 """
541 """
542 self._remote.clone(self.path, clone_path, update_after_clone=True,
542 self._remote.clone(self.path, clone_path, update_after_clone=True,
543 hooks=False)
543 hooks=False)
544
544
545 def _update(self, revision, clean=False):
545 def _update(self, revision, clean=False):
546 """
546 """
547 Update the working copy to the specified revision.
547 Update the working copy to the specified revision.
548 """
548 """
549 self._remote.update(revision, clean=clean)
549 self._remote.update(revision, clean=clean)
550
550
551 def _identify(self):
551 def _identify(self):
552 """
552 """
553 Return the current state of the working directory.
553 Return the current state of the working directory.
554 """
554 """
555 return self._remote.identify().strip().rstrip('+')
555 return self._remote.identify().strip().rstrip('+')
556
556
557 def _heads(self, branch=None):
557 def _heads(self, branch=None):
558 """
558 """
559 Return the commit ids of the repository heads.
559 Return the commit ids of the repository heads.
560 """
560 """
561 return self._remote.heads(branch=branch).strip().split(' ')
561 return self._remote.heads(branch=branch).strip().split(' ')
562
562
563 def _ancestor(self, revision1, revision2):
563 def _ancestor(self, revision1, revision2):
564 """
564 """
565 Return the common ancestor of the two revisions.
565 Return the common ancestor of the two revisions.
566 """
566 """
567 return self._remote.ancestor(
567 return self._remote.ancestor(
568 revision1, revision2).strip().split(':')[-1]
568 revision1, revision2).strip().split(':')[-1]
569
569
570 def _local_push(
570 def _local_push(
571 self, revision, repository_path, push_branches=False,
571 self, revision, repository_path, push_branches=False,
572 enable_hooks=False):
572 enable_hooks=False):
573 """
573 """
574 Push the given revision to the specified repository.
574 Push the given revision to the specified repository.
575
575
576 :param push_branches: allow creating branches in the target repo.
576 :param push_branches: allow creating branches in the target repo.
577 """
577 """
578 self._remote.push(
578 self._remote.push(
579 [revision], repository_path, hooks=enable_hooks,
579 [revision], repository_path, hooks=enable_hooks,
580 push_branches=push_branches)
580 push_branches=push_branches)
581
581
582 def _local_merge(self, target_ref, merge_message, user_name, user_email,
582 def _local_merge(self, target_ref, merge_message, user_name, user_email,
583 source_ref, use_rebase=False):
583 source_ref, use_rebase=False):
584 """
584 """
585 Merge the given source_revision into the checked out revision.
585 Merge the given source_revision into the checked out revision.
586
586
587 Returns the commit id of the merge and a boolean indicating if the
587 Returns the commit id of the merge and a boolean indicating if the
588 commit needs to be pushed.
588 commit needs to be pushed.
589 """
589 """
590 self._update(target_ref.commit_id)
590 self._update(target_ref.commit_id)
591
591
592 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
592 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
593 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
593 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
594
594
595 if ancestor == source_ref.commit_id:
595 if ancestor == source_ref.commit_id:
596 # Nothing to do, the changes were already integrated
596 # Nothing to do, the changes were already integrated
597 return target_ref.commit_id, False
597 return target_ref.commit_id, False
598
598
599 elif ancestor == target_ref.commit_id and is_the_same_branch:
599 elif ancestor == target_ref.commit_id and is_the_same_branch:
600 # In this case we should force a commit message
600 # In this case we should force a commit message
601 return source_ref.commit_id, True
601 return source_ref.commit_id, True
602
602
603 if use_rebase:
603 if use_rebase:
604 try:
604 try:
605 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
605 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
606 target_ref.commit_id)
606 target_ref.commit_id)
607 self.bookmark(bookmark_name, revision=source_ref.commit_id)
607 self.bookmark(bookmark_name, revision=source_ref.commit_id)
608 self._remote.rebase(
608 self._remote.rebase(
609 source=source_ref.commit_id, dest=target_ref.commit_id)
609 source=source_ref.commit_id, dest=target_ref.commit_id)
610 self._remote.invalidate_vcs_cache()
610 self._remote.invalidate_vcs_cache()
611 self._update(bookmark_name)
611 self._update(bookmark_name)
612 return self._identify(), True
612 return self._identify(), True
613 except RepositoryError:
613 except RepositoryError:
614 # The rebase-abort may raise another exception which 'hides'
614 # The rebase-abort may raise another exception which 'hides'
615 # the original one, therefore we log it here.
615 # the original one, therefore we log it here.
616 log.exception('Error while rebasing shadow repo during merge.')
616 log.exception('Error while rebasing shadow repo during merge.')
617
617
618 # Cleanup any rebase leftovers
618 # Cleanup any rebase leftovers
619 self._remote.invalidate_vcs_cache()
619 self._remote.invalidate_vcs_cache()
620 self._remote.rebase(abort=True)
620 self._remote.rebase(abort=True)
621 self._remote.invalidate_vcs_cache()
621 self._remote.invalidate_vcs_cache()
622 self._remote.update(clean=True)
622 self._remote.update(clean=True)
623 raise
623 raise
624 else:
624 else:
625 try:
625 try:
626 self._remote.merge(source_ref.commit_id)
626 self._remote.merge(source_ref.commit_id)
627 self._remote.invalidate_vcs_cache()
627 self._remote.invalidate_vcs_cache()
628 self._remote.commit(
628 self._remote.commit(
629 message=safe_str(merge_message),
629 message=safe_str(merge_message),
630 username=safe_str('%s <%s>' % (user_name, user_email)))
630 username=safe_str('%s <%s>' % (user_name, user_email)))
631 self._remote.invalidate_vcs_cache()
631 self._remote.invalidate_vcs_cache()
632 return self._identify(), True
632 return self._identify(), True
633 except RepositoryError:
633 except RepositoryError:
634 # Cleanup any merge leftovers
634 # Cleanup any merge leftovers
635 self._remote.update(clean=True)
635 self._remote.update(clean=True)
636 raise
636 raise
637
637
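A condensed summary of the possible outcomes of `_local_merge` above (illustrative only, mirroring the branches in the code):

    #   ancestor == source.commit_id              -> (target.commit_id, False)  # already integrated
    #   ancestor == target.commit_id, same branch -> (source.commit_id, True)
    #   use_rebase=True                           -> rebase, then (self._identify(), True)
    #   otherwise                                 -> merge + commit, (self._identify(), True)
    #   on RepositoryError the shadow working copy is cleaned up and the error re-raised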
638 def _is_the_same_branch(self, target_ref, source_ref):
638 def _is_the_same_branch(self, target_ref, source_ref):
639 return (
639 return (
640 self._get_branch_name(target_ref) ==
640 self._get_branch_name(target_ref) ==
641 self._get_branch_name(source_ref))
641 self._get_branch_name(source_ref))
642
642
643 def _get_branch_name(self, ref):
643 def _get_branch_name(self, ref):
644 if ref.type == 'branch':
644 if ref.type == 'branch':
645 return ref.name
645 return ref.name
646 return self._remote.ctx_branch(ref.commit_id)
646 return self._remote.ctx_branch(ref.commit_id)
647
647
648 def _get_shadow_repository_path(self, workspace_id):
648 def _get_shadow_repository_path(self, workspace_id):
649 # The name of the shadow repository must start with '.', so it is
649 # The name of the shadow repository must start with '.', so it is
650 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
650 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
651 return os.path.join(
651 return os.path.join(
652 os.path.dirname(self.path),
652 os.path.dirname(self.path),
653 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
653 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
654
654
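For example (hypothetical values), a repository at '/srv/repos/project' with workspace id 'pr-42' would resolve to:

    #   os.path.join('/srv/repos', '.__shadow_project_pr-42')
    #   == '/srv/repos/.__shadow_project_pr-42'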
655 def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref):
655 def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref):
656 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
656 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
657 if not os.path.exists(shadow_repository_path):
657 if not os.path.exists(shadow_repository_path):
658 self._local_clone(shadow_repository_path)
658 self._local_clone(shadow_repository_path)
659 log.debug(
659 log.debug(
660 'Prepared shadow repository in %s', shadow_repository_path)
660 'Prepared shadow repository in %s', shadow_repository_path)
661
661
662 return shadow_repository_path
662 return shadow_repository_path
663
663
664 def cleanup_merge_workspace(self, workspace_id):
664 def cleanup_merge_workspace(self, workspace_id):
665 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
665 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
666 shutil.rmtree(shadow_repository_path, ignore_errors=True)
666 shutil.rmtree(shadow_repository_path, ignore_errors=True)
667
667
668 def _merge_repo(self, shadow_repository_path, target_ref,
668 def _merge_repo(self, shadow_repository_path, target_ref,
669 source_repo, source_ref, merge_message,
669 source_repo, source_ref, merge_message,
670 merger_name, merger_email, dry_run=False,
670 merger_name, merger_email, dry_run=False,
671 use_rebase=False):
671 use_rebase=False):
672 if target_ref.commit_id not in self._heads():
672 if target_ref.commit_id not in self._heads():
673 return MergeResponse(
673 return MergeResponse(
674 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
674 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
675
675
676 try:
676 try:
677 if (target_ref.type == 'branch' and
677 if (target_ref.type == 'branch' and
678 len(self._heads(target_ref.name)) != 1):
678 len(self._heads(target_ref.name)) != 1):
679 return MergeResponse(
679 return MergeResponse(
680 False, False, None,
680 False, False, None,
681 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
681 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
682 except CommitDoesNotExistError as e:
682 except CommitDoesNotExistError as e:
683 log.exception('Failure when looking up branch heads on hg target')
683 log.exception('Failure when looking up branch heads on hg target')
684 return MergeResponse(
684 return MergeResponse(
685 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
685 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
686
686
687 shadow_repo = self._get_shadow_instance(shadow_repository_path)
687 shadow_repo = self._get_shadow_instance(shadow_repository_path)
688
688
689 log.debug('Pulling in target reference %s', target_ref)
689 log.debug('Pulling in target reference %s', target_ref)
690 self._validate_pull_reference(target_ref)
690 self._validate_pull_reference(target_ref)
691 shadow_repo._local_pull(self.path, target_ref)
691 shadow_repo._local_pull(self.path, target_ref)
692 try:
692 try:
693 log.debug('Pulling in source reference %s', source_ref)
693 log.debug('Pulling in source reference %s', source_ref)
694 source_repo._validate_pull_reference(source_ref)
694 source_repo._validate_pull_reference(source_ref)
695 shadow_repo._local_pull(source_repo.path, source_ref)
695 shadow_repo._local_pull(source_repo.path, source_ref)
696 except CommitDoesNotExistError:
696 except CommitDoesNotExistError:
697 log.exception('Failure when doing local pull on hg shadow repo')
697 log.exception('Failure when doing local pull on hg shadow repo')
698 return MergeResponse(
698 return MergeResponse(
699 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
699 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
700
700
701 merge_ref = None
701 merge_ref = None
702 merge_failure_reason = MergeFailureReason.NONE
702 merge_failure_reason = MergeFailureReason.NONE
703
703
704 try:
704 try:
705 merge_commit_id, needs_push = shadow_repo._local_merge(
705 merge_commit_id, needs_push = shadow_repo._local_merge(
706 target_ref, merge_message, merger_name, merger_email,
706 target_ref, merge_message, merger_name, merger_email,
707 source_ref, use_rebase=use_rebase)
707 source_ref, use_rebase=use_rebase)
708 merge_possible = True
708 merge_possible = True
709
709
710 # Set a bookmark pointing to the merge commit. This bookmark may be
710 # Set a bookmark pointing to the merge commit. This bookmark may be
711 # used to easily identify the last successful merge commit in the
711 # used to easily identify the last successful merge commit in the
712 # shadow repository.
712 # shadow repository.
713 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
713 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
714 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
714 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
715 except SubrepoMergeError:
716 log.exception(
717 'Subrepo merge error during local merge on hg shadow repo.')
718 merge_possible = False
719 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
715 except RepositoryError:
720 except RepositoryError:
716 log.exception('Failure when doing local merge on hg shadow repo')
721 log.exception('Failure when doing local merge on hg shadow repo')
717 merge_possible = False
722 merge_possible = False
718 merge_failure_reason = MergeFailureReason.MERGE_FAILED
723 merge_failure_reason = MergeFailureReason.MERGE_FAILED
719
724
720 if merge_possible and not dry_run:
725 if merge_possible and not dry_run:
721 if needs_push:
726 if needs_push:
722 # In case the target is a bookmark, update it, so after pushing
727 # In case the target is a bookmark, update it, so after pushing
723 # the bookmark is also updated in the target.
728 # the bookmark is also updated in the target.
724 if target_ref.type == 'book':
729 if target_ref.type == 'book':
725 shadow_repo.bookmark(
730 shadow_repo.bookmark(
726 target_ref.name, revision=merge_commit_id)
731 target_ref.name, revision=merge_commit_id)
727
732
728 try:
733 try:
729 shadow_repo_with_hooks = self._get_shadow_instance(
734 shadow_repo_with_hooks = self._get_shadow_instance(
730 shadow_repository_path,
735 shadow_repository_path,
731 enable_hooks=True)
736 enable_hooks=True)
732 # Note: the push_branches option will push any new branch
737 # Note: the push_branches option will push any new branch
733 # defined in the source repository to the target. This may
738 # defined in the source repository to the target. This may
734 # be dangerous as branches are permanent in Mercurial.
739 # be dangerous as branches are permanent in Mercurial.
735 # This feature was requested in issue #441.
740 # This feature was requested in issue #441.
736 shadow_repo_with_hooks._local_push(
741 shadow_repo_with_hooks._local_push(
737 merge_commit_id, self.path, push_branches=True,
742 merge_commit_id, self.path, push_branches=True,
738 enable_hooks=True)
743 enable_hooks=True)
739 merge_succeeded = True
744 merge_succeeded = True
740 except RepositoryError:
745 except RepositoryError:
741 log.exception(
746 log.exception(
742 'Failure when doing local push from the shadow '
747 'Failure when doing local push from the shadow '
743 'repository to the target repository.')
748 'repository to the target repository.')
744 merge_succeeded = False
749 merge_succeeded = False
745 merge_failure_reason = MergeFailureReason.PUSH_FAILED
750 merge_failure_reason = MergeFailureReason.PUSH_FAILED
746 else:
751 else:
747 merge_succeeded = True
752 merge_succeeded = True
748 else:
753 else:
749 merge_succeeded = False
754 merge_succeeded = False
750
755
751 return MergeResponse(
756 return MergeResponse(
752 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
757 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
753
758
754 def _get_shadow_instance(
759 def _get_shadow_instance(
755 self, shadow_repository_path, enable_hooks=False):
760 self, shadow_repository_path, enable_hooks=False):
756 config = self.config.copy()
761 config = self.config.copy()
757 if not enable_hooks:
762 if not enable_hooks:
758 config.clear_section('hooks')
763 config.clear_section('hooks')
759 return MercurialRepository(shadow_repository_path, config)
764 return MercurialRepository(shadow_repository_path, config)
760
765
761 def _validate_pull_reference(self, reference):
766 def _validate_pull_reference(self, reference):
762 if not (reference.name in self.bookmarks or
767 if not (reference.name in self.bookmarks or
763 reference.name in self.branches or
768 reference.name in self.branches or
764 self.get_commit(reference.commit_id)):
769 self.get_commit(reference.commit_id)):
765 raise CommitDoesNotExistError(
770 raise CommitDoesNotExistError(
766 'Unknown branch, bookmark or commit id')
771 'Unknown branch, bookmark or commit id')
767
772
768 def _local_pull(self, repository_path, reference):
773 def _local_pull(self, repository_path, reference):
769 """
774 """
770 Fetch a branch, bookmark or commit from a local repository.
775 Fetch a branch, bookmark or commit from a local repository.
771 """
776 """
772 repository_path = os.path.abspath(repository_path)
777 repository_path = os.path.abspath(repository_path)
773 if repository_path == self.path:
778 if repository_path == self.path:
774 raise ValueError('Cannot pull from the same repository')
779 raise ValueError('Cannot pull from the same repository')
775
780
776 reference_type_to_option_name = {
781 reference_type_to_option_name = {
777 'book': 'bookmark',
782 'book': 'bookmark',
778 'branch': 'branch',
783 'branch': 'branch',
779 }
784 }
780 option_name = reference_type_to_option_name.get(
785 option_name = reference_type_to_option_name.get(
781 reference.type, 'revision')
786 reference.type, 'revision')
782
787
783 if option_name == 'revision':
788 if option_name == 'revision':
784 ref = reference.commit_id
789 ref = reference.commit_id
785 else:
790 else:
786 ref = reference.name
791 ref = reference.name
787
792
788 options = {option_name: [ref]}
793 options = {option_name: [ref]}
789 self._remote.pull_cmd(repository_path, hooks=False, **options)
794 self._remote.pull_cmd(repository_path, hooks=False, **options)
790 self._remote.invalidate_vcs_cache()
795 self._remote.invalidate_vcs_cache()
791
796
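The reference-to-option mapping above translates roughly as follows (illustrative values; `Reference` is the namedtuple used throughout this module, and '<hash>' is a placeholder):

    #   Reference('book', 'pr-merge', '<hash>')  -> pull_cmd(path, hooks=False, bookmark=['pr-merge'])
    #   Reference('branch', 'default', '<hash>') -> pull_cmd(path, hooks=False, branch=['default'])
    #   Reference('tag', 'v1.0.0', '<hash>')     -> pull_cmd(path, hooks=False, revision=['<hash>'])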
792 def bookmark(self, bookmark, revision=None):
797 def bookmark(self, bookmark, revision=None):
793 if isinstance(bookmark, unicode):
798 if isinstance(bookmark, unicode):
794 bookmark = safe_str(bookmark)
799 bookmark = safe_str(bookmark)
795 self._remote.bookmark(bookmark, revision=revision)
800 self._remote.bookmark(bookmark, revision=revision)
796 self._remote.invalidate_vcs_cache()
801 self._remote.invalidate_vcs_cache()
797
802
798
803
799 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
804 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
800
805
801 def _commit_factory(self, commit_id):
806 def _commit_factory(self, commit_id):
802 return self.repo.get_commit(
807 return self.repo.get_commit(
803 commit_idx=commit_id, pre_load=self.pre_load)
808 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,196 +1,205 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2016 RhodeCode GmbH
3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Custom vcs exceptions module.
22 Custom vcs exceptions module.
23 """
23 """
24
24
25 import functools
25 import functools
26 import urllib2
26 import urllib2
27
27
28
28
29 class VCSCommunicationError(Exception):
29 class VCSCommunicationError(Exception):
30 pass
30 pass
31
31
32
32
33 class PyroVCSCommunicationError(VCSCommunicationError):
33 class PyroVCSCommunicationError(VCSCommunicationError):
34 pass
34 pass
35
35
36
36
37 class HttpVCSCommunicationError(VCSCommunicationError):
37 class HttpVCSCommunicationError(VCSCommunicationError):
38 pass
38 pass
39
39
40
40
41 class VCSError(Exception):
41 class VCSError(Exception):
42 pass
42 pass
43
43
44
44
45 class RepositoryError(VCSError):
45 class RepositoryError(VCSError):
46 pass
46 pass
47
47
48
48
49 class RepositoryRequirementError(RepositoryError):
49 class RepositoryRequirementError(RepositoryError):
50 pass
50 pass
51
51
52
52
53 class VCSBackendNotSupportedError(VCSError):
53 class VCSBackendNotSupportedError(VCSError):
54 """
54 """
55 Exception raised when VCSServer does not support requested backend
55 Exception raised when VCSServer does not support requested backend
56 """
56 """
57
57
58
58
59 class EmptyRepositoryError(RepositoryError):
59 class EmptyRepositoryError(RepositoryError):
60 pass
60 pass
61
61
62
62
63 class TagAlreadyExistError(RepositoryError):
63 class TagAlreadyExistError(RepositoryError):
64 pass
64 pass
65
65
66
66
67 class TagDoesNotExistError(RepositoryError):
67 class TagDoesNotExistError(RepositoryError):
68 pass
68 pass
69
69
70
70
71 class BranchAlreadyExistError(RepositoryError):
71 class BranchAlreadyExistError(RepositoryError):
72 pass
72 pass
73
73
74
74
75 class BranchDoesNotExistError(RepositoryError):
75 class BranchDoesNotExistError(RepositoryError):
76 pass
76 pass
77
77
78
78
79 class CommitError(RepositoryError):
79 class CommitError(RepositoryError):
80 """
80 """
81 Exceptions related to an existing commit
81 Exceptions related to an existing commit
82 """
82 """
83
83
84
84
85 class CommitDoesNotExistError(CommitError):
85 class CommitDoesNotExistError(CommitError):
86 pass
86 pass
87
87
88
88
89 class CommittingError(RepositoryError):
89 class CommittingError(RepositoryError):
90 """
90 """
91 Exceptions happening while creating a new commit
91 Exceptions happening while creating a new commit
92 """
92 """
93
93
94
94
95 class NothingChangedError(CommittingError):
95 class NothingChangedError(CommittingError):
96 pass
96 pass
97
97
98
98
99 class NodeError(VCSError):
99 class NodeError(VCSError):
100 pass
100 pass
101
101
102
102
103 class RemovedFileNodeError(NodeError):
103 class RemovedFileNodeError(NodeError):
104 pass
104 pass
105
105
106
106
107 class NodeAlreadyExistsError(CommittingError):
107 class NodeAlreadyExistsError(CommittingError):
108 pass
108 pass
109
109
110
110
111 class NodeAlreadyChangedError(CommittingError):
111 class NodeAlreadyChangedError(CommittingError):
112 pass
112 pass
113
113
114
114
115 class NodeDoesNotExistError(CommittingError):
115 class NodeDoesNotExistError(CommittingError):
116 pass
116 pass
117
117
118
118
119 class NodeNotChangedError(CommittingError):
119 class NodeNotChangedError(CommittingError):
120 pass
120 pass
121
121
122
122
123 class NodeAlreadyAddedError(CommittingError):
123 class NodeAlreadyAddedError(CommittingError):
124 pass
124 pass
125
125
126
126
127 class NodeAlreadyRemovedError(CommittingError):
127 class NodeAlreadyRemovedError(CommittingError):
128 pass
128 pass
129
129
130
130
131 class SubrepoMergeError(RepositoryError):
132 """
133 This happens if we try to merge a repository which contains subrepos and
134 the subrepos cannot be merged. The subrepos themselves are not merged,
135 only their references in the root repo are merged.
136 """
137
138
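A minimal sketch of the intended handling, mirroring the except-block added to the Mercurial backend in this changeset (arguments elided, shown as a comment only):

    #     try:
    #         merge_commit_id, needs_push = shadow_repo._local_merge(
    #             target_ref, merge_message, merger_name, merger_email, source_ref)
    #     except SubrepoMergeError:
    #         merge_possible = False
    #         merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED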
131 class ImproperArchiveTypeError(VCSError):
139 class ImproperArchiveTypeError(VCSError):
132 pass
140 pass
133
141
134
142
135 class CommandError(VCSError):
143 class CommandError(VCSError):
136 pass
144 pass
137
145
138
146
139 class UnhandledException(VCSError):
147 class UnhandledException(VCSError):
140 """
148 """
141 Signals that something unexpected went wrong.
149 Signals that something unexpected went wrong.
142
150
143 This usually means we have a programming error on the side of the VCSServer
151 This usually means we have a programming error on the side of the VCSServer
144 and should inspect the logfile of the VCSServer to find more details.
152 and should inspect the logfile of the VCSServer to find more details.
145 """
153 """
146
154
147
155
148 _EXCEPTION_MAP = {
156 _EXCEPTION_MAP = {
149 'abort': RepositoryError,
157 'abort': RepositoryError,
150 'archive': ImproperArchiveTypeError,
158 'archive': ImproperArchiveTypeError,
151 'error': RepositoryError,
159 'error': RepositoryError,
152 'lookup': CommitDoesNotExistError,
160 'lookup': CommitDoesNotExistError,
153 'repo_locked': RepositoryError,
161 'repo_locked': RepositoryError,
154 'requirement': RepositoryRequirementError,
162 'requirement': RepositoryRequirementError,
155 'unhandled': UnhandledException,
163 'unhandled': UnhandledException,
156 # TODO: johbo: Define our own exception for this and stop abusing
164 # TODO: johbo: Define our own exception for this and stop abusing
157 # urllib's exception class.
165 # urllib's exception class.
158 'url_error': urllib2.URLError,
166 'url_error': urllib2.URLError,
167 'subrepo_merge_error': SubrepoMergeError,
159 }
168 }
160
169
161
170
162 def map_vcs_exceptions(func):
171 def map_vcs_exceptions(func):
163 """
172 """
164 Utility to decorate functions so that plain exceptions are translated.
173 Utility to decorate functions so that plain exceptions are translated.
165
174
166 The translation is based on `exc_map` which maps a `str` indicating
175 The translation is based on `exc_map` which maps a `str` indicating
167 the error type into an exception class representing this error inside
176 the error type into an exception class representing this error inside
168 of the vcs layer.
177 of the vcs layer.
169 """
178 """
170
179
171 @functools.wraps(func)
180 @functools.wraps(func)
172 def wrapper(*args, **kwargs):
181 def wrapper(*args, **kwargs):
173 try:
182 try:
174 return func(*args, **kwargs)
183 return func(*args, **kwargs)
175 except Exception as e:
184 except Exception as e:
176 # The error middleware adds information if it finds
185 # The error middleware adds information if it finds
177 # __traceback_info__ in a frame object. This way the remote
186 # __traceback_info__ in a frame object. This way the remote
178 # traceback information is made available in error reports.
187 # traceback information is made available in error reports.
179 remote_tb = getattr(e, '_pyroTraceback', None)
188 remote_tb = getattr(e, '_pyroTraceback', None)
180 if remote_tb:
189 if remote_tb:
181 __traceback_info__ = (
190 __traceback_info__ = (
182 'Found Pyro4 remote traceback information:\n\n' +
191 'Found Pyro4 remote traceback information:\n\n' +
183 '\n'.join(remote_tb))
192 '\n'.join(remote_tb))
184
193
185 # Avoid that remote_tb also appears in the frame
194 # Avoid that remote_tb also appears in the frame
186 del remote_tb
195 del remote_tb
187
196
188 # Special vcs errors have an attribute "_vcs_kind" which is used
197 # Special vcs errors have an attribute "_vcs_kind" which is used
189 # to translate them to the proper exception class in the vcs
198 # to translate them to the proper exception class in the vcs
190 # client layer.
199 # client layer.
191 kind = getattr(e, '_vcs_kind', None)
200 kind = getattr(e, '_vcs_kind', None)
192 if kind:
201 if kind:
193 raise _EXCEPTION_MAP[kind](*e.args)
202 raise _EXCEPTION_MAP[kind](*e.args)
194 else:
203 else:
195 raise
204 raise
196 return wrapper
205 return wrapper
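A hypothetical usage sketch of the decorator; the wrapped function and its arguments below are made up for illustration.

    @map_vcs_exceptions
    def lookup_commit(remote, commit_id):
        # if the remote call fails with an exception carrying
        # `_vcs_kind == 'lookup'`, it is re-raised as CommitDoesNotExistError
        return remote.lookup(commit_id, both=True)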
@@ -1,1314 +1,1317 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26 from collections import namedtuple
26 from collections import namedtuple
27 import json
27 import json
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import urllib
30 import urllib
31
31
32 from pylons.i18n.translation import _
32 from pylons.i18n.translation import _
33 from pylons.i18n.translation import lazy_ugettext
33 from pylons.i18n.translation import lazy_ugettext
34 from sqlalchemy import or_
34 from sqlalchemy import or_
35
35
36 from rhodecode.lib import helpers as h, hooks_utils, diffs
36 from rhodecode.lib import helpers as h, hooks_utils, diffs
37 from rhodecode.lib.compat import OrderedDict
37 from rhodecode.lib.compat import OrderedDict
38 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
38 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
39 from rhodecode.lib.markup_renderer import (
39 from rhodecode.lib.markup_renderer import (
40 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
40 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
41 from rhodecode.lib.utils import action_logger
41 from rhodecode.lib.utils import action_logger
42 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
42 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
43 from rhodecode.lib.vcs.backends.base import (
43 from rhodecode.lib.vcs.backends.base import (
44 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
44 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
45 from rhodecode.lib.vcs.conf import settings as vcs_settings
45 from rhodecode.lib.vcs.conf import settings as vcs_settings
46 from rhodecode.lib.vcs.exceptions import (
46 from rhodecode.lib.vcs.exceptions import (
47 CommitDoesNotExistError, EmptyRepositoryError)
47 CommitDoesNotExistError, EmptyRepositoryError)
48 from rhodecode.model import BaseModel
48 from rhodecode.model import BaseModel
49 from rhodecode.model.changeset_status import ChangesetStatusModel
49 from rhodecode.model.changeset_status import ChangesetStatusModel
50 from rhodecode.model.comment import ChangesetCommentsModel
50 from rhodecode.model.comment import ChangesetCommentsModel
51 from rhodecode.model.db import (
51 from rhodecode.model.db import (
52 PullRequest, PullRequestReviewers, ChangesetStatus,
52 PullRequest, PullRequestReviewers, ChangesetStatus,
53 PullRequestVersion, ChangesetComment)
53 PullRequestVersion, ChangesetComment)
54 from rhodecode.model.meta import Session
54 from rhodecode.model.meta import Session
55 from rhodecode.model.notification import NotificationModel, \
55 from rhodecode.model.notification import NotificationModel, \
56 EmailNotificationModel
56 EmailNotificationModel
57 from rhodecode.model.scm import ScmModel
57 from rhodecode.model.scm import ScmModel
58 from rhodecode.model.settings import VcsSettingsModel
58 from rhodecode.model.settings import VcsSettingsModel
59
59
60
60
61 log = logging.getLogger(__name__)
61 log = logging.getLogger(__name__)
62
62
63
63
64 # Data structure to hold the response data when updating commits during a pull
64 # Data structure to hold the response data when updating commits during a pull
65 # request update.
65 # request update.
66 UpdateResponse = namedtuple(
66 UpdateResponse = namedtuple(
67 'UpdateResponse', 'executed, reason, new, old, changes')
67 'UpdateResponse', 'executed, reason, new, old, changes')
68
68
69
69
70 class PullRequestModel(BaseModel):
70 class PullRequestModel(BaseModel):
71
71
72 cls = PullRequest
72 cls = PullRequest
73
73
74 DIFF_CONTEXT = 3
74 DIFF_CONTEXT = 3
75
75
76 MERGE_STATUS_MESSAGES = {
76 MERGE_STATUS_MESSAGES = {
77 MergeFailureReason.NONE: lazy_ugettext(
77 MergeFailureReason.NONE: lazy_ugettext(
78 'This pull request can be automatically merged.'),
78 'This pull request can be automatically merged.'),
79 MergeFailureReason.UNKNOWN: lazy_ugettext(
79 MergeFailureReason.UNKNOWN: lazy_ugettext(
80 'This pull request cannot be merged because of an unhandled'
80 'This pull request cannot be merged because of an unhandled'
81 ' exception.'),
81 ' exception.'),
82 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
82 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
83 'This pull request cannot be merged because of conflicts.'),
83 'This pull request cannot be merged because of conflicts.'),
84 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
84 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
85 'This pull request could not be merged because push to target'
85 'This pull request could not be merged because push to target'
86 ' failed.'),
86 ' failed.'),
87 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
87 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
88 'This pull request cannot be merged because the target is not a'
88 'This pull request cannot be merged because the target is not a'
89 ' head.'),
89 ' head.'),
90 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
90 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
91 'This pull request cannot be merged because the source contains'
91 'This pull request cannot be merged because the source contains'
92 ' more branches than the target.'),
92 ' more branches than the target.'),
93 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
93 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
94 'This pull request cannot be merged because the target has'
94 'This pull request cannot be merged because the target has'
95 ' multiple heads.'),
95 ' multiple heads.'),
96 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
96 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
97 'This pull request cannot be merged because the target repository'
97 'This pull request cannot be merged because the target repository'
98 ' is locked.'),
98 ' is locked.'),
99 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
99 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
100 'This pull request cannot be merged because the target or the '
100 'This pull request cannot be merged because the target or the '
101 'source reference is missing.'),
101 'source reference is missing.'),
102 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
102 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
103 'This pull request cannot be merged because the target '
103 'This pull request cannot be merged because the target '
104 'reference is missing.'),
104 'reference is missing.'),
105 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
105 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
106 'This pull request cannot be merged because the source '
106 'This pull request cannot be merged because the source '
107 'reference is missing.'),
107 'reference is missing.'),
108 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
109 'This pull request cannot be merged because of conflicts related '
110 'to sub repositories.'),
108 }
111 }
109
112
110 UPDATE_STATUS_MESSAGES = {
113 UPDATE_STATUS_MESSAGES = {
111 UpdateFailureReason.NONE: lazy_ugettext(
114 UpdateFailureReason.NONE: lazy_ugettext(
112 'Pull request update successful.'),
115 'Pull request update successful.'),
113 UpdateFailureReason.UNKNOWN: lazy_ugettext(
116 UpdateFailureReason.UNKNOWN: lazy_ugettext(
114 'Pull request update failed because of an unknown error.'),
117 'Pull request update failed because of an unknown error.'),
115 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
118 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
116 'No update needed because the source reference is already '
119 'No update needed because the source reference is already '
117 'up to date.'),
120 'up to date.'),
118 UpdateFailureReason.WRONG_REF_TPYE: lazy_ugettext(
121 UpdateFailureReason.WRONG_REF_TPYE: lazy_ugettext(
119 'Pull request cannot be updated because the reference type is '
122 'Pull request cannot be updated because the reference type is '
120 'not supported for an update.'),
123 'not supported for an update.'),
121 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
124 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
122 'This pull request cannot be updated because the target '
125 'This pull request cannot be updated because the target '
123 'reference is missing.'),
126 'reference is missing.'),
124 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
127 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
125 'This pull request cannot be updated because the source '
128 'This pull request cannot be updated because the source '
126 'reference is missing.'),
129 'reference is missing.'),
127 }
130 }
128
131
129 def __get_pull_request(self, pull_request):
132 def __get_pull_request(self, pull_request):
130 return self._get_instance(PullRequest, pull_request)
133 return self._get_instance(PullRequest, pull_request)
131
134
132 def _check_perms(self, perms, pull_request, user, api=False):
135 def _check_perms(self, perms, pull_request, user, api=False):
133 if not api:
136 if not api:
134 return h.HasRepoPermissionAny(*perms)(
137 return h.HasRepoPermissionAny(*perms)(
135 user=user, repo_name=pull_request.target_repo.repo_name)
138 user=user, repo_name=pull_request.target_repo.repo_name)
136 else:
139 else:
137 return h.HasRepoPermissionAnyApi(*perms)(
140 return h.HasRepoPermissionAnyApi(*perms)(
138 user=user, repo_name=pull_request.target_repo.repo_name)
141 user=user, repo_name=pull_request.target_repo.repo_name)
139
142
140 def check_user_read(self, pull_request, user, api=False):
143 def check_user_read(self, pull_request, user, api=False):
141 _perms = ('repository.admin', 'repository.write', 'repository.read',)
144 _perms = ('repository.admin', 'repository.write', 'repository.read',)
142 return self._check_perms(_perms, pull_request, user, api)
145 return self._check_perms(_perms, pull_request, user, api)
143
146
144 def check_user_merge(self, pull_request, user, api=False):
147 def check_user_merge(self, pull_request, user, api=False):
145 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
148 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
146 return self._check_perms(_perms, pull_request, user, api)
149 return self._check_perms(_perms, pull_request, user, api)
147
150
148 def check_user_update(self, pull_request, user, api=False):
151 def check_user_update(self, pull_request, user, api=False):
149 owner = user.user_id == pull_request.user_id
152 owner = user.user_id == pull_request.user_id
150 return self.check_user_merge(pull_request, user, api) or owner
153 return self.check_user_merge(pull_request, user, api) or owner
151
154
152 def check_user_delete(self, pull_request, user):
155 def check_user_delete(self, pull_request, user):
153 owner = user.user_id == pull_request.user_id
156 owner = user.user_id == pull_request.user_id
154 _perms = ('repository.admin',)
157 _perms = ('repository.admin',)
155 return self._check_perms(_perms, pull_request, user) or owner
158 return self._check_perms(_perms, pull_request, user) or owner
156
159
157 def check_user_change_status(self, pull_request, user, api=False):
160 def check_user_change_status(self, pull_request, user, api=False):
158 reviewer = user.user_id in [x.user_id for x in
161 reviewer = user.user_id in [x.user_id for x in
159 pull_request.reviewers]
162 pull_request.reviewers]
160 return self.check_user_update(pull_request, user, api) or reviewer
163 return self.check_user_update(pull_request, user, api) or reviewer
161
164
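    # Editorial sketch (not part of the original file): the permission checks
    # above build on each other -- read accepts admin/write/read, merge
    # requires write/admin (or the global hg.admin permission), update
    # additionally accepts the pull request owner, change_status additionally
    # accepts any listed reviewer, and delete accepts admin or the owner.
    # A hypothetical caller (user object and helpers are illustrative only)
    # might gate a UI action like this:
    #
    #   model = PullRequestModel()
    #   if model.check_user_merge(pull_request, current_user):
    #       show_merge_button()        # hypothetical helper
    #   elif model.check_user_read(pull_request, current_user):
    #       show_read_only_view()      # hypothetical helper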
162 def get(self, pull_request):
165 def get(self, pull_request):
163 return self.__get_pull_request(pull_request)
166 return self.__get_pull_request(pull_request)
164
167
165 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
168 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
166 opened_by=None, order_by=None,
169 opened_by=None, order_by=None,
167 order_dir='desc'):
170 order_dir='desc'):
168 repo = None
171 repo = None
169 if repo_name:
172 if repo_name:
170 repo = self._get_repo(repo_name)
173 repo = self._get_repo(repo_name)
171
174
172 q = PullRequest.query()
175 q = PullRequest.query()
173
176
174 # source or target
177 # source or target
175 if repo and source:
178 if repo and source:
176 q = q.filter(PullRequest.source_repo == repo)
179 q = q.filter(PullRequest.source_repo == repo)
177 elif repo:
180 elif repo:
178 q = q.filter(PullRequest.target_repo == repo)
181 q = q.filter(PullRequest.target_repo == repo)
179
182
180 # closed, opened
183 # closed, opened
181 if statuses:
184 if statuses:
182 q = q.filter(PullRequest.status.in_(statuses))
185 q = q.filter(PullRequest.status.in_(statuses))
183
186
184 # opened by filter
187 # opened by filter
185 if opened_by:
188 if opened_by:
186 q = q.filter(PullRequest.user_id.in_(opened_by))
189 q = q.filter(PullRequest.user_id.in_(opened_by))
187
190
188 if order_by:
191 if order_by:
189 order_map = {
192 order_map = {
190 'name_raw': PullRequest.pull_request_id,
193 'name_raw': PullRequest.pull_request_id,
191 'title': PullRequest.title,
194 'title': PullRequest.title,
192 'updated_on_raw': PullRequest.updated_on,
195 'updated_on_raw': PullRequest.updated_on,
193 'target_repo': PullRequest.target_repo_id
196 'target_repo': PullRequest.target_repo_id
194 }
197 }
195 if order_dir == 'asc':
198 if order_dir == 'asc':
196 q = q.order_by(order_map[order_by].asc())
199 q = q.order_by(order_map[order_by].asc())
197 else:
200 else:
198 q = q.order_by(order_map[order_by].desc())
201 q = q.order_by(order_map[order_by].desc())
199
202
200 return q
203 return q
201
204
202 def count_all(self, repo_name, source=False, statuses=None,
205 def count_all(self, repo_name, source=False, statuses=None,
203 opened_by=None):
206 opened_by=None):
204 """
207 """
205 Count the number of pull requests for a specific repository.
208 Count the number of pull requests for a specific repository.
206
209
207 :param repo_name: target or source repo
210 :param repo_name: target or source repo
208 :param source: boolean flag to specify if repo_name refers to source
211 :param source: boolean flag to specify if repo_name refers to source
209 :param statuses: list of pull request statuses
212 :param statuses: list of pull request statuses
210 :param opened_by: author user of the pull request
213 :param opened_by: author user of the pull request
211 :returns: int number of pull requests
214 :returns: int number of pull requests
212 """
215 """
213 q = self._prepare_get_all_query(
216 q = self._prepare_get_all_query(
214 repo_name, source=source, statuses=statuses, opened_by=opened_by)
217 repo_name, source=source, statuses=statuses, opened_by=opened_by)
215
218
216 return q.count()
219 return q.count()
217
220
218 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
221 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
219 offset=0, length=None, order_by=None, order_dir='desc'):
222 offset=0, length=None, order_by=None, order_dir='desc'):
220 """
223 """
221 Get all pull requests for a specific repository.
224 Get all pull requests for a specific repository.
222
225
223 :param repo_name: target or source repo
226 :param repo_name: target or source repo
224 :param source: boolean flag to specify if repo_name refers to source
227 :param source: boolean flag to specify if repo_name refers to source
225 :param statuses: list of pull request statuses
228 :param statuses: list of pull request statuses
226 :param opened_by: author user of the pull request
229 :param opened_by: author user of the pull request
227 :param offset: pagination offset
230 :param offset: pagination offset
228 :param length: length of returned list
231 :param length: length of returned list
229 :param order_by: order of the returned list
232 :param order_by: order of the returned list
230 :param order_dir: 'asc' or 'desc' ordering direction
233 :param order_dir: 'asc' or 'desc' ordering direction
231 :returns: list of pull requests
234 :returns: list of pull requests
232 """
235 """
233 q = self._prepare_get_all_query(
236 q = self._prepare_get_all_query(
234 repo_name, source=source, statuses=statuses, opened_by=opened_by,
237 repo_name, source=source, statuses=statuses, opened_by=opened_by,
235 order_by=order_by, order_dir=order_dir)
238 order_by=order_by, order_dir=order_dir)
236
239
237 if length:
240 if length:
238 pull_requests = q.limit(length).offset(offset).all()
241 pull_requests = q.limit(length).offset(offset).all()
239 else:
242 else:
240 pull_requests = q.all()
243 pull_requests = q.all()
241
244
242 return pull_requests
245 return pull_requests
243
246
244 def count_awaiting_review(self, repo_name, source=False, statuses=None,
247 def count_awaiting_review(self, repo_name, source=False, statuses=None,
245 opened_by=None):
248 opened_by=None):
246 """
249 """
247 Count the number of pull requests for a specific repository that are
250 Count the number of pull requests for a specific repository that are
248 awaiting review.
251 awaiting review.
249
252
250 :param repo_name: target or source repo
253 :param repo_name: target or source repo
251 :param source: boolean flag to specify if repo_name refers to source
254 :param source: boolean flag to specify if repo_name refers to source
252 :param statuses: list of pull request statuses
255 :param statuses: list of pull request statuses
253 :param opened_by: author user of the pull request
256 :param opened_by: author user of the pull request
254 :returns: int number of pull requests
257 :returns: int number of pull requests
255 """
258 """
256 pull_requests = self.get_awaiting_review(
259 pull_requests = self.get_awaiting_review(
257 repo_name, source=source, statuses=statuses, opened_by=opened_by)
260 repo_name, source=source, statuses=statuses, opened_by=opened_by)
258
261
259 return len(pull_requests)
262 return len(pull_requests)
260
263
261 def get_awaiting_review(self, repo_name, source=False, statuses=None,
264 def get_awaiting_review(self, repo_name, source=False, statuses=None,
262 opened_by=None, offset=0, length=None,
265 opened_by=None, offset=0, length=None,
263 order_by=None, order_dir='desc'):
266 order_by=None, order_dir='desc'):
264 """
267 """
265 Get all pull requests for a specific repository that are awaiting
268 Get all pull requests for a specific repository that are awaiting
266 review.
269 review.
267
270
268 :param repo_name: target or source repo
271 :param repo_name: target or source repo
269 :param source: boolean flag to specify if repo_name refers to source
272 :param source: boolean flag to specify if repo_name refers to source
270 :param statuses: list of pull request statuses
273 :param statuses: list of pull request statuses
271 :param opened_by: author user of the pull request
274 :param opened_by: author user of the pull request
272 :param offset: pagination offset
275 :param offset: pagination offset
273 :param length: length of returned list
276 :param length: length of returned list
274 :param order_by: order of the returned list
277 :param order_by: order of the returned list
275 :param order_dir: 'asc' or 'desc' ordering direction
278 :param order_dir: 'asc' or 'desc' ordering direction
276 :returns: list of pull requests
279 :returns: list of pull requests
277 """
280 """
278 pull_requests = self.get_all(
281 pull_requests = self.get_all(
279 repo_name, source=source, statuses=statuses, opened_by=opened_by,
282 repo_name, source=source, statuses=statuses, opened_by=opened_by,
280 order_by=order_by, order_dir=order_dir)
283 order_by=order_by, order_dir=order_dir)
281
284
282 _filtered_pull_requests = []
285 _filtered_pull_requests = []
283 for pr in pull_requests:
286 for pr in pull_requests:
284 status = pr.calculated_review_status()
287 status = pr.calculated_review_status()
285 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
288 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
286 ChangesetStatus.STATUS_UNDER_REVIEW]:
289 ChangesetStatus.STATUS_UNDER_REVIEW]:
287 _filtered_pull_requests.append(pr)
290 _filtered_pull_requests.append(pr)
288 if length:
291 if length:
289 return _filtered_pull_requests[offset:offset+length]
292 return _filtered_pull_requests[offset:offset+length]
290 else:
293 else:
291 return _filtered_pull_requests
294 return _filtered_pull_requests
292
295
293 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
296 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
294 opened_by=None, user_id=None):
297 opened_by=None, user_id=None):
295 """
298 """
296 Count the number of pull requests for a specific repository that are
299 Count the number of pull requests for a specific repository that are
297 awaiting review from a specific user.
300 awaiting review from a specific user.
298
301
299 :param repo_name: target or source repo
302 :param repo_name: target or source repo
300 :param source: boolean flag to specify if repo_name refers to source
303 :param source: boolean flag to specify if repo_name refers to source
301 :param statuses: list of pull request statuses
304 :param statuses: list of pull request statuses
302 :param opened_by: author user of the pull request
305 :param opened_by: author user of the pull request
303 :param user_id: reviewer user of the pull request
306 :param user_id: reviewer user of the pull request
304 :returns: int number of pull requests
307 :returns: int number of pull requests
305 """
308 """
306 pull_requests = self.get_awaiting_my_review(
309 pull_requests = self.get_awaiting_my_review(
307 repo_name, source=source, statuses=statuses, opened_by=opened_by,
310 repo_name, source=source, statuses=statuses, opened_by=opened_by,
308 user_id=user_id)
311 user_id=user_id)
309
312
310 return len(pull_requests)
313 return len(pull_requests)
311
314
312 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
315 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
313 opened_by=None, user_id=None, offset=0,
316 opened_by=None, user_id=None, offset=0,
314 length=None, order_by=None, order_dir='desc'):
317 length=None, order_by=None, order_dir='desc'):
315 """
318 """
316 Get all pull requests for a specific repository that are awaiting
319 Get all pull requests for a specific repository that are awaiting
317 review from a specific user.
320 review from a specific user.
318
321
319 :param repo_name: target or source repo
322 :param repo_name: target or source repo
320 :param source: boolean flag to specify if repo_name refers to source
323 :param source: boolean flag to specify if repo_name refers to source
321 :param statuses: list of pull request statuses
324 :param statuses: list of pull request statuses
322 :param opened_by: author user of the pull request
325 :param opened_by: author user of the pull request
323 :param user_id: reviewer user of the pull request
326 :param user_id: reviewer user of the pull request
324 :param offset: pagination offset
327 :param offset: pagination offset
325 :param length: length of returned list
328 :param length: length of returned list
326 :param order_by: order of the returned list
329 :param order_by: order of the returned list
327 :param order_dir: 'asc' or 'desc' ordering direction
330 :param order_dir: 'asc' or 'desc' ordering direction
328 :returns: list of pull requests
331 :returns: list of pull requests
329 """
332 """
330 pull_requests = self.get_all(
333 pull_requests = self.get_all(
331 repo_name, source=source, statuses=statuses, opened_by=opened_by,
334 repo_name, source=source, statuses=statuses, opened_by=opened_by,
332 order_by=order_by, order_dir=order_dir)
335 order_by=order_by, order_dir=order_dir)
333
336
334 _my = PullRequestModel().get_not_reviewed(user_id)
337 _my = PullRequestModel().get_not_reviewed(user_id)
335 my_participation = []
338 my_participation = []
336 for pr in pull_requests:
339 for pr in pull_requests:
337 if pr in _my:
340 if pr in _my:
338 my_participation.append(pr)
341 my_participation.append(pr)
339 _filtered_pull_requests = my_participation
342 _filtered_pull_requests = my_participation
340 if length:
343 if length:
341 return _filtered_pull_requests[offset:offset+length]
344 return _filtered_pull_requests[offset:offset+length]
342 else:
345 else:
343 return _filtered_pull_requests
346 return _filtered_pull_requests
344
347
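    # Editorial sketch (not part of the original file): get_awaiting_my_review
    # first fetches all matching pull requests and then intersects them with
    # get_not_reviewed(user_id), so the offset/length slicing happens in
    # Python rather than in SQL. Illustrative call only (repo name, status
    # string and user id are made up):
    #
    #   prs = PullRequestModel().get_awaiting_my_review(
    #       'acme/backend', statuses=['new'], user_id=42,
    #       offset=0, length=10)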
345 def get_not_reviewed(self, user_id):
348 def get_not_reviewed(self, user_id):
346 return [
349 return [
347 x.pull_request for x in PullRequestReviewers.query().filter(
350 x.pull_request for x in PullRequestReviewers.query().filter(
348 PullRequestReviewers.user_id == user_id).all()
351 PullRequestReviewers.user_id == user_id).all()
349 ]
352 ]
350
353
351 def _prepare_participating_query(self, user_id=None, statuses=None,
354 def _prepare_participating_query(self, user_id=None, statuses=None,
352 order_by=None, order_dir='desc'):
355 order_by=None, order_dir='desc'):
353 q = PullRequest.query()
356 q = PullRequest.query()
354 if user_id:
357 if user_id:
355 reviewers_subquery = Session().query(
358 reviewers_subquery = Session().query(
356 PullRequestReviewers.pull_request_id).filter(
359 PullRequestReviewers.pull_request_id).filter(
357 PullRequestReviewers.user_id == user_id).subquery()
360 PullRequestReviewers.user_id == user_id).subquery()
358 user_filter = or_(
361 user_filter = or_(
359 PullRequest.user_id == user_id,
362 PullRequest.user_id == user_id,
360 PullRequest.pull_request_id.in_(reviewers_subquery)
363 PullRequest.pull_request_id.in_(reviewers_subquery)
361 )
364 )
362 q = PullRequest.query().filter(user_filter)
365 q = PullRequest.query().filter(user_filter)
363
366
364 # closed, opened
367 # closed, opened
365 if statuses:
368 if statuses:
366 q = q.filter(PullRequest.status.in_(statuses))
369 q = q.filter(PullRequest.status.in_(statuses))
367
370
368 if order_by:
371 if order_by:
369 order_map = {
372 order_map = {
370 'name_raw': PullRequest.pull_request_id,
373 'name_raw': PullRequest.pull_request_id,
371 'title': PullRequest.title,
374 'title': PullRequest.title,
372 'updated_on_raw': PullRequest.updated_on,
375 'updated_on_raw': PullRequest.updated_on,
373 'target_repo': PullRequest.target_repo_id
376 'target_repo': PullRequest.target_repo_id
374 }
377 }
375 if order_dir == 'asc':
378 if order_dir == 'asc':
376 q = q.order_by(order_map[order_by].asc())
379 q = q.order_by(order_map[order_by].asc())
377 else:
380 else:
378 q = q.order_by(order_map[order_by].desc())
381 q = q.order_by(order_map[order_by].desc())
379
382
380 return q
383 return q
381
384
382 def count_im_participating_in(self, user_id=None, statuses=None):
385 def count_im_participating_in(self, user_id=None, statuses=None):
383 q = self._prepare_participating_query(user_id, statuses=statuses)
386 q = self._prepare_participating_query(user_id, statuses=statuses)
384 return q.count()
387 return q.count()
385
388
386 def get_im_participating_in(
389 def get_im_participating_in(
387 self, user_id=None, statuses=None, offset=0,
390 self, user_id=None, statuses=None, offset=0,
388 length=None, order_by=None, order_dir='desc'):
391 length=None, order_by=None, order_dir='desc'):
389 """
392 """
390 Get all pull requests that I'm participating in or have opened
393 Get all pull requests that I'm participating in or have opened
391 """
394 """
392
395
393 q = self._prepare_participating_query(
396 q = self._prepare_participating_query(
394 user_id, statuses=statuses, order_by=order_by,
397 user_id, statuses=statuses, order_by=order_by,
395 order_dir=order_dir)
398 order_dir=order_dir)
396
399
397 if length:
400 if length:
398 pull_requests = q.limit(length).offset(offset).all()
401 pull_requests = q.limit(length).offset(offset).all()
399 else:
402 else:
400 pull_requests = q.all()
403 pull_requests = q.all()
401
404
402 return pull_requests
405 return pull_requests
403
406
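    # Editorial sketch (not part of the original file): the "participating"
    # helpers above match pull requests the user either authored or reviews,
    # via a subquery on PullRequestReviewers. Illustrative use (user id and
    # status string are made up):
    #
    #   model = PullRequestModel()
    #   total = model.count_im_participating_in(user_id=42, statuses=['new'])
    #   page = model.get_im_participating_in(
    #       user_id=42, statuses=['new'], offset=0, length=20,
    #       order_by='updated_on_raw', order_dir='desc')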
404 def get_versions(self, pull_request):
407 def get_versions(self, pull_request):
405 """
408 """
406 returns the versions of a pull request sorted by version ID ascending
409 returns the versions of a pull request sorted by version ID ascending
407 """
410 """
408 return PullRequestVersion.query()\
411 return PullRequestVersion.query()\
409 .filter(PullRequestVersion.pull_request == pull_request)\
412 .filter(PullRequestVersion.pull_request == pull_request)\
410 .order_by(PullRequestVersion.pull_request_version_id.asc())\
413 .order_by(PullRequestVersion.pull_request_version_id.asc())\
411 .all()
414 .all()
412
415
413 def create(self, created_by, source_repo, source_ref, target_repo,
416 def create(self, created_by, source_repo, source_ref, target_repo,
414 target_ref, revisions, reviewers, title, description=None):
417 target_ref, revisions, reviewers, title, description=None):
415 created_by_user = self._get_user(created_by)
418 created_by_user = self._get_user(created_by)
416 source_repo = self._get_repo(source_repo)
419 source_repo = self._get_repo(source_repo)
417 target_repo = self._get_repo(target_repo)
420 target_repo = self._get_repo(target_repo)
418
421
419 pull_request = PullRequest()
422 pull_request = PullRequest()
420 pull_request.source_repo = source_repo
423 pull_request.source_repo = source_repo
421 pull_request.source_ref = source_ref
424 pull_request.source_ref = source_ref
422 pull_request.target_repo = target_repo
425 pull_request.target_repo = target_repo
423 pull_request.target_ref = target_ref
426 pull_request.target_ref = target_ref
424 pull_request.revisions = revisions
427 pull_request.revisions = revisions
425 pull_request.title = title
428 pull_request.title = title
426 pull_request.description = description
429 pull_request.description = description
427 pull_request.author = created_by_user
430 pull_request.author = created_by_user
428
431
429 Session().add(pull_request)
432 Session().add(pull_request)
430 Session().flush()
433 Session().flush()
431
434
432 reviewer_ids = set()
435 reviewer_ids = set()
433 # members / reviewers
436 # members / reviewers
434 for reviewer_object in reviewers:
437 for reviewer_object in reviewers:
435 if isinstance(reviewer_object, tuple):
438 if isinstance(reviewer_object, tuple):
436 user_id, reasons = reviewer_object
439 user_id, reasons = reviewer_object
437 else:
440 else:
438 user_id, reasons = reviewer_object, []
441 user_id, reasons = reviewer_object, []
439
442
440 user = self._get_user(user_id)
443 user = self._get_user(user_id)
441 reviewer_ids.add(user.user_id)
444 reviewer_ids.add(user.user_id)
442
445
443 reviewer = PullRequestReviewers(user, pull_request, reasons)
446 reviewer = PullRequestReviewers(user, pull_request, reasons)
444 Session().add(reviewer)
447 Session().add(reviewer)
445
448
446 # Set approval status to "Under Review" for all commits which are
449 # Set approval status to "Under Review" for all commits which are
447 # part of this pull request.
450 # part of this pull request.
448 ChangesetStatusModel().set_status(
451 ChangesetStatusModel().set_status(
449 repo=target_repo,
452 repo=target_repo,
450 status=ChangesetStatus.STATUS_UNDER_REVIEW,
453 status=ChangesetStatus.STATUS_UNDER_REVIEW,
451 user=created_by_user,
454 user=created_by_user,
452 pull_request=pull_request
455 pull_request=pull_request
453 )
456 )
454
457
455 self.notify_reviewers(pull_request, reviewer_ids)
458 self.notify_reviewers(pull_request, reviewer_ids)
456 self._trigger_pull_request_hook(
459 self._trigger_pull_request_hook(
457 pull_request, created_by_user, 'create')
460 pull_request, created_by_user, 'create')
458
461
459 return pull_request
462 return pull_request
460
463
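    # Editorial sketch (not part of the original file): `create` accepts
    # reviewers either as plain user ids or as (user_id, reasons) tuples, and
    # refs in the "type:name:commit_id" form used elsewhere in this module.
    # All literal values below are hypothetical:
    #
    #   pr = PullRequestModel().create(
    #       created_by=42,
    #       source_repo='acme/backend-fork',
    #       source_ref='branch:feature-x:abcdef0123456789',
    #       target_repo='acme/backend',
    #       target_ref='branch:default:0123456789abcdef',
    #       revisions=['abcdef0123456789'],
    #       reviewers=[7, (13, ['repository owner'])],
    #       title='Add feature X',
    #       description='Optional longer description')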
461 def _trigger_pull_request_hook(self, pull_request, user, action):
464 def _trigger_pull_request_hook(self, pull_request, user, action):
462 pull_request = self.__get_pull_request(pull_request)
465 pull_request = self.__get_pull_request(pull_request)
463 target_scm = pull_request.target_repo.scm_instance()
466 target_scm = pull_request.target_repo.scm_instance()
464 if action == 'create':
467 if action == 'create':
465 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
468 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
466 elif action == 'merge':
469 elif action == 'merge':
467 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
470 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
468 elif action == 'close':
471 elif action == 'close':
469 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
472 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
470 elif action == 'review_status_change':
473 elif action == 'review_status_change':
471 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
474 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
472 elif action == 'update':
475 elif action == 'update':
473 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
476 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
474 else:
477 else:
475 return
478 return
476
479
477 trigger_hook(
480 trigger_hook(
478 username=user.username,
481 username=user.username,
479 repo_name=pull_request.target_repo.repo_name,
482 repo_name=pull_request.target_repo.repo_name,
480 repo_alias=target_scm.alias,
483 repo_alias=target_scm.alias,
481 pull_request=pull_request)
484 pull_request=pull_request)
482
485
483 def _get_commit_ids(self, pull_request):
486 def _get_commit_ids(self, pull_request):
484 """
487 """
485 Return the commit ids of the merged pull request.
488 Return the commit ids of the merged pull request.
486
489
487 This method does not yet deal correctly with the lack of autoupdates
490 This method does not yet deal correctly with the lack of autoupdates
488 or with implicit target updates.
491 or with implicit target updates.
489 For example: if a commit in the source repo is already in the target, it
492 For example: if a commit in the source repo is already in the target, it
490 will still be reported.
493 will still be reported.
491 """
494 """
492 merge_rev = pull_request.merge_rev
495 merge_rev = pull_request.merge_rev
493 if merge_rev is None:
496 if merge_rev is None:
494 raise ValueError('This pull request was not merged yet')
497 raise ValueError('This pull request was not merged yet')
495
498
496 commit_ids = list(pull_request.revisions)
499 commit_ids = list(pull_request.revisions)
497 if merge_rev not in commit_ids:
500 if merge_rev not in commit_ids:
498 commit_ids.append(merge_rev)
501 commit_ids.append(merge_rev)
499
502
500 return commit_ids
503 return commit_ids
501
504
502 def merge(self, pull_request, user, extras):
505 def merge(self, pull_request, user, extras):
503 log.debug("Merging pull request %s", pull_request.pull_request_id)
506 log.debug("Merging pull request %s", pull_request.pull_request_id)
504 merge_state = self._merge_pull_request(pull_request, user, extras)
507 merge_state = self._merge_pull_request(pull_request, user, extras)
505 if merge_state.executed:
508 if merge_state.executed:
506 log.debug(
509 log.debug(
507 "Merge was successful, updating the pull request comments.")
510 "Merge was successful, updating the pull request comments.")
508 self._comment_and_close_pr(pull_request, user, merge_state)
511 self._comment_and_close_pr(pull_request, user, merge_state)
509 self._log_action('user_merged_pull_request', user, pull_request)
512 self._log_action('user_merged_pull_request', user, pull_request)
510 else:
513 else:
511 log.warn("Merge failed, not updating the pull request.")
514 log.warn("Merge failed, not updating the pull request.")
512 return merge_state
515 return merge_state
513
516
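    # Editorial sketch (not part of the original file): `merge` returns the
    # merge state from the backend, and callers are expected to inspect
    # `executed` before assuming the pull request was closed. Illustrative
    # only -- `extras` would normally come from the request environment:
    #
    #   state = PullRequestModel().merge(pull_request, current_user, extras={})
    #   if not state.executed:
    #       log.warn("Merge of PR %s failed", pull_request.pull_request_id)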
514 def _merge_pull_request(self, pull_request, user, extras):
517 def _merge_pull_request(self, pull_request, user, extras):
515 target_vcs = pull_request.target_repo.scm_instance()
518 target_vcs = pull_request.target_repo.scm_instance()
516 source_vcs = pull_request.source_repo.scm_instance()
519 source_vcs = pull_request.source_repo.scm_instance()
517 target_ref = self._refresh_reference(
520 target_ref = self._refresh_reference(
518 pull_request.target_ref_parts, target_vcs)
521 pull_request.target_ref_parts, target_vcs)
519
522
520 message = _(
523 message = _(
521 'Merge pull request #%(pr_id)s from '
524 'Merge pull request #%(pr_id)s from '
522 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
525 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
523 'pr_id': pull_request.pull_request_id,
526 'pr_id': pull_request.pull_request_id,
524 'source_repo': source_vcs.name,
527 'source_repo': source_vcs.name,
525 'source_ref_name': pull_request.source_ref_parts.name,
528 'source_ref_name': pull_request.source_ref_parts.name,
526 'pr_title': pull_request.title
529 'pr_title': pull_request.title
527 }
530 }
528
531
529 workspace_id = self._workspace_id(pull_request)
532 workspace_id = self._workspace_id(pull_request)
530 use_rebase = self._use_rebase_for_merging(pull_request)
533 use_rebase = self._use_rebase_for_merging(pull_request)
531
534
532 callback_daemon, extras = prepare_callback_daemon(
535 callback_daemon, extras = prepare_callback_daemon(
533 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
536 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
534 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
537 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
535
538
536 with callback_daemon:
539 with callback_daemon:
537 # TODO: johbo: Implement a clean way to run a config_override
540 # TODO: johbo: Implement a clean way to run a config_override
538 # for a single call.
541 # for a single call.
539 target_vcs.config.set(
542 target_vcs.config.set(
540 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
543 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
541 merge_state = target_vcs.merge(
544 merge_state = target_vcs.merge(
542 target_ref, source_vcs, pull_request.source_ref_parts,
545 target_ref, source_vcs, pull_request.source_ref_parts,
543 workspace_id, user_name=user.username,
546 workspace_id, user_name=user.username,
544 user_email=user.email, message=message, use_rebase=use_rebase)
547 user_email=user.email, message=message, use_rebase=use_rebase)
545 return merge_state
548 return merge_state
546
549
547 def _comment_and_close_pr(self, pull_request, user, merge_state):
550 def _comment_and_close_pr(self, pull_request, user, merge_state):
548 pull_request.merge_rev = merge_state.merge_ref.commit_id
551 pull_request.merge_rev = merge_state.merge_ref.commit_id
549 pull_request.updated_on = datetime.datetime.now()
552 pull_request.updated_on = datetime.datetime.now()
550
553
551 ChangesetCommentsModel().create(
554 ChangesetCommentsModel().create(
552 text=unicode(_('Pull request merged and closed')),
555 text=unicode(_('Pull request merged and closed')),
553 repo=pull_request.target_repo.repo_id,
556 repo=pull_request.target_repo.repo_id,
554 user=user.user_id,
557 user=user.user_id,
555 pull_request=pull_request.pull_request_id,
558 pull_request=pull_request.pull_request_id,
556 f_path=None,
559 f_path=None,
557 line_no=None,
560 line_no=None,
558 closing_pr=True
561 closing_pr=True
559 )
562 )
560
563
561 Session().add(pull_request)
564 Session().add(pull_request)
562 Session().flush()
565 Session().flush()
563 # TODO: paris: replace invalidation with less radical solution
566 # TODO: paris: replace invalidation with less radical solution
564 ScmModel().mark_for_invalidation(
567 ScmModel().mark_for_invalidation(
565 pull_request.target_repo.repo_name)
568 pull_request.target_repo.repo_name)
566 self._trigger_pull_request_hook(pull_request, user, 'merge')
569 self._trigger_pull_request_hook(pull_request, user, 'merge')
567
570
568 def has_valid_update_type(self, pull_request):
571 def has_valid_update_type(self, pull_request):
569 source_ref_type = pull_request.source_ref_parts.type
572 source_ref_type = pull_request.source_ref_parts.type
570 return source_ref_type in ['book', 'branch', 'tag']
573 return source_ref_type in ['book', 'branch', 'tag']
571
574
572 def update_commits(self, pull_request):
575 def update_commits(self, pull_request):
573 """
576 """
574 Get the updated list of commits for the pull request
577 Get the updated list of commits for the pull request
575 and return an ``UpdateResponse`` carrying the new pull request version
578 and return an ``UpdateResponse`` carrying the new pull request version
576 and the commit changes processed by this update action
579 and the commit changes processed by this update action
577 """
580 """
578 pull_request = self.__get_pull_request(pull_request)
581 pull_request = self.__get_pull_request(pull_request)
579 source_ref_type = pull_request.source_ref_parts.type
582 source_ref_type = pull_request.source_ref_parts.type
580 source_ref_name = pull_request.source_ref_parts.name
583 source_ref_name = pull_request.source_ref_parts.name
581 source_ref_id = pull_request.source_ref_parts.commit_id
584 source_ref_id = pull_request.source_ref_parts.commit_id
582
585
583 if not self.has_valid_update_type(pull_request):
586 if not self.has_valid_update_type(pull_request):
584 log.debug(
587 log.debug(
585 "Skipping update of pull request %s due to ref type: %s",
588 "Skipping update of pull request %s due to ref type: %s",
586 pull_request, source_ref_type)
589 pull_request, source_ref_type)
587 return UpdateResponse(
590 return UpdateResponse(
588 executed=False,
591 executed=False,
589 reason=UpdateFailureReason.WRONG_REF_TPYE,
592 reason=UpdateFailureReason.WRONG_REF_TPYE,
590 old=pull_request, new=None, changes=None)
593 old=pull_request, new=None, changes=None)
591
594
592 source_repo = pull_request.source_repo.scm_instance()
595 source_repo = pull_request.source_repo.scm_instance()
593 try:
596 try:
594 source_commit = source_repo.get_commit(commit_id=source_ref_name)
597 source_commit = source_repo.get_commit(commit_id=source_ref_name)
595 except CommitDoesNotExistError:
598 except CommitDoesNotExistError:
596 return UpdateResponse(
599 return UpdateResponse(
597 executed=False,
600 executed=False,
598 reason=UpdateFailureReason.MISSING_SOURCE_REF,
601 reason=UpdateFailureReason.MISSING_SOURCE_REF,
599 old=pull_request, new=None, changes=None)
602 old=pull_request, new=None, changes=None)
600
603
601 if source_ref_id == source_commit.raw_id:
604 if source_ref_id == source_commit.raw_id:
602 log.debug("Nothing changed in pull request %s", pull_request)
605 log.debug("Nothing changed in pull request %s", pull_request)
603 return UpdateResponse(
606 return UpdateResponse(
604 executed=False,
607 executed=False,
605 reason=UpdateFailureReason.NO_CHANGE,
608 reason=UpdateFailureReason.NO_CHANGE,
606 old=pull_request, new=None, changes=None)
609 old=pull_request, new=None, changes=None)
607
610
608 # Finally there is a need for an update
611 # Finally there is a need for an update
609 pull_request_version = self._create_version_from_snapshot(pull_request)
612 pull_request_version = self._create_version_from_snapshot(pull_request)
610 self._link_comments_to_version(pull_request_version)
613 self._link_comments_to_version(pull_request_version)
611
614
612 target_ref_type = pull_request.target_ref_parts.type
615 target_ref_type = pull_request.target_ref_parts.type
613 target_ref_name = pull_request.target_ref_parts.name
616 target_ref_name = pull_request.target_ref_parts.name
614 target_ref_id = pull_request.target_ref_parts.commit_id
617 target_ref_id = pull_request.target_ref_parts.commit_id
615 target_repo = pull_request.target_repo.scm_instance()
618 target_repo = pull_request.target_repo.scm_instance()
616
619
617 try:
620 try:
618 if target_ref_type in ('tag', 'branch', 'book'):
621 if target_ref_type in ('tag', 'branch', 'book'):
619 target_commit = target_repo.get_commit(target_ref_name)
622 target_commit = target_repo.get_commit(target_ref_name)
620 else:
623 else:
621 target_commit = target_repo.get_commit(target_ref_id)
624 target_commit = target_repo.get_commit(target_ref_id)
622 except CommitDoesNotExistError:
625 except CommitDoesNotExistError:
623 return UpdateResponse(
626 return UpdateResponse(
624 executed=False,
627 executed=False,
625 reason=UpdateFailureReason.MISSING_TARGET_REF,
628 reason=UpdateFailureReason.MISSING_TARGET_REF,
626 old=pull_request, new=None, changes=None)
629 old=pull_request, new=None, changes=None)
627
630
628 # re-compute commit ids
631 # re-compute commit ids
629 old_commit_ids = set(pull_request.revisions)
632 old_commit_ids = set(pull_request.revisions)
630 pre_load = ["author", "branch", "date", "message"]
633 pre_load = ["author", "branch", "date", "message"]
631 commit_ranges = target_repo.compare(
634 commit_ranges = target_repo.compare(
632 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
635 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
633 pre_load=pre_load)
636 pre_load=pre_load)
634
637
635 ancestor = target_repo.get_common_ancestor(
638 ancestor = target_repo.get_common_ancestor(
636 target_commit.raw_id, source_commit.raw_id, source_repo)
639 target_commit.raw_id, source_commit.raw_id, source_repo)
637
640
638 pull_request.source_ref = '%s:%s:%s' % (
641 pull_request.source_ref = '%s:%s:%s' % (
639 source_ref_type, source_ref_name, source_commit.raw_id)
642 source_ref_type, source_ref_name, source_commit.raw_id)
640 pull_request.target_ref = '%s:%s:%s' % (
643 pull_request.target_ref = '%s:%s:%s' % (
641 target_ref_type, target_ref_name, ancestor)
644 target_ref_type, target_ref_name, ancestor)
642 pull_request.revisions = [
645 pull_request.revisions = [
643 commit.raw_id for commit in reversed(commit_ranges)]
646 commit.raw_id for commit in reversed(commit_ranges)]
644 pull_request.updated_on = datetime.datetime.now()
647 pull_request.updated_on = datetime.datetime.now()
645 Session().add(pull_request)
648 Session().add(pull_request)
646 new_commit_ids = set(pull_request.revisions)
649 new_commit_ids = set(pull_request.revisions)
647
650
648 changes = self._calculate_commit_id_changes(
651 changes = self._calculate_commit_id_changes(
649 old_commit_ids, new_commit_ids)
652 old_commit_ids, new_commit_ids)
650
653
651 old_diff_data, new_diff_data = self._generate_update_diffs(
654 old_diff_data, new_diff_data = self._generate_update_diffs(
652 pull_request, pull_request_version)
655 pull_request, pull_request_version)
653
656
654 ChangesetCommentsModel().outdate_comments(
657 ChangesetCommentsModel().outdate_comments(
655 pull_request, old_diff_data=old_diff_data,
658 pull_request, old_diff_data=old_diff_data,
656 new_diff_data=new_diff_data)
659 new_diff_data=new_diff_data)
657
660
658 file_changes = self._calculate_file_changes(
661 file_changes = self._calculate_file_changes(
659 old_diff_data, new_diff_data)
662 old_diff_data, new_diff_data)
660
663
661 # Add an automatic comment to the pull request
664 # Add an automatic comment to the pull request
662 update_comment = ChangesetCommentsModel().create(
665 update_comment = ChangesetCommentsModel().create(
663 text=self._render_update_message(changes, file_changes),
666 text=self._render_update_message(changes, file_changes),
664 repo=pull_request.target_repo,
667 repo=pull_request.target_repo,
665 user=pull_request.author,
668 user=pull_request.author,
666 pull_request=pull_request,
669 pull_request=pull_request,
667 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
670 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
668
671
669 # Update status to "Under Review" for added commits
672 # Update status to "Under Review" for added commits
670 for commit_id in changes.added:
673 for commit_id in changes.added:
671 ChangesetStatusModel().set_status(
674 ChangesetStatusModel().set_status(
672 repo=pull_request.source_repo,
675 repo=pull_request.source_repo,
673 status=ChangesetStatus.STATUS_UNDER_REVIEW,
676 status=ChangesetStatus.STATUS_UNDER_REVIEW,
674 comment=update_comment,
677 comment=update_comment,
675 user=pull_request.author,
678 user=pull_request.author,
676 pull_request=pull_request,
679 pull_request=pull_request,
677 revision=commit_id)
680 revision=commit_id)
678
681
679 log.debug(
682 log.debug(
680 'Updated pull request %s, added_ids: %s, common_ids: %s, '
683 'Updated pull request %s, added_ids: %s, common_ids: %s, '
681 'removed_ids: %s', pull_request.pull_request_id,
684 'removed_ids: %s', pull_request.pull_request_id,
682 changes.added, changes.common, changes.removed)
685 changes.added, changes.common, changes.removed)
683 log.debug('Updated pull request with the following file changes: %s',
686 log.debug('Updated pull request with the following file changes: %s',
684 file_changes)
687 file_changes)
685
688
686 log.info(
689 log.info(
687 "Updated pull request %s from commit %s to commit %s, "
690 "Updated pull request %s from commit %s to commit %s, "
688 "stored new version %s of this pull request.",
691 "stored new version %s of this pull request.",
689 pull_request.pull_request_id, source_ref_id,
692 pull_request.pull_request_id, source_ref_id,
690 pull_request.source_ref_parts.commit_id,
693 pull_request.source_ref_parts.commit_id,
691 pull_request_version.pull_request_version_id)
694 pull_request_version.pull_request_version_id)
692 Session().commit()
695 Session().commit()
693 self._trigger_pull_request_hook(pull_request, pull_request.author,
696 self._trigger_pull_request_hook(pull_request, pull_request.author,
694 'update')
697 'update')
695
698
696 return UpdateResponse(
699 return UpdateResponse(
697 executed=True, reason=UpdateFailureReason.NONE,
700 executed=True, reason=UpdateFailureReason.NONE,
698 old=pull_request, new=pull_request_version, changes=changes)
701 old=pull_request, new=pull_request_version, changes=changes)
699
702
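    # Editorial sketch (not part of the original file): `update_commits`
    # always returns an UpdateResponse; `executed` is False when nothing was
    # done and `reason` then holds one of the UpdateFailureReason values
    # handled above. Illustrative only (notify_about is a hypothetical
    # helper):
    #
    #   resp = PullRequestModel().update_commits(pull_request)
    #   if resp.executed:
    #       notify_about(resp.changes.added)
    #   elif resp.reason == UpdateFailureReason.NO_CHANGE:
    #       pass  # source reference already up to date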
700 def _create_version_from_snapshot(self, pull_request):
703 def _create_version_from_snapshot(self, pull_request):
701 version = PullRequestVersion()
704 version = PullRequestVersion()
702 version.title = pull_request.title
705 version.title = pull_request.title
703 version.description = pull_request.description
706 version.description = pull_request.description
704 version.status = pull_request.status
707 version.status = pull_request.status
705 version.created_on = pull_request.created_on
708 version.created_on = pull_request.created_on
706 version.updated_on = pull_request.updated_on
709 version.updated_on = pull_request.updated_on
707 version.user_id = pull_request.user_id
710 version.user_id = pull_request.user_id
708 version.source_repo = pull_request.source_repo
711 version.source_repo = pull_request.source_repo
709 version.source_ref = pull_request.source_ref
712 version.source_ref = pull_request.source_ref
710 version.target_repo = pull_request.target_repo
713 version.target_repo = pull_request.target_repo
711 version.target_ref = pull_request.target_ref
714 version.target_ref = pull_request.target_ref
712
715
713 version._last_merge_source_rev = pull_request._last_merge_source_rev
716 version._last_merge_source_rev = pull_request._last_merge_source_rev
714 version._last_merge_target_rev = pull_request._last_merge_target_rev
717 version._last_merge_target_rev = pull_request._last_merge_target_rev
715 version._last_merge_status = pull_request._last_merge_status
718 version._last_merge_status = pull_request._last_merge_status
716 version.shadow_merge_ref = pull_request.shadow_merge_ref
719 version.shadow_merge_ref = pull_request.shadow_merge_ref
717 version.merge_rev = pull_request.merge_rev
720 version.merge_rev = pull_request.merge_rev
718
721
719 version.revisions = pull_request.revisions
722 version.revisions = pull_request.revisions
720 version.pull_request = pull_request
723 version.pull_request = pull_request
721 Session().add(version)
724 Session().add(version)
722 Session().flush()
725 Session().flush()
723
726
724 return version
727 return version
725
728
726 def _generate_update_diffs(self, pull_request, pull_request_version):
729 def _generate_update_diffs(self, pull_request, pull_request_version):
727 diff_context = (
730 diff_context = (
728 self.DIFF_CONTEXT +
731 self.DIFF_CONTEXT +
729 ChangesetCommentsModel.needed_extra_diff_context())
732 ChangesetCommentsModel.needed_extra_diff_context())
730 old_diff = self._get_diff_from_pr_or_version(
733 old_diff = self._get_diff_from_pr_or_version(
731 pull_request_version, context=diff_context)
734 pull_request_version, context=diff_context)
732 new_diff = self._get_diff_from_pr_or_version(
735 new_diff = self._get_diff_from_pr_or_version(
733 pull_request, context=diff_context)
736 pull_request, context=diff_context)
734
737
735 old_diff_data = diffs.DiffProcessor(old_diff)
738 old_diff_data = diffs.DiffProcessor(old_diff)
736 old_diff_data.prepare()
739 old_diff_data.prepare()
737 new_diff_data = diffs.DiffProcessor(new_diff)
740 new_diff_data = diffs.DiffProcessor(new_diff)
738 new_diff_data.prepare()
741 new_diff_data.prepare()
739
742
740 return old_diff_data, new_diff_data
743 return old_diff_data, new_diff_data
741
744
742 def _link_comments_to_version(self, pull_request_version):
745 def _link_comments_to_version(self, pull_request_version):
743 """
746 """
744 Link all unlinked comments of this pull request to the given version.
747 Link all unlinked comments of this pull request to the given version.
745
748
746 :param pull_request_version: The `PullRequestVersion` to which
749 :param pull_request_version: The `PullRequestVersion` to which
747 the comments shall be linked.
750 the comments shall be linked.
748
751
749 """
752 """
750 pull_request = pull_request_version.pull_request
753 pull_request = pull_request_version.pull_request
751 comments = ChangesetComment.query().filter(
754 comments = ChangesetComment.query().filter(
752 # TODO: johbo: Should we query for the repo at all here?
755 # TODO: johbo: Should we query for the repo at all here?
753 # Pending decision on how comments of PRs are to be related
756 # Pending decision on how comments of PRs are to be related
754 # to either the source repo, the target repo or no repo at all.
757 # to either the source repo, the target repo or no repo at all.
755 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
758 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
756 ChangesetComment.pull_request == pull_request,
759 ChangesetComment.pull_request == pull_request,
757 ChangesetComment.pull_request_version == None)
760 ChangesetComment.pull_request_version == None)
758
761
759 # TODO: johbo: Find out why this breaks if it is done in a bulk
762 # TODO: johbo: Find out why this breaks if it is done in a bulk
760 # operation.
763 # operation.
761 for comment in comments:
764 for comment in comments:
762 comment.pull_request_version_id = (
765 comment.pull_request_version_id = (
763 pull_request_version.pull_request_version_id)
766 pull_request_version.pull_request_version_id)
764 Session().add(comment)
767 Session().add(comment)
765
768
766 def _calculate_commit_id_changes(self, old_ids, new_ids):
769 def _calculate_commit_id_changes(self, old_ids, new_ids):
767 added = new_ids.difference(old_ids)
770 added = new_ids.difference(old_ids)
768 common = old_ids.intersection(new_ids)
771 common = old_ids.intersection(new_ids)
769 removed = old_ids.difference(new_ids)
772 removed = old_ids.difference(new_ids)
770 return ChangeTuple(added, common, removed)
773 return ChangeTuple(added, common, removed)
771
774
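    # Editorial sketch (not part of the original file): a tiny worked example
    # of the set algebra above, with made-up short ids:
    #
    #   old_ids = {'aaa', 'bbb'}
    #   new_ids = {'bbb', 'ccc'}
    #   _calculate_commit_id_changes(old_ids, new_ids)
    #   # -> ChangeTuple(added={'ccc'}, common={'bbb'}, removed={'aaa'})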
772 def _calculate_file_changes(self, old_diff_data, new_diff_data):
775 def _calculate_file_changes(self, old_diff_data, new_diff_data):
773
776
774 old_files = OrderedDict()
777 old_files = OrderedDict()
775 for diff_data in old_diff_data.parsed_diff:
778 for diff_data in old_diff_data.parsed_diff:
776 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
779 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
777
780
778 added_files = []
781 added_files = []
779 modified_files = []
782 modified_files = []
780 removed_files = []
783 removed_files = []
781 for diff_data in new_diff_data.parsed_diff:
784 for diff_data in new_diff_data.parsed_diff:
782 new_filename = diff_data['filename']
785 new_filename = diff_data['filename']
783 new_hash = md5_safe(diff_data['raw_diff'])
786 new_hash = md5_safe(diff_data['raw_diff'])
784
787
785 old_hash = old_files.get(new_filename)
788 old_hash = old_files.get(new_filename)
786 if not old_hash:
789 if not old_hash:
787 # file is not present in old diff, means it's added
790 # file is not present in old diff, means it's added
788 added_files.append(new_filename)
791 added_files.append(new_filename)
789 else:
792 else:
790 if new_hash != old_hash:
793 if new_hash != old_hash:
791 modified_files.append(new_filename)
794 modified_files.append(new_filename)
792 # now remove a file from old, since we have seen it already
795 # now remove a file from old, since we have seen it already
793 del old_files[new_filename]
796 del old_files[new_filename]
794
797
795 # removed files are those present in old but not in new; since we
798 # removed files are those present in old but not in new; since we
796 # delete old entries that also appear in the new diff, any left-overs
799 # delete old entries that also appear in the new diff, any left-overs
797 # are the removed files
800 # are the removed files
798 removed_files.extend(old_files.keys())
801 removed_files.extend(old_files.keys())
799
802
800 return FileChangeTuple(added_files, modified_files, removed_files)
803 return FileChangeTuple(added_files, modified_files, removed_files)
801
804
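    # Editorial sketch (not part of the original file): the comparison above
    # keys both diffs by filename and compares md5 hashes of the raw per-file
    # diffs, so a file counts as "modified" only when its hunk content
    # changed between the old and the new pull request version. With made-up
    # data:
    #
    #   old diff: {'a.py': hash1, 'b.py': hash2}
    #   new diff: {'b.py': hash2x, 'c.py': hash3}
    #   -> added=['c.py'], modified=['b.py'] (hash differs), removed=['a.py']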
802 def _render_update_message(self, changes, file_changes):
805 def _render_update_message(self, changes, file_changes):
803 """
806 """
804 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
807 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
805 so it always looks the same regardless of which default
808 so it always looks the same regardless of which default
806 renderer the system is using.
809 renderer the system is using.
807
810
808 :param changes: changes named tuple
811 :param changes: changes named tuple
809 :param file_changes: file changes named tuple
812 :param file_changes: file changes named tuple
810
813
811 """
814 """
812 new_status = ChangesetStatus.get_status_lbl(
815 new_status = ChangesetStatus.get_status_lbl(
813 ChangesetStatus.STATUS_UNDER_REVIEW)
816 ChangesetStatus.STATUS_UNDER_REVIEW)
814
817
815 changed_files = (
818 changed_files = (
816 file_changes.added + file_changes.modified + file_changes.removed)
819 file_changes.added + file_changes.modified + file_changes.removed)
817
820
818 params = {
821 params = {
819 'under_review_label': new_status,
822 'under_review_label': new_status,
820 'added_commits': changes.added,
823 'added_commits': changes.added,
821 'removed_commits': changes.removed,
824 'removed_commits': changes.removed,
822 'changed_files': changed_files,
825 'changed_files': changed_files,
823 'added_files': file_changes.added,
826 'added_files': file_changes.added,
824 'modified_files': file_changes.modified,
827 'modified_files': file_changes.modified,
825 'removed_files': file_changes.removed,
828 'removed_files': file_changes.removed,
826 }
829 }
827 renderer = RstTemplateRenderer()
830 renderer = RstTemplateRenderer()
828 return renderer.render('pull_request_update.mako', **params)
831 return renderer.render('pull_request_update.mako', **params)
829
832
830 def edit(self, pull_request, title, description):
833 def edit(self, pull_request, title, description):
831 pull_request = self.__get_pull_request(pull_request)
834 pull_request = self.__get_pull_request(pull_request)
832 if pull_request.is_closed():
835 if pull_request.is_closed():
833 raise ValueError('This pull request is closed')
836 raise ValueError('This pull request is closed')
834 if title:
837 if title:
835 pull_request.title = title
838 pull_request.title = title
836 pull_request.description = description
839 pull_request.description = description
837 pull_request.updated_on = datetime.datetime.now()
840 pull_request.updated_on = datetime.datetime.now()
838 Session().add(pull_request)
841 Session().add(pull_request)
839
842
840 def update_reviewers(self, pull_request, reviewer_data):
843 def update_reviewers(self, pull_request, reviewer_data):
841 """
844 """
842 Update the reviewers in the pull request
845 Update the reviewers in the pull request
843
846
844 :param pull_request: the pr to update
847 :param pull_request: the pr to update
845 :param reviewer_data: list of tuples [(user, ['reason1', 'reason2'])]
848 :param reviewer_data: list of tuples [(user, ['reason1', 'reason2'])]
846 """
849 """
847
850
848 reviewers_reasons = {}
851 reviewers_reasons = {}
849 for user_id, reasons in reviewer_data:
852 for user_id, reasons in reviewer_data:
850 if isinstance(user_id, (int, basestring)):
853 if isinstance(user_id, (int, basestring)):
851 user_id = self._get_user(user_id).user_id
854 user_id = self._get_user(user_id).user_id
852 reviewers_reasons[user_id] = reasons
855 reviewers_reasons[user_id] = reasons
853
856
854 reviewers_ids = set(reviewers_reasons.keys())
857 reviewers_ids = set(reviewers_reasons.keys())
855 pull_request = self.__get_pull_request(pull_request)
858 pull_request = self.__get_pull_request(pull_request)
856 current_reviewers = PullRequestReviewers.query()\
859 current_reviewers = PullRequestReviewers.query()\
857 .filter(PullRequestReviewers.pull_request ==
860 .filter(PullRequestReviewers.pull_request ==
858 pull_request).all()
861 pull_request).all()
859 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
862 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
860
863
861 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
864 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
862 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
865 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
863
866
864 log.debug("Adding %s reviewers", ids_to_add)
867 log.debug("Adding %s reviewers", ids_to_add)
865 log.debug("Removing %s reviewers", ids_to_remove)
868 log.debug("Removing %s reviewers", ids_to_remove)
866 changed = False
869 changed = False
867 for uid in ids_to_add:
870 for uid in ids_to_add:
868 changed = True
871 changed = True
869 _usr = self._get_user(uid)
872 _usr = self._get_user(uid)
870 reasons = reviewers_reasons[uid]
873 reasons = reviewers_reasons[uid]
871 reviewer = PullRequestReviewers(_usr, pull_request, reasons)
874 reviewer = PullRequestReviewers(_usr, pull_request, reasons)
872 Session().add(reviewer)
875 Session().add(reviewer)
873
876
874 self.notify_reviewers(pull_request, ids_to_add)
877 self.notify_reviewers(pull_request, ids_to_add)
875
878
876 for uid in ids_to_remove:
879 for uid in ids_to_remove:
877 changed = True
880 changed = True
878 reviewer = PullRequestReviewers.query()\
881 reviewer = PullRequestReviewers.query()\
879 .filter(PullRequestReviewers.user_id == uid,
882 .filter(PullRequestReviewers.user_id == uid,
880 PullRequestReviewers.pull_request == pull_request)\
883 PullRequestReviewers.pull_request == pull_request)\
881 .scalar()
884 .scalar()
882 if reviewer:
885 if reviewer:
883 Session().delete(reviewer)
886 Session().delete(reviewer)
884 if changed:
887 if changed:
885 pull_request.updated_on = datetime.datetime.now()
888 pull_request.updated_on = datetime.datetime.now()
886 Session().add(pull_request)
889 Session().add(pull_request)
887
890
888 return ids_to_add, ids_to_remove
891 return ids_to_add, ids_to_remove
889
892
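    # Editorial sketch (not part of the original file): `update_reviewers`
    # diffs the submitted reviewer set against the current one, adding and
    # removing rows as needed and notifying only the newly added users.
    # Illustrative data only:
    #
    #   added, removed = PullRequestModel().update_reviewers(
    #       pull_request,
    #       [(7, ['original author']), (13, [])])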
890 def get_url(self, pull_request):
893 def get_url(self, pull_request):
891 return h.url('pullrequest_show',
894 return h.url('pullrequest_show',
892 repo_name=safe_str(pull_request.target_repo.repo_name),
895 repo_name=safe_str(pull_request.target_repo.repo_name),
893 pull_request_id=pull_request.pull_request_id,
896 pull_request_id=pull_request.pull_request_id,
894 qualified=True)
897 qualified=True)
895
898
896 def get_shadow_clone_url(self, pull_request):
899 def get_shadow_clone_url(self, pull_request):
897 """
900 """
898 Returns the qualified URL pointing to the shadow repository. If this
901 Returns the qualified URL pointing to the shadow repository. If this
899 pull request is closed there is no shadow repository and ``None`` is
902 pull request is closed there is no shadow repository and ``None`` is
900 returned.
903 returned.
901 """
904 """
902 if pull_request.is_closed():
905 if pull_request.is_closed():
903 return None
            return None
        else:
            pr_url = urllib.unquote(self.get_url(pull_request))
            return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))

    def notify_reviewers(self, pull_request, reviewers_ids):
        # notification to reviewers
        if not reviewers_ids:
            return

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.url(
            'pullrequest_show',
            repo_name=pr_target_repo.repo_name,
            pull_request_id=pull_request_obj.pull_request_id,
            qualified=True,)

        # set some variables for email notification
        pr_target_repo_url = h.url(
            'summary_home',
            repo_name=pr_target_repo.repo_name,
            qualified=True)

        pr_source_repo_url = h.url(
            'summary_home',
            repo_name=pr_source_repo.repo_name,
            qualified=True)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
        }

        # pre-generate the subject for notification itself
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )

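The commit list handed to the email template above is just a list of (raw_id, message) pairs built with map(). A minimal standalone sketch of that shape, using invented commits and a stand-in for the commit objects:

from collections import namedtuple

# Stand-in for the commit objects that pr_source_repo.get_commit() returns.
FakeCommit = namedtuple('FakeCommit', ['raw_id', 'message'])

commits_by_id = {
    'abc123': FakeCommit('abc123', 'Fix merge handling'),
    'def456': FakeCommit('def456', 'Add tests'),
}
revisions = ['abc123', 'def456']

pull_request_commits = [
    (x.raw_id, x.message)
    for x in map(commits_by_id.get, revisions)]

print(pull_request_commits)
# [('abc123', 'Fix merge handling'), ('def456', 'Add tests')]
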
    def delete(self, pull_request):
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        Session().delete(pull_request)

    def close_pull_request(self, pull_request, user):
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self._trigger_pull_request_hook(
            pull_request, pull_request.author, 'close')
        self._log_action('user_closed_pull_request', user, pull_request)

    def close_pull_request_with_comment(self, pull_request, user, repo,
                                        message=None):
        status = ChangesetStatus.STATUS_REJECTED

        if not message:
            message = (
                _('Status change %(transition_icon)s %(status)s') % {
                    'transition_icon': '>',
                    'status': ChangesetStatus.get_status_lbl(status)})

        internal_message = _('Closing with') + ' ' + message

        comm = ChangesetCommentsModel().create(
            text=internal_message,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            status_change=ChangesetStatus.get_status_lbl(status),
            status_change_type=status,
            closing_pr=True
        )

        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comm,
            pull_request=pull_request.pull_request_id
        )
        Session().flush()

        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)

    def merge_status(self, pull_request):
        if not self._is_merge_enabled(pull_request):
            return False, _('Server-side pull request merging is disabled.')
        if pull_request.is_closed():
            return False, _('This pull request is closed.')
        merge_possible, msg = self._check_repo_requirements(
            target=pull_request.target_repo, source=pull_request.source_repo)
        if not merge_possible:
            return merge_possible, msg

        try:
            resp = self._try_merge(pull_request)
            log.debug("Merge response: %s", resp)
            status = resp.possible, self.merge_status_message(
                resp.failure_reason)
        except NotImplementedError:
            status = False, _('Pull request merging is not supported.')

        return status

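merge_status() always returns a (bool, message) pair, checking the cheap preconditions first and only then attempting a dry-run merge. A runnable stand-in that follows the same early-return shape (the message strings here are illustrative placeholders, not the exact texts used by the model):

def merge_status_stub(merge_enabled, pr_closed, repos_compatible):
    """Mimics the (possible, message) contract of merge_status()."""
    if not merge_enabled:
        return False, 'Server-side pull request merging is disabled.'
    if pr_closed:
        return False, 'This pull request is closed.'
    if not repos_compatible:
        return False, 'Target and source repository requirements differ.'
    return True, 'Merge appears possible (the dry-run merge decides for real).'

merge_possible, merge_msg = merge_status_stub(
    merge_enabled=True, pr_closed=False, repos_compatible=True)
print(merge_possible, merge_msg)
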
    def _check_repo_requirements(self, target, source):
        """
        Check if `target` and `source` have compatible requirements.

        Currently this is just checking for largefiles.
        """
        target_has_largefiles = self._has_largefiles(target)
        source_has_largefiles = self._has_largefiles(source)
        merge_possible = True
        message = u''

        if target_has_largefiles != source_has_largefiles:
            merge_possible = False
            if source_has_largefiles:
                message = _(
                    'Target repository large files support is disabled.')
            else:
                message = _(
                    'Source repository large files support is disabled.')

        return merge_possible, message

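The requirement check above reduces to one rule: both repositories must agree on largefiles support, and the error message names whichever side has it disabled. The same truth table as a standalone function, independent of the model classes:

def largefiles_compatible(target_has_largefiles, source_has_largefiles):
    """Merge is only possible when both sides have, or both lack, largefiles."""
    if target_has_largefiles == source_has_largefiles:
        return True, ''
    if source_has_largefiles:
        return False, 'Target repository large files support is disabled.'
    return False, 'Source repository large files support is disabled.'

print(largefiles_compatible(True, True))    # (True, '')
print(largefiles_compatible(False, True))   # (False, 'Target repository ...')
print(largefiles_compatible(True, False))   # (False, 'Source repository ...')
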
    def _has_largefiles(self, repo):
        largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
            'extensions', 'largefiles')
        return largefiles_ui and largefiles_ui[0].active

    def _try_merge(self, pull_request):
        """
        Try to merge the pull request and return the merge status.
        """
        log.debug(
            "Trying out if the pull request %s can be merged.",
            pull_request.pull_request_id)
        target_vcs = pull_request.target_repo.scm_instance()

        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF)
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            log.debug("The target repository is locked.")
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
        elif self._needs_merge_state_refresh(pull_request, target_ref):
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            possible = pull_request.\
                _last_merge_status == MergeFailureReason.NONE
            merge_state = MergeResponse(
                possible, False, None, pull_request._last_merge_status)

        return merge_state

    def _refresh_reference(self, reference, vcs_repository):
        if reference.type in ('branch', 'book'):
            name_or_id = reference.name
        else:
            name_or_id = reference.commit_id
        refreshed_commit = vcs_repository.get_commit(name_or_id)
        refreshed_reference = Reference(
            reference.type, reference.name, refreshed_commit.raw_id)
        return refreshed_reference

    def _needs_merge_state_refresh(self, pull_request, target_reference):
        return not(
            pull_request.revisions and
            pull_request.revisions[0] == pull_request._last_merge_source_rev and
            target_reference.commit_id == pull_request._last_merge_target_rev)

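In plain terms, the cached merge state is trusted only while the newest source revision and the target tip still match what was recorded during the last dry-run; anything else forces a refresh. The same predicate as a standalone function with invented revision hashes:

def needs_merge_state_refresh(revisions, last_source_rev,
                              target_tip, last_target_rev):
    """True when the cached merge result can no longer be trusted."""
    cache_still_valid = (
        bool(revisions) and
        revisions[0] == last_source_rev and
        target_tip == last_target_rev)
    return not cache_still_valid

# Cached state still matches the current refs: no refresh needed.
print(needs_merge_state_refresh(['abc123'], 'abc123', 'def456', 'def456'))  # False
# The target branch has moved since the last dry-run: refresh.
print(needs_merge_state_refresh(['abc123'], 'abc123', 'fff999', 'def456'))  # True
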
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        use_rebase = self._use_rebase_for_merging(pull_request)
        merge_state = target_vcs.merge(
            target_reference, source_vcs, pull_request.source_ref_parts,
            workspace_id, dry_run=True, use_rebase=use_rebase)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request._last_merge_status = merge_state.failure_reason
            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state

    def _workspace_id(self, pull_request):
        workspace_id = 'pr-%s' % pull_request.pull_request_id
        return workspace_id

    def merge_status_message(self, status_code):
        """
        Return a human friendly error message for the given merge status code.
        """
        return self.MERGE_STATUS_MESSAGES[status_code]

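Every pull request gets a deterministic merge workspace name of the form 'pr-<id>', and failure reasons are translated to user-facing text through a lookup table on the class. A small sketch of both ideas, using a hypothetical, trimmed-down message table (the real MERGE_STATUS_MESSAGES mapping is defined elsewhere in this class and keyed by MergeFailureReason codes):

# Hypothetical stand-in keyed by names instead of MergeFailureReason codes.
MERGE_STATUS_MESSAGES = {
    'NONE': 'The merge can be done automatically.',
    'TARGET_IS_LOCKED': 'The target repository is locked.',
}

def workspace_id(pull_request_id):
    return 'pr-%s' % pull_request_id

print(workspace_id(42))                           # pr-42
print(MERGE_STATUS_MESSAGES['TARGET_IS_LOCKED'])  # The target repository is locked.
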
    def generate_repo_data(self, repo, commit_id=None, branch=None,
                           bookmark=None):
        all_refs, selected_ref = \
            self._get_repo_pullrequest_sources(
                repo.scm_instance(), commit_id=commit_id,
                branch=branch, bookmark=bookmark)

        refs_select2 = []
        for element in all_refs:
            children = [{'id': x[0], 'text': x[1]} for x in element[0]]
            refs_select2.append({'text': element[1], 'children': children})

        return {
            'user': {
                'user_id': repo.user.user_id,
                'username': repo.user.username,
                'firstname': repo.user.firstname,
                'lastname': repo.user.lastname,
                'gravatar_link': h.gravatar_url(repo.user.email, 14),
            },
            'description': h.chop_at_smart(repo.description, '\n'),
            'refs': {
                'all_refs': all_refs,
                'selected_ref': selected_ref,
                'select2_refs': refs_select2
            }
        }

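The refs_select2 loop above only reshapes the grouped refs into the nested id/text structure that the select2 widget on the pull request form expects. A standalone illustration of that transformation with invented data:

# all_refs is a list of (ref_list, group_name) pairs; each ref entry is a
# ('type:name:commit_id', 'display name') tuple.
all_refs = [
    ([('branch:default:abc123', 'default'),
      ('branch:stable:def456', 'stable')], 'Branches'),
]

refs_select2 = []
for ref_list, group_name in all_refs:
    children = [{'id': ref_key, 'text': ref_name} for ref_key, ref_name in ref_list]
    refs_select2.append({'text': group_name, 'children': children})

print(refs_select2)
# [{'text': 'Branches', 'children': [
#     {'id': 'branch:default:abc123', 'text': 'default'},
#     {'id': 'branch:stable:def456', 'text': 'stable'}]}]
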
    def generate_pullrequest_title(self, source, source_ref, target):
        return u'{source}#{at_ref} to {target}'.format(
            source=source,
            at_ref=source_ref,
            target=target,
        )

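For example, with a source repository name, a source ref and a target repository name, the default title comes out as shown in this small self-contained snippet (names invented):

title = u'{source}#{at_ref} to {target}'.format(
    source='joe/my-fork', at_ref='feature-x', target='upstream/project')
print(title)  # joe/my-fork#feature-x to upstream/project
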
    def _cleanup_merge_workspace(self, pull_request):
        # Merging related cleanup
        target_scm = pull_request.target_repo.scm_instance()
        workspace_id = 'pr-%s' % pull_request.pull_request_id

        try:
            target_scm.cleanup_merge_workspace(workspace_id)
        except NotImplementedError:
            pass

    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in pullrequest controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        """

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_str(branch) if branch else None
        bookmark = safe_str(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []
        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                raise CommitDoesNotExistError(
                    'No commit refs could be found matching: %s' % ref)
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                selected = 'branch:%s:%s' % (
                    repo.DEFAULT_BRANCH_NAME,
                    repo.branches[repo.DEFAULT_BRANCH_NAME]
                )
            elif repo.commit_ids:
                rev = repo.commit_ids[0]
                selected = 'rev:%s:%s' % (rev, rev)
            else:
                raise EmptyRepositoryError()
        return groups, selected

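Both the ref keys collected above and the returned selected value follow a 'type:name:commit_id' pattern. A standalone walk through the inner loop with an invented branch dictionary:

branches = {'default': 'abc123', 'feature-x': 'def456'}

selected = None
group_refs = []
for ref_name, ref_id in sorted(branches.items()):
    ref_key = '%s:%s:%s' % ('branch', ref_name, ref_id)
    group_refs.append((ref_key, ref_name))
    # Pretend 'feature-x' was the branch requested by the caller.
    if not selected and ref_name == 'feature-x':
        selected = ref_key
groups = [(group_refs, 'Branches')]

print(selected)  # branch:feature-x:def456
print(groups[0][1], [key for key, _name in groups[0][0]])
# Branches ['branch:default:abc123', 'branch:feature-x:def456']
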
    def get_diff(self, pull_request, context=DIFF_CONTEXT):
        pull_request = self.__get_pull_request(pull_request)
        return self._get_diff_from_pr_or_version(pull_request, context=context)

    def _get_diff_from_pr_or_version(self, pr_or_version, context):
        source_repo = pr_or_version.source_repo

        # we swap org/other ref since we run a simple diff on one repo
        target_ref_id = pr_or_version.target_ref_parts.commit_id
        source_ref_id = pr_or_version.source_ref_parts.commit_id
        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
        vcs_repo = source_repo.scm_instance()

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit, context=context)
        return vcs_diff

    def _is_merge_enabled(self, pull_request):
        settings_model = VcsSettingsModel(repo=pull_request.target_repo)
        settings = settings_model.get_general_settings()
        return settings.get('rhodecode_pr_merge_enabled', False)

    def _use_rebase_for_merging(self, pull_request):
        settings_model = VcsSettingsModel(repo=pull_request.target_repo)
        settings = settings_model.get_general_settings()
        return settings.get('rhodecode_hg_use_rebase_for_merging', False)

    def _log_action(self, action, user, pull_request):
        action_logger(
            user,
            '{action}:{pr_id}'.format(
                action=action, pr_id=pull_request.pull_request_id),
            pull_request.target_repo)


ChangeTuple = namedtuple('ChangeTuple',
                         ['added', 'common', 'removed'])

FileChangeTuple = namedtuple('FileChangeTuple',
                             ['added', 'modified', 'removed'])
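
These two named tuples appear to be used by the pull request update logic elsewhere in this module to report which commits were added, kept or removed and which files changed between versions. A short self-contained example with invented values:

from collections import namedtuple

ChangeTuple = namedtuple('ChangeTuple', ['added', 'common', 'removed'])
FileChangeTuple = namedtuple('FileChangeTuple', ['added', 'modified', 'removed'])

commit_changes = ChangeTuple(added=['c3'], common=['c1', 'c2'], removed=[])
file_changes = FileChangeTuple(
    added=['docs/new.rst'], modified=['setup.py'], removed=[])

print(commit_changes.added)    # ['c3']
print(file_changes.modified)   # ['setup.py']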