# -*- coding: utf-8 -*-

# Copyright (C) 2014-2016 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Base module for all VCS systems
"""

import collections
import datetime
import itertools
import logging
import os
import time
import warnings

from zope.cachedescriptors.property import Lazy as LazyProperty

from rhodecode.lib.utils2 import safe_str, safe_unicode
from rhodecode.lib.vcs import connection
from rhodecode.lib.vcs.utils import author_name, author_email
from rhodecode.lib.vcs.conf import settings
from rhodecode.lib.vcs.exceptions import (
    CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
    NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
    NodeDoesNotExistError, NodeNotChangedError, VCSError,
    ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
    RepositoryError)


log = logging.getLogger(__name__)


FILEMODE_DEFAULT = 0100644
FILEMODE_EXECUTABLE = 0100755

Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
MergeResponse = collections.namedtuple(
    'MergeResponse',
    ('possible', 'executed', 'merge_ref', 'failure_reason'))
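

# Illustrative sketch (not used by the backends themselves): a ``Reference``
# names a ref by type ('branch', 'tag', ...), name and commit id. The
# 40-character id below is a made-up placeholder.
def _example_reference():
    return Reference('branch', 'default', 'a' * 40)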


class MergeFailureReason(object):
    """
    Enumeration with all the reasons why the server side merge could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and is the encouraged way to
    deprecate old reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The merge was not successful, there are conflicts.
    MERGE_FAILED = 2

    # The merge succeeded but we could not push it to the target repository.
    PUSH_FAILED = 3

    # The specified target is not a head in the target repository.
    TARGET_IS_NOT_HEAD = 4

    # The source repository contains more branches than the target. Pushing
    # the merge will create additional branches in the target.
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # The target reference has multiple heads, which makes it impossible to
    # identify the target location unambiguously. This can only happen for
    # Mercurial branches.
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # The target repository is locked.
    TARGET_IS_LOCKED = 7

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # An involved commit could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    # The target repo reference is missing.
    MISSING_TARGET_REF = 9

    # The source repo reference is missing.
    MISSING_SOURCE_REF = 10

    # The merge was not successful, there are conflicts related to sub
    # repositories.
    SUBREPO_MERGE_FAILED = 11
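

# Illustrative sketch (not used by the backends themselves): a failed merge is
# reported by combining ``MergeResponse`` with one of the reason codes above.
# The field values are placeholders.
def _example_failed_merge_response():
    return MergeResponse(
        possible=True, executed=False, merge_ref=None,
        failure_reason=MergeFailureReason.MERGE_FAILED)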


class UpdateFailureReason(object):
    """
    Enumeration with all the reasons why the pull request update could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and is the encouraged way to
    deprecate old reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The pull request is up to date.
    NO_CHANGE = 2

    # The pull request has a reference type that is not supported for update.
    WRONG_REF_TPYE = 3

    # Update failed because the target reference is missing.
    MISSING_TARGET_REF = 4

    # Update failed because the source reference is missing.
    MISSING_SOURCE_REF = 5
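

# Illustrative sketch: callers typically translate these numeric codes into
# user-facing text. The messages below are hypothetical examples, not the
# strings used by RhodeCode itself.
_EXAMPLE_UPDATE_FAILURE_MESSAGES = {
    UpdateFailureReason.NONE: 'Pull request updated.',
    UpdateFailureReason.NO_CHANGE: 'Pull request is already up to date.',
    UpdateFailureReason.WRONG_REF_TPYE: 'Unsupported reference type.',
    UpdateFailureReason.MISSING_TARGET_REF: 'Target reference is missing.',
    UpdateFailureReason.MISSING_SOURCE_REF: 'Source reference is missing.',
}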


class BaseRepository(object):
    """
    Base Repository for final backends

    .. attribute:: DEFAULT_BRANCH_NAME

        name of default branch (i.e. "trunk" for svn, "master" for git, etc.)

    .. attribute:: commit_ids

        list of all available commit ids, in ascending order

    .. attribute:: path

        absolute path to the repository

    .. attribute:: bookmarks

        Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
        there are no bookmarks or the backend implementation does not support
        bookmarks.

    .. attribute:: tags

        Mapping from name to :term:`Commit ID` of the tag.

    """

    DEFAULT_BRANCH_NAME = None
    DEFAULT_CONTACT = u"Unknown"
    DEFAULT_DESCRIPTION = u"unknown"
    EMPTY_COMMIT_ID = '0' * 40

    path = None

    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if the repository could
        not be found at the given ``repo_path``, or if the directory at
        ``repo_path`` exists and ``create`` is set to True.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, will try to create the repository.
        :param src_url=None: if set, should be a proper url from which the
            repository would be cloned; requires the ``create`` parameter to be
            set to True - raises RepositoryError if src_url is set and create
            evaluates to False
        """
        raise NotImplementedError

    def __repr__(self):
        return '<%s at %s>' % (self.__class__.__name__, self.path)

    def __len__(self):
        return self.count()

    def __eq__(self, other):
        same_instance = isinstance(other, self.__class__)
        return same_instance and other.path == self.path

    def __ne__(self, other):
        return not self.__eq__(other)

    @LazyProperty
    def EMPTY_COMMIT(self):
        return EmptyCommit(self.EMPTY_COMMIT_ID)

    @LazyProperty
    def alias(self):
        for k, v in settings.BACKENDS.items():
            if v.split('.')[-1] == str(self.__class__.__name__):
                return k

    @LazyProperty
    def name(self):
        return safe_unicode(os.path.basename(self.path))

    @LazyProperty
    def description(self):
        raise NotImplementedError

    def refs(self):
        """
        Returns a `dict` with branches, bookmarks, tags, and closed_branches
        for this repository
        """
        raise NotImplementedError

    @LazyProperty
    def branches(self):
        """
        A `dict` which maps branch names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def size(self):
        """
        Returns combined size in bytes for all repository files
        """
        tip = self.get_commit()
        return tip.size

    def size_at_commit(self, commit_id):
        commit = self.get_commit(commit_id)
        return commit.size

    def is_empty(self):
        return not bool(self.commit_ids)

    @staticmethod
    def check_url(url, config):
        """
        Checks the given url and tries to verify that it is a valid link.
        """
        raise NotImplementedError

    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend
        """
        raise NotImplementedError

    # ==========================================================================
    # COMMITS
    # ==========================================================================

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """
        Returns an instance of the `BaseCommit` class. If `commit_id` and
        `commit_idx` are both None, the most recent commit is returned.

        :param pre_load: Optional. List of commit attributes to load.

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        raise NotImplementedError

    def __iter__(self):
        for commit_id in self.commit_ids:
            yield self.get_commit(commit_id=commit_id)

    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, pre_load=None):
        """
        Returns an iterator of `BaseCommit` objects from start to end. Like a
        list slice, the end is not inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date:
        :param end_date:
        :param branch_name:
        :param pre_load:
        """
        raise NotImplementedError

    def __getitem__(self, key):
        """
        Allows index-based access to the commit objects of this repository.
        """
        pre_load = ["author", "branch", "date", "message", "parents"]
        if isinstance(key, slice):
            return self._get_range(key, pre_load)
        return self.get_commit(commit_idx=key, pre_load=pre_load)

    def _get_range(self, slice_obj, pre_load):
        for commit_id in self.commit_ids.__getitem__(slice_obj):
            yield self.get_commit(commit_id=commit_id, pre_load=pre_load)

    def count(self):
        return len(self.commit_ids)

    def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if a tag with the same name already exists
        """
        raise NotImplementedError

    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes the tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if a tag with the given name does not exist
        """
        raise NotImplementedError

    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns a (git-like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which the diff is shown. Can be
            ``self.EMPTY_COMMIT`` - in this case, the patch shows all
            the changes since the empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: Can be set to a path of a file to create a diff of that
            file. If `path1` is also set, this value is only associated to
            `commit2`.
        :param ignore_whitespace: If set to ``True``, whitespace changes are
            not shown. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
            shown. Defaults to ``3``.
        :param path1: Can be set to a path to associate with `commit1`. This
            parameter works only for backends which support diff generation for
            different paths. Other backends will raise a `ValueError` if `path1`
            is set and has a different value than `path`.
        """
        raise NotImplementedError

    def strip(self, commit_id, branch=None):
        """
        Strip the given commit_id from the repository
        """
        raise NotImplementedError

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Returns the latest common ancestor commit, if one exists, for this
        repo's `commit_id1` vs `commit_id2` from `repo2`.

        :param commit_id1: Commit id from this repository to use as a
            target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        """
        raise NotImplementedError

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Compare this repository's revision `commit_id1` with `commit_id2`.

        Returns a tuple(commits, ancestor) that would be merged from
        `commit_id2`. When doing a normal compare (``merge=False``), ``None``
        is returned as the ancestor.

        :param commit_id1: Commit id from this repository to use as a
            target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        :param merge: If set to ``True`` will do a merge compare which also
            returns the common ancestor.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def merge(self, target_ref, source_repo, source_ref, workspace_id,
              user_name='', user_email='', message='', dry_run=False,
              use_rebase=False):
        """
        Merge the revisions specified in `source_ref` from `source_repo`
        onto the `target_ref` of this repository.

        `source_ref` and `target_ref` are named tuples with the following
        fields: `type`, `name` and `commit_id`.

        Returns a MergeResponse named tuple with the following fields:
        'possible', 'executed', 'merge_ref', 'failure_reason'.

        :param target_ref: `target_ref` points to the commit on top of which
            the `source_ref` should be merged.
        :param source_repo: The repository that contains the commits to be
            merged.
        :param source_ref: `source_ref` points to the topmost commit from
            the `source_repo` which should be merged.
        :param workspace_id: `workspace_id` unique identifier.
        :param user_name: Merge commit `user_name`.
        :param user_email: Merge commit `user_email`.
        :param message: Merge commit `message`.
        :param dry_run: If `True` the merge will not take place.
        :param use_rebase: If `True` commits from the source will be rebased
            on top of the target instead of being merged.
        """
        if dry_run:
            message = message or 'dry_run_merge_message'
            user_email = user_email or 'dry-run-merge@rhodecode.com'
            user_name = user_name or 'Dry-Run User'
        else:
            if not user_name:
                raise ValueError('user_name cannot be empty')
            if not user_email:
                raise ValueError('user_email cannot be empty')
            if not message:
                raise ValueError('message cannot be empty')

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            workspace_id, target_ref)

        try:
            return self._merge_repo(
                shadow_repository_path, target_ref, source_repo,
                source_ref, message, user_name, user_email, dry_run=dry_run,
                use_rebase=use_rebase)
        except RepositoryError:
            log.exception(
                'Unexpected failure when running merge, dry-run=%s',
                dry_run)
            return MergeResponse(
                False, False, None, MergeFailureReason.UNKNOWN)

    def _merge_repo(self, shadow_repository_path, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False, use_rebase=False):
        """Internal implementation of merge."""
        raise NotImplementedError

    def _maybe_prepare_merge_workspace(self, workspace_id, target_ref):
        """
        Create the merge workspace.

        :param workspace_id: `workspace_id` unique identifier.
        """
        raise NotImplementedError

    def cleanup_merge_workspace(self, workspace_id):
        """
        Remove merge workspace.

        This function MUST not fail in case there is no workspace associated
        with the given `workspace_id`.

        :param workspace_id: `workspace_id` unique identifier.
        """
        raise NotImplementedError

    # ========== #
    # COMMIT API #
    # ========== #

    @LazyProperty
    def in_memory_commit(self):
        """
        Returns :class:`InMemoryCommit` object for this repository.
        """
        raise NotImplementedError

    # ======================== #
    # UTILITIES FOR SUBCLASSES #
    # ======================== #

    def _validate_diff_commits(self, commit1, commit2):
        """
        Validates that the given commits are related to this repository.

        Intended as a utility for subclasses to have a consistent validation
        of input parameters in methods like :meth:`get_diff`.
        """
        self._validate_commit(commit1)
        self._validate_commit(commit2)
        if (isinstance(commit1, EmptyCommit) and
                isinstance(commit2, EmptyCommit)):
            raise ValueError("Cannot compare two empty commits")

    def _validate_commit(self, commit):
        if not isinstance(commit, BaseCommit):
            raise TypeError(
                "%s is not of type BaseCommit" % repr(commit))
        if commit.repository != self and not isinstance(commit, EmptyCommit):
            raise ValueError(
                "Commit %s must be a valid commit from this repository %s, "
                "but is related to repository %s instead." %
                (commit, self, commit.repository))

    def _validate_commit_id(self, commit_id):
        if not isinstance(commit_id, basestring):
            raise TypeError("commit_id must be a string value")

    def _validate_commit_idx(self, commit_idx):
        if not isinstance(commit_idx, (int, long)):
            raise TypeError("commit_idx must be a numeric value")

    def _validate_branch_name(self, branch_name):
        if branch_name and branch_name not in self.branches_all:
            msg = ("Branch %s not found in %s" % (branch_name, self))
            raise BranchDoesNotExistError(msg)

    #
    # Supporting deprecated API parts
    # TODO: johbo: consider to move this into a mixin
    #

    @property
    def EMPTY_CHANGESET(self):
        warnings.warn(
            "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
        return self.EMPTY_COMMIT_ID

    @property
    def revisions(self):
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        return self.commit_ids

    @revisions.setter
    def revisions(self, value):
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        self.commit_ids = value

    def get_changeset(self, revision=None, pre_load=None):
        warnings.warn("Use get_commit instead", DeprecationWarning)
        commit_id = None
        commit_idx = None
        if isinstance(revision, basestring):
            commit_id = revision
        else:
            commit_idx = revision
        return self.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)

    def get_changesets(
            self, start=None, end=None, start_date=None, end_date=None,
            branch_name=None, pre_load=None):
        warnings.warn("Use get_commits instead", DeprecationWarning)
        start_id = self._revision_to_commit(start)
        end_id = self._revision_to_commit(end)
        return self.get_commits(
            start_id=start_id, end_id=end_id, start_date=start_date,
            end_date=end_date, branch_name=branch_name, pre_load=pre_load)

    def _revision_to_commit(self, revision):
        """
        Translates a revision to a commit_id

        Helps to support the old changeset-based API, which allows commit ids
        and commit indices to be used interchangeably.
        """
        if revision is None:
            return revision

        if isinstance(revision, basestring):
            commit_id = revision
        else:
            commit_id = self.commit_ids[revision]
        return commit_id

    @property
    def in_memory_changeset(self):
        warnings.warn("Use in_memory_commit instead", DeprecationWarning)
        return self.in_memory_commit


class BaseCommit(object):
    """
    Each backend should implement its commit representation.

    **Attributes**

    ``repository``
        repository object within which commit exists

    ``id``
        The commit id; may be ``raw_id`` or, e.g. for Mercurial's tip,
        just ``tip``.

    ``raw_id``
        raw commit representation (i.e. the full 40-character sha for the git
        backend)

    ``short_id``
        shortened (if applicable) version of ``raw_id``; a simple shortcut
        for ``raw_id[:12]`` for the git/mercurial backends, or the same
        as ``raw_id`` for subversion

    ``idx``
        commit index

    ``files``
        list of ``FileNode`` (``Node`` with NodeKind.FILE) objects

    ``dirs``
        list of ``DirNode`` (``Node`` with NodeKind.DIR) objects

    ``nodes``
        combined list of ``Node`` objects

    ``author``
        author of the commit, as unicode

    ``message``
        message of the commit, as unicode

    ``parents``
        list of parent commits

    """

    branch = None
    """
    Depending on the backend this should be set to the branch name of the
    commit. Backends not supporting branches on commits should leave this
    value as ``None``.
    """

    _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
    """
    This template is used to generate a default prefix for repository archives
    if no prefix has been specified.
    """

    def __str__(self):
        return '<%s at %s:%s>' % (
            self.__class__.__name__, self.idx, self.short_id)

    def __repr__(self):
        return self.__str__()

    def __unicode__(self):
        return u'%s:%s' % (self.idx, self.short_id)

    def __eq__(self, other):
        same_instance = isinstance(other, self.__class__)
        return same_instance and self.raw_id == other.raw_id

    def __json__(self):
        parents = []
        try:
            for parent in self.parents:
                parents.append({'raw_id': parent.raw_id})
        except NotImplementedError:
            # empty commit doesn't have parents implemented
            pass

        return {
            'short_id': self.short_id,
            'raw_id': self.raw_id,
            'revision': self.idx,
            'message': self.message,
            'date': self.date,
            'author': self.author,
            'parents': parents,
            'branch': self.branch
        }

    @LazyProperty
    def last(self):
        """
        ``True`` if this is the last commit in the repository, ``False``
        otherwise; trying to access this attribute while there are no
        commits raises an `EmptyRepositoryError`
        """
        if self.repository is None:
            raise CommitError("Cannot check if it's most recent commit")
        return self.raw_id == self.repository.commit_ids[-1]

    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        raise NotImplementedError

    @property
    def merge(self):
        """
        Returns ``True`` if the commit is a merge (has more than one parent).
        """
        return len(self.parents) > 1

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """
        raise NotImplementedError

    @LazyProperty
    def id(self):
        """
        Returns string identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def short_id(self):
        """
        Returns shortened version of ``raw_id`` attribute, as string,
        identifying this commit, useful for presentation to users.
        """
        raise NotImplementedError

    @LazyProperty
    def idx(self):
        """
        Returns integer identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def committer(self):
        """
        Returns committer for this commit
        """
        raise NotImplementedError

    @LazyProperty
    def committer_name(self):
        """
        Returns committer name for this commit
        """

        return author_name(self.committer)

    @LazyProperty
    def committer_email(self):
        """
        Returns committer email address for this commit
        """

        return author_email(self.committer)

    @LazyProperty
    def author(self):
        """
        Returns author for this commit
        """

        raise NotImplementedError

    @LazyProperty
    def author_name(self):
        """
        Returns author name for this commit
        """

        return author_name(self.author)

    @LazyProperty
    def author_email(self):
        """
        Returns author email address for this commit
        """

        return author_email(self.author)

    def get_file_mode(self, path):
        """
        Returns stat mode of the file at `path`.
        """
        raise NotImplementedError

    def is_link(self, path):
        """
        Returns ``True`` if given `path` is a symlink
        """
        raise NotImplementedError

    def get_file_content(self, path):
        """
        Returns content of the file at the given `path`.
        """
        raise NotImplementedError

    def get_file_size(self, path):
        """
        Returns size of the file at the given `path`.
        """
        raise NotImplementedError

    def get_file_commit(self, path, pre_load=None):
        """
        Returns last commit of the file at the given `path`.

        :param pre_load: Optional. List of commit attributes to load.
        """
        return self.get_file_history(path, limit=1, pre_load=pre_load)[0]

    def get_file_history(self, path, limit=None, pre_load=None):
        """
        Returns the history of the file as a reversed list of
        :class:`BaseCommit` objects in which the file at the given `path`
        was modified.

        :param limit: Optional. Allows limiting the size of the returned
            history. This is intended as a hint to the underlying backend, so
            that it can apply optimizations depending on the limit.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four-element tuples with
        lineno, sha, a lazy commit loader and the line itself

        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def get_nodes(self, path):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        the state of the commit at the given ``path``.

        :raises ``CommitError``: if the node at the given ``path`` is not an
            instance of ``DirNode``
        """
        raise NotImplementedError

    def get_node(self, path):
        """
        Returns ``Node`` object from the given ``path``.

        :raises ``NodeDoesNotExistError``: if there is no node at the given
            ``path``
        """
        raise NotImplementedError

    def get_largefile_node(self, path):
        """
        Returns the path to the largefile from Mercurial storage.
        """
        raise NotImplementedError

    def archive_repo(self, file_path, kind='tgz', subrepos=None,
                     prefix=None, write_metadata=False, mtime=None):
        """
        Creates an archive containing the contents of the repository.

        :param file_path: path to the file in which to create the archive.
        :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
        :param prefix: name of root directory in archive.
            Default is repository name and commit's short_id joined with dash:
            ``"{repo_name}-{short_id}"``.
        :param write_metadata: write a metadata file into archive.
        :param mtime: custom modification time for archive creation, defaults
            to time.time() if not given.

        :raise VCSError: If prefix has a problem.
        """
897 allowed_kinds = settings.ARCHIVE_SPECS.keys()
893 allowed_kinds = settings.ARCHIVE_SPECS.keys()
898 if kind not in allowed_kinds:
894 if kind not in allowed_kinds:
899 raise ImproperArchiveTypeError(
895 raise ImproperArchiveTypeError(
900 'Archive kind (%s) not supported use one of %s' %
896 'Archive kind (%s) not supported use one of %s' %
901 (kind, allowed_kinds))
897 (kind, allowed_kinds))
902
898
903 prefix = self._validate_archive_prefix(prefix)
899 prefix = self._validate_archive_prefix(prefix)
904
900
905 mtime = mtime or time.mktime(self.date.timetuple())
901 mtime = mtime or time.mktime(self.date.timetuple())
906
902
907 file_info = []
903 file_info = []
908 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
904 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
909 for _r, _d, files in cur_rev.walk('/'):
905 for _r, _d, files in cur_rev.walk('/'):
910 for f in files:
906 for f in files:
911 f_path = os.path.join(prefix, f.path)
907 f_path = os.path.join(prefix, f.path)
912 file_info.append(
908 file_info.append(
913 (f_path, f.mode, f.is_link(), f.raw_bytes))
909 (f_path, f.mode, f.is_link(), f.raw_bytes))
914
910
915 if write_metadata:
911 if write_metadata:
916 metadata = [
912 metadata = [
917 ('repo_name', self.repository.name),
913 ('repo_name', self.repository.name),
918 ('rev', self.raw_id),
914 ('rev', self.raw_id),
919 ('create_time', mtime),
915 ('create_time', mtime),
920 ('branch', self.branch),
916 ('branch', self.branch),
921 ('tags', ','.join(self.tags)),
917 ('tags', ','.join(self.tags)),
922 ]
918 ]
923 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
919 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
924 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
920 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
925
921
926 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
922 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
927
923
928 def _validate_archive_prefix(self, prefix):
924 def _validate_archive_prefix(self, prefix):
929 if prefix is None:
925 if prefix is None:
930 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
926 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
931 repo_name=safe_str(self.repository.name),
927 repo_name=safe_str(self.repository.name),
932 short_id=self.short_id)
928 short_id=self.short_id)
933 elif not isinstance(prefix, str):
929 elif not isinstance(prefix, str):
934 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
930 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
935 elif prefix.startswith('/'):
931 elif prefix.startswith('/'):
936 raise VCSError("Prefix cannot start with leading slash")
932 raise VCSError("Prefix cannot start with leading slash")
937 elif prefix.strip() == '':
933 elif prefix.strip() == '':
938 raise VCSError("Prefix cannot be empty")
934 raise VCSError("Prefix cannot be empty")
939 return prefix
935 return prefix
940
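
For orientation, a minimal usage sketch of the archive API documented above. It assumes the enclosing method is exposed on commit objects as ``archive(file_path, kind=..., prefix=..., write_metadata=..., mtime=...)`` and that ``repo`` is an already-opened repository object; both names and the output path are assumptions of this example.

    # Hypothetical usage sketch; `repo` and the output path are assumed.
    commit = repo.get_commit()            # newest commit
    commit.archive(
        '/tmp/example-archive.tgz',
        kind='tgz',
        prefix=None,                      # defaults to "{repo_name}-{short_id}"
        write_metadata=True)              # adds .archival.txt with repo/rev info
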
936
941 @LazyProperty
937 @LazyProperty
942 def root(self):
938 def root(self):
943 """
939 """
944 Returns ``RootNode`` object for this commit.
940 Returns ``RootNode`` object for this commit.
945 """
941 """
946 return self.get_node('')
942 return self.get_node('')
947
943
948 def next(self, branch=None):
944 def next(self, branch=None):
949 """
945 """
950 Returns the next commit from the current one; if branch is given, it
946 Returns the next commit from the current one; if branch is given, it
951 will return the next commit belonging to that branch
947 will return the next commit belonging to that branch
952
948
953 :param branch: show commits within the given named branch
949 :param branch: show commits within the given named branch
954 """
950 """
955 indexes = xrange(self.idx + 1, self.repository.count())
951 indexes = xrange(self.idx + 1, self.repository.count())
956 return self._find_next(indexes, branch)
952 return self._find_next(indexes, branch)
957
953
958 def prev(self, branch=None):
954 def prev(self, branch=None):
959 """
955 """
960 Returns the previous commit from the current one; if branch is given,
956 Returns the previous commit from the current one; if branch is given,
961 it will return the previous commit belonging to that branch
957 it will return the previous commit belonging to that branch
962
958
963 :param branch: show commit within the given named branch
959 :param branch: show commit within the given named branch
964 """
960 """
965 indexes = xrange(self.idx - 1, -1, -1)
961 indexes = xrange(self.idx - 1, -1, -1)
966 return self._find_next(indexes, branch)
962 return self._find_next(indexes, branch)
967
963
968 def _find_next(self, indexes, branch=None):
964 def _find_next(self, indexes, branch=None):
969 if branch and self.branch != branch:
965 if branch and self.branch != branch:
970 raise VCSError('Branch option used on commit not belonging '
966 raise VCSError('Branch option used on commit not belonging '
971 'to that branch')
967 'to that branch')
972
968
973 for next_idx in indexes:
969 for next_idx in indexes:
974 commit = self.repository.get_commit(commit_idx=next_idx)
970 commit = self.repository.get_commit(commit_idx=next_idx)
975 if branch and branch != commit.branch:
971 if branch and branch != commit.branch:
976 continue
972 continue
977 return commit
973 return commit
978 raise CommitDoesNotExistError
974 raise CommitDoesNotExistError
979
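
As an illustration of the traversal helpers above, a small hedged sketch that walks forward from a commit until history is exhausted; ``repo`` is assumed to be an existing repository object.

    # Hedged sketch: iterate commits following a starting commit.
    from rhodecode.lib.vcs.exceptions import CommitDoesNotExistError

    commit = repo.get_commit(commit_idx=0)    # first commit in the repository
    while True:
        try:
            commit = commit.next()            # or commit.next(branch='default')
        except CommitDoesNotExistError:       # raised when no further commit exists
            break
        print(commit.short_id)
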
975
980 def diff(self, ignore_whitespace=True, context=3):
976 def diff(self, ignore_whitespace=True, context=3):
981 """
977 """
982 Returns a `Diff` object representing the change made by this commit.
978 Returns a `Diff` object representing the change made by this commit.
983 """
979 """
984 parent = (
980 parent = (
985 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
981 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
986 diff = self.repository.get_diff(
982 diff = self.repository.get_diff(
987 parent, self,
983 parent, self,
988 ignore_whitespace=ignore_whitespace,
984 ignore_whitespace=ignore_whitespace,
989 context=context)
985 context=context)
990 return diff
986 return diff
991
987
992 @LazyProperty
988 @LazyProperty
993 def added(self):
989 def added(self):
994 """
990 """
995 Returns list of added ``FileNode`` objects.
991 Returns list of added ``FileNode`` objects.
996 """
992 """
997 raise NotImplementedError
993 raise NotImplementedError
998
994
999 @LazyProperty
995 @LazyProperty
1000 def changed(self):
996 def changed(self):
1001 """
997 """
1002 Returns list of modified ``FileNode`` objects.
998 Returns list of modified ``FileNode`` objects.
1003 """
999 """
1004 raise NotImplementedError
1000 raise NotImplementedError
1005
1001
1006 @LazyProperty
1002 @LazyProperty
1007 def removed(self):
1003 def removed(self):
1008 """
1004 """
1009 Returns list of removed ``FileNode`` objects.
1005 Returns list of removed ``FileNode`` objects.
1010 """
1006 """
1011 raise NotImplementedError
1007 raise NotImplementedError
1012
1008
1013 @LazyProperty
1009 @LazyProperty
1014 def size(self):
1010 def size(self):
1015 """
1011 """
1016 Returns total number of bytes from contents of all filenodes.
1012 Returns total number of bytes from contents of all filenodes.
1017 """
1013 """
1018 return sum((node.size for node in self.get_filenodes_generator()))
1014 return sum((node.size for node in self.get_filenodes_generator()))
1019
1015
1020 def walk(self, topurl=''):
1016 def walk(self, topurl=''):
1021 """
1017 """
1022 Similar to the os.walk method. Instead of a filesystem, it walks
1018 Similar to the os.walk method. Instead of a filesystem, it walks
1023 through the commit starting at the given ``topurl``. Returns a generator of tuples
1019 through the commit starting at the given ``topurl``. Returns a generator of tuples
1024 (topnode, dirnodes, filenodes).
1020 (topnode, dirnodes, filenodes).
1025 """
1021 """
1026 topnode = self.get_node(topurl)
1022 topnode = self.get_node(topurl)
1027 if not topnode.is_dir():
1023 if not topnode.is_dir():
1028 return
1024 return
1029 yield (topnode, topnode.dirs, topnode.files)
1025 yield (topnode, topnode.dirs, topnode.files)
1030 for dirnode in topnode.dirs:
1026 for dirnode in topnode.dirs:
1031 for tup in self.walk(dirnode.path):
1027 for tup in self.walk(dirnode.path):
1032 yield tup
1028 yield tup
1033
1029
1034 def get_filenodes_generator(self):
1030 def get_filenodes_generator(self):
1035 """
1031 """
1036 Returns generator that yields *all* file nodes.
1032 Returns generator that yields *all* file nodes.
1037 """
1033 """
1038 for topnode, dirs, files in self.walk():
1034 for topnode, dirs, files in self.walk():
1039 for node in files:
1035 for node in files:
1040 yield node
1036 yield node
1041
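
A short sketch of using ``walk`` and ``get_filenodes_generator`` to list paths and total up file sizes for a commit; ``repo`` is assumed to be an existing repository object.

    # Hedged sketch: enumerate files of a commit and sum their sizes.
    commit = repo.get_commit()
    for topnode, dirs, files in commit.walk('/'):
        for f in files:
            print(f.path)
    total_bytes = sum(node.size for node in commit.get_filenodes_generator())
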
1037
1042 #
1038 #
1043 # Utilities for sub classes to support consistent behavior
1039 # Utilities for sub classes to support consistent behavior
1044 #
1040 #
1045
1041
1046 def no_node_at_path(self, path):
1042 def no_node_at_path(self, path):
1047 return NodeDoesNotExistError(
1043 return NodeDoesNotExistError(
1048 "There is no file nor directory at the given path: "
1044 "There is no file nor directory at the given path: "
1049 "'%s' at commit %s" % (path, self.short_id))
1045 "'%s' at commit %s" % (path, self.short_id))
1050
1046
1051 def _fix_path(self, path):
1047 def _fix_path(self, path):
1052 """
1048 """
1053 Paths are stored without a trailing slash, so we need to get rid of it
1049 Paths are stored without a trailing slash, so we need to get rid of it
1054 if needed.
1050 if needed.
1055 """
1051 """
1056 return path.rstrip('/')
1052 return path.rstrip('/')
1057
1053
1058 #
1054 #
1059 # Deprecated API based on changesets
1055 # Deprecated API based on changesets
1060 #
1056 #
1061
1057
1062 @property
1058 @property
1063 def revision(self):
1059 def revision(self):
1064 warnings.warn("Use idx instead", DeprecationWarning)
1060 warnings.warn("Use idx instead", DeprecationWarning)
1065 return self.idx
1061 return self.idx
1066
1062
1067 @revision.setter
1063 @revision.setter
1068 def revision(self, value):
1064 def revision(self, value):
1069 warnings.warn("Use idx instead", DeprecationWarning)
1065 warnings.warn("Use idx instead", DeprecationWarning)
1070 self.idx = value
1066 self.idx = value
1071
1067
1072 def get_file_changeset(self, path):
1068 def get_file_changeset(self, path):
1073 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1069 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1074 return self.get_file_commit(path)
1070 return self.get_file_commit(path)
1075
1071
1076
1072
1077 class BaseChangesetClass(type):
1073 class BaseChangesetClass(type):
1078
1074
1079 def __instancecheck__(self, instance):
1075 def __instancecheck__(self, instance):
1080 return isinstance(instance, BaseCommit)
1076 return isinstance(instance, BaseCommit)
1081
1077
1082
1078
1083 class BaseChangeset(BaseCommit):
1079 class BaseChangeset(BaseCommit):
1084
1080
1085 __metaclass__ = BaseChangesetClass
1081 __metaclass__ = BaseChangesetClass
1086
1082
1087 def __new__(cls, *args, **kwargs):
1083 def __new__(cls, *args, **kwargs):
1088 warnings.warn(
1084 warnings.warn(
1089 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1085 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1090 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1086 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1091
1087
1092
1088
1093 class BaseInMemoryCommit(object):
1089 class BaseInMemoryCommit(object):
1094 """
1090 """
1095 Represents differences between repository's state (most recent head) and
1091 Represents differences between repository's state (most recent head) and
1096 changes made *in place*.
1092 changes made *in place*.
1097
1093
1098 **Attributes**
1094 **Attributes**
1099
1095
1100 ``repository``
1096 ``repository``
1101 repository object for this in-memory-commit
1097 repository object for this in-memory-commit
1102
1098
1103 ``added``
1099 ``added``
1104 list of ``FileNode`` objects marked as *added*
1100 list of ``FileNode`` objects marked as *added*
1105
1101
1106 ``changed``
1102 ``changed``
1107 list of ``FileNode`` objects marked as *changed*
1103 list of ``FileNode`` objects marked as *changed*
1108
1104
1109 ``removed``
1105 ``removed``
1110 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1106 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1111 *removed*
1107 *removed*
1112
1108
1113 ``parents``
1109 ``parents``
1114 list of :class:`BaseCommit` instances representing parents of
1110 list of :class:`BaseCommit` instances representing parents of
1115 in-memory commit. Should always be a 2-element sequence.
1111 in-memory commit. Should always be a 2-element sequence.
1116
1112
1117 """
1113 """
1118
1114
1119 def __init__(self, repository):
1115 def __init__(self, repository):
1120 self.repository = repository
1116 self.repository = repository
1121 self.added = []
1117 self.added = []
1122 self.changed = []
1118 self.changed = []
1123 self.removed = []
1119 self.removed = []
1124 self.parents = []
1120 self.parents = []
1125
1121
1126 def add(self, *filenodes):
1122 def add(self, *filenodes):
1127 """
1123 """
1128 Marks given ``FileNode`` objects as *to be committed*.
1124 Marks given ``FileNode`` objects as *to be committed*.
1129
1125
1130 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1126 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1131 latest commit
1127 latest commit
1132 :raises ``NodeAlreadyAddedError``: if node with same path is already
1128 :raises ``NodeAlreadyAddedError``: if node with same path is already
1133 marked as *added*
1129 marked as *added*
1134 """
1130 """
1135 # Check if not already marked as *added* first
1131 # Check if not already marked as *added* first
1136 for node in filenodes:
1132 for node in filenodes:
1137 if node.path in (n.path for n in self.added):
1133 if node.path in (n.path for n in self.added):
1138 raise NodeAlreadyAddedError(
1134 raise NodeAlreadyAddedError(
1139 "Such FileNode %s is already marked for addition"
1135 "Such FileNode %s is already marked for addition"
1140 % node.path)
1136 % node.path)
1141 for node in filenodes:
1137 for node in filenodes:
1142 self.added.append(node)
1138 self.added.append(node)
1143
1139
1144 def change(self, *filenodes):
1140 def change(self, *filenodes):
1145 """
1141 """
1146 Marks given ``FileNode`` objects to be *changed* in next commit.
1142 Marks given ``FileNode`` objects to be *changed* in next commit.
1147
1143
1148 :raises ``EmptyRepositoryError``: if there are no commits yet
1144 :raises ``EmptyRepositoryError``: if there are no commits yet
1149 :raises ``NodeAlreadyExistsError``: if node with same path is already
1145 :raises ``NodeAlreadyExistsError``: if node with same path is already
1150 marked to be *changed*
1146 marked to be *changed*
1151 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1147 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1152 marked to be *removed*
1148 marked to be *removed*
1153 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1149 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1154 commit
1150 commit
1155 :raises ``NodeNotChangedError``: if node hasn't really been changed
1151 :raises ``NodeNotChangedError``: if node hasn't really been changed
1156 """
1152 """
1157 for node in filenodes:
1153 for node in filenodes:
1158 if node.path in (n.path for n in self.removed):
1154 if node.path in (n.path for n in self.removed):
1159 raise NodeAlreadyRemovedError(
1155 raise NodeAlreadyRemovedError(
1160 "Node at %s is already marked as removed" % node.path)
1156 "Node at %s is already marked as removed" % node.path)
1161 try:
1157 try:
1162 self.repository.get_commit()
1158 self.repository.get_commit()
1163 except EmptyRepositoryError:
1159 except EmptyRepositoryError:
1164 raise EmptyRepositoryError(
1160 raise EmptyRepositoryError(
1165 "Nothing to change - try to *add* new nodes rather than "
1161 "Nothing to change - try to *add* new nodes rather than "
1166 "changing them")
1162 "changing them")
1167 for node in filenodes:
1163 for node in filenodes:
1168 if node.path in (n.path for n in self.changed):
1164 if node.path in (n.path for n in self.changed):
1169 raise NodeAlreadyChangedError(
1165 raise NodeAlreadyChangedError(
1170 "Node at '%s' is already marked as changed" % node.path)
1166 "Node at '%s' is already marked as changed" % node.path)
1171 self.changed.append(node)
1167 self.changed.append(node)
1172
1168
1173 def remove(self, *filenodes):
1169 def remove(self, *filenodes):
1174 """
1170 """
1175 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1171 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1176 *removed* in next commit.
1172 *removed* in next commit.
1177
1173
1178 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1174 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1179 be *removed*
1175 be *removed*
1180 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1176 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1181 be *changed*
1177 be *changed*
1182 """
1178 """
1183 for node in filenodes:
1179 for node in filenodes:
1184 if node.path in (n.path for n in self.removed):
1180 if node.path in (n.path for n in self.removed):
1185 raise NodeAlreadyRemovedError(
1181 raise NodeAlreadyRemovedError(
1186 "Node is already marked to for removal at %s" % node.path)
1182 "Node is already marked to for removal at %s" % node.path)
1187 if node.path in (n.path for n in self.changed):
1183 if node.path in (n.path for n in self.changed):
1188 raise NodeAlreadyChangedError(
1184 raise NodeAlreadyChangedError(
1189 "Node is already marked to be changed at %s" % node.path)
1185 "Node is already marked to be changed at %s" % node.path)
1190 # We only mark node as *removed* - real removal is done by
1186 # We only mark node as *removed* - real removal is done by
1191 # commit method
1187 # commit method
1192 self.removed.append(node)
1188 self.removed.append(node)
1193
1189
1194 def reset(self):
1190 def reset(self):
1195 """
1191 """
1196 Resets this instance to initial state (cleans ``added``, ``changed``
1192 Resets this instance to initial state (cleans ``added``, ``changed``
1197 and ``removed`` lists).
1193 and ``removed`` lists).
1198 """
1194 """
1199 self.added = []
1195 self.added = []
1200 self.changed = []
1196 self.changed = []
1201 self.removed = []
1197 self.removed = []
1202 self.parents = []
1198 self.parents = []
1203
1199
1204 def get_ipaths(self):
1200 def get_ipaths(self):
1205 """
1201 """
1206 Returns generator of paths from nodes marked as added, changed or
1202 Returns generator of paths from nodes marked as added, changed or
1207 removed.
1203 removed.
1208 """
1204 """
1209 for node in itertools.chain(self.added, self.changed, self.removed):
1205 for node in itertools.chain(self.added, self.changed, self.removed):
1210 yield node.path
1206 yield node.path
1211
1207
1212 def get_paths(self):
1208 def get_paths(self):
1213 """
1209 """
1214 Returns list of paths from nodes marked as added, changed or removed.
1210 Returns list of paths from nodes marked as added, changed or removed.
1215 """
1211 """
1216 return list(self.get_ipaths())
1212 return list(self.get_ipaths())
1217
1213
1218 def check_integrity(self, parents=None):
1214 def check_integrity(self, parents=None):
1219 """
1215 """
1220 Checks in-memory commit's integrity. Also, sets parents if not
1216 Checks in-memory commit's integrity. Also, sets parents if not
1221 already set.
1217 already set.
1222
1218
1223 :raises CommitError: if any error occurs (i.e.
1219 :raises CommitError: if any error occurs (i.e.
1224 ``NodeDoesNotExistError``).
1220 ``NodeDoesNotExistError``).
1225 """
1221 """
1226 if not self.parents:
1222 if not self.parents:
1227 parents = parents or []
1223 parents = parents or []
1228 if len(parents) == 0:
1224 if len(parents) == 0:
1229 try:
1225 try:
1230 parents = [self.repository.get_commit(), None]
1226 parents = [self.repository.get_commit(), None]
1231 except EmptyRepositoryError:
1227 except EmptyRepositoryError:
1232 parents = [None, None]
1228 parents = [None, None]
1233 elif len(parents) == 1:
1229 elif len(parents) == 1:
1234 parents += [None]
1230 parents += [None]
1235 self.parents = parents
1231 self.parents = parents
1236
1232
1237 # Local parents, only if not None
1233 # Local parents, only if not None
1238 parents = [p for p in self.parents if p]
1234 parents = [p for p in self.parents if p]
1239
1235
1240 # Check nodes marked as added
1236 # Check nodes marked as added
1241 for p in parents:
1237 for p in parents:
1242 for node in self.added:
1238 for node in self.added:
1243 try:
1239 try:
1244 p.get_node(node.path)
1240 p.get_node(node.path)
1245 except NodeDoesNotExistError:
1241 except NodeDoesNotExistError:
1246 pass
1242 pass
1247 else:
1243 else:
1248 raise NodeAlreadyExistsError(
1244 raise NodeAlreadyExistsError(
1249 "Node `%s` already exists at %s" % (node.path, p))
1245 "Node `%s` already exists at %s" % (node.path, p))
1250
1246
1251 # Check nodes marked as changed
1247 # Check nodes marked as changed
1252 missing = set(self.changed)
1248 missing = set(self.changed)
1253 not_changed = set(self.changed)
1249 not_changed = set(self.changed)
1254 if self.changed and not parents:
1250 if self.changed and not parents:
1255 raise NodeDoesNotExistError(str(self.changed[0].path))
1251 raise NodeDoesNotExistError(str(self.changed[0].path))
1256 for p in parents:
1252 for p in parents:
1257 for node in self.changed:
1253 for node in self.changed:
1258 try:
1254 try:
1259 old = p.get_node(node.path)
1255 old = p.get_node(node.path)
1260 missing.remove(node)
1256 missing.remove(node)
1261 # if content actually changed, remove node from not_changed
1257 # if content actually changed, remove node from not_changed
1262 if old.content != node.content:
1258 if old.content != node.content:
1263 not_changed.remove(node)
1259 not_changed.remove(node)
1264 except NodeDoesNotExistError:
1260 except NodeDoesNotExistError:
1265 pass
1261 pass
1266 if self.changed and missing:
1262 if self.changed and missing:
1267 raise NodeDoesNotExistError(
1263 raise NodeDoesNotExistError(
1268 "Node `%s` marked as modified but missing in parents: %s"
1264 "Node `%s` marked as modified but missing in parents: %s"
1269 % (node.path, parents))
1265 % (node.path, parents))
1270
1266
1271 if self.changed and not_changed:
1267 if self.changed and not_changed:
1272 raise NodeNotChangedError(
1268 raise NodeNotChangedError(
1273 "Node `%s` wasn't actually changed (parents: %s)"
1269 "Node `%s` wasn't actually changed (parents: %s)"
1274 % (not_changed.pop().path, parents))
1270 % (not_changed.pop().path, parents))
1275
1271
1276 # Check nodes marked as removed
1272 # Check nodes marked as removed
1277 if self.removed and not parents:
1273 if self.removed and not parents:
1278 raise NodeDoesNotExistError(
1274 raise NodeDoesNotExistError(
1279 "Cannot remove node at %s as there "
1275 "Cannot remove node at %s as there "
1280 "were no parents specified" % self.removed[0].path)
1276 "were no parents specified" % self.removed[0].path)
1281 really_removed = set()
1277 really_removed = set()
1282 for p in parents:
1278 for p in parents:
1283 for node in self.removed:
1279 for node in self.removed:
1284 try:
1280 try:
1285 p.get_node(node.path)
1281 p.get_node(node.path)
1286 really_removed.add(node)
1282 really_removed.add(node)
1287 except CommitError:
1283 except CommitError:
1288 pass
1284 pass
1289 not_removed = set(self.removed) - really_removed
1285 not_removed = set(self.removed) - really_removed
1290 if not_removed:
1286 if not_removed:
1291 # TODO: johbo: This code branch does not seem to be covered
1287 # TODO: johbo: This code branch does not seem to be covered
1292 raise NodeDoesNotExistError(
1288 raise NodeDoesNotExistError(
1293 "Cannot remove node at %s from "
1289 "Cannot remove node at %s from "
1294 "following parents: %s" % (not_removed, parents))
1290 "following parents: %s" % (not_removed, parents))
1295
1291
1296 def commit(
1292 def commit(
1297 self, message, author, parents=None, branch=None, date=None,
1293 self, message, author, parents=None, branch=None, date=None,
1298 **kwargs):
1294 **kwargs):
1299 """
1295 """
1300 Performs in-memory commit (doesn't check workdir in any way) and
1296 Performs in-memory commit (doesn't check workdir in any way) and
1301 returns newly created :class:`BaseCommit`. Updates repository's
1297 returns newly created :class:`BaseCommit`. Updates repository's
1302 attribute `commits`.
1298 attribute `commits`.
1303
1299
1304 .. note::
1300 .. note::
1305
1301
1306 When overriding this method, each backend should call
1302 When overriding this method, each backend should call
1307 ``self.check_integrity(parents)`` first.
1303 ``self.check_integrity(parents)`` first.
1308
1304
1309 :param message: message of the commit
1305 :param message: message of the commit
1310 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1306 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1311 :param parents: single parent or sequence of parents from which commit
1307 :param parents: single parent or sequence of parents from which commit
1312 would be derived
1308 would be derived
1313 :param date: ``datetime.datetime`` instance. Defaults to
1309 :param date: ``datetime.datetime`` instance. Defaults to
1314 ``datetime.datetime.now()``.
1310 ``datetime.datetime.now()``.
1315 :param branch: branch name, as string. If none is given, the backend's
1311 :param branch: branch name, as string. If none is given, the backend's
1316 default branch is used.
1312 default branch is used.
1317
1313
1318 :raises ``CommitError``: if any error occurs while committing
1314 :raises ``CommitError``: if any error occurs while committing
1319 """
1315 """
1320 raise NotImplementedError
1316 raise NotImplementedError
1321
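
To make the add/change/remove/commit flow above concrete, a hedged sketch of an in-memory commit. It assumes ``FileNode`` is importable from ``rhodecode.lib.vcs.nodes`` with a ``(path, content=...)`` constructor, and that ``repo`` exposes an ``in_memory_commit`` attribute (as the Mercurial backend below does).

    # Hedged sketch of the in-memory commit workflow.
    from rhodecode.lib.vcs.nodes import FileNode   # assumed import path / signature

    imc = repo.in_memory_commit
    imc.add(FileNode('docs/readme.txt', content='hello'))
    new_commit = imc.commit(
        message=u'Add readme',
        author=u'Joe Doe <joe.doe@example.com>')
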
1317
1322
1318
1323 class BaseInMemoryChangesetClass(type):
1319 class BaseInMemoryChangesetClass(type):
1324
1320
1325 def __instancecheck__(self, instance):
1321 def __instancecheck__(self, instance):
1326 return isinstance(instance, BaseInMemoryCommit)
1322 return isinstance(instance, BaseInMemoryCommit)
1327
1323
1328
1324
1329 class BaseInMemoryChangeset(BaseInMemoryCommit):
1325 class BaseInMemoryChangeset(BaseInMemoryCommit):
1330
1326
1331 __metaclass__ = BaseInMemoryChangesetClass
1327 __metaclass__ = BaseInMemoryChangesetClass
1332
1328
1333 def __new__(cls, *args, **kwargs):
1329 def __new__(cls, *args, **kwargs):
1334 warnings.warn(
1330 warnings.warn(
1335 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1331 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1336 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1332 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1337
1333
1338
1334
1339 class EmptyCommit(BaseCommit):
1335 class EmptyCommit(BaseCommit):
1340 """
1336 """
1341 A dummy empty commit. It's possible to pass a hash when creating
1337 A dummy empty commit. It's possible to pass a hash when creating
1342 an EmptyCommit.
1338 an EmptyCommit.
1343 """
1339 """
1344
1340
1345 def __init__(
1341 def __init__(
1346 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1342 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1347 message='', author='', date=None):
1343 message='', author='', date=None):
1348 self._empty_commit_id = commit_id
1344 self._empty_commit_id = commit_id
1349 # TODO: johbo: Solve idx parameter, default value does not make
1345 # TODO: johbo: Solve idx parameter, default value does not make
1350 # too much sense
1346 # too much sense
1351 self.idx = idx
1347 self.idx = idx
1352 self.message = message
1348 self.message = message
1353 self.author = author
1349 self.author = author
1354 self.date = date or datetime.datetime.fromtimestamp(0)
1350 self.date = date or datetime.datetime.fromtimestamp(0)
1355 self.repository = repo
1351 self.repository = repo
1356 self.alias = alias
1352 self.alias = alias
1357
1353
1358 @LazyProperty
1354 @LazyProperty
1359 def raw_id(self):
1355 def raw_id(self):
1360 """
1356 """
1361 Returns raw string identifying this commit, useful for web
1357 Returns raw string identifying this commit, useful for web
1362 representation.
1358 representation.
1363 """
1359 """
1364
1360
1365 return self._empty_commit_id
1361 return self._empty_commit_id
1366
1362
1367 @LazyProperty
1363 @LazyProperty
1368 def branch(self):
1364 def branch(self):
1369 if self.alias:
1365 if self.alias:
1370 from rhodecode.lib.vcs.backends import get_backend
1366 from rhodecode.lib.vcs.backends import get_backend
1371 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1367 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1372
1368
1373 @LazyProperty
1369 @LazyProperty
1374 def short_id(self):
1370 def short_id(self):
1375 return self.raw_id[:12]
1371 return self.raw_id[:12]
1376
1372
1377 @LazyProperty
1373 @LazyProperty
1378 def id(self):
1374 def id(self):
1379 return self.raw_id
1375 return self.raw_id
1380
1376
1381 def get_file_commit(self, path):
1377 def get_file_commit(self, path):
1382 return self
1378 return self
1383
1379
1384 def get_file_content(self, path):
1380 def get_file_content(self, path):
1385 return u''
1381 return u''
1386
1382
1387 def get_file_size(self, path):
1383 def get_file_size(self, path):
1388 return 0
1384 return 0
1389
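
``EmptyCommit`` is useful as a placeholder when a repository has no commits yet, for example when diffing against the empty state. A minimal sketch, assuming ``repo`` is an existing repository object and that ``'hg'`` is a valid backend alias:

    # Hedged sketch: EmptyCommit acts as a null/sentinel commit.
    empty = EmptyCommit(repo=repo, alias='hg')
    print(empty.raw_id)    # 40 zeros by default
    print(empty.branch)    # backend's default branch name, e.g. 'default'
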
1385
1390
1386
1391 class EmptyChangesetClass(type):
1387 class EmptyChangesetClass(type):
1392
1388
1393 def __instancecheck__(self, instance):
1389 def __instancecheck__(self, instance):
1394 return isinstance(instance, EmptyCommit)
1390 return isinstance(instance, EmptyCommit)
1395
1391
1396
1392
1397 class EmptyChangeset(EmptyCommit):
1393 class EmptyChangeset(EmptyCommit):
1398
1394
1399 __metaclass__ = EmptyChangesetClass
1395 __metaclass__ = EmptyChangesetClass
1400
1396
1401 def __new__(cls, *args, **kwargs):
1397 def __new__(cls, *args, **kwargs):
1402 warnings.warn(
1398 warnings.warn(
1403 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1399 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1404 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1400 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1405
1401
1406 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1402 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1407 alias=None, revision=-1, message='', author='', date=None):
1403 alias=None, revision=-1, message='', author='', date=None):
1408 if requested_revision is not None:
1404 if requested_revision is not None:
1409 warnings.warn(
1405 warnings.warn(
1410 "Parameter requested_revision not supported anymore",
1406 "Parameter requested_revision not supported anymore",
1411 DeprecationWarning)
1407 DeprecationWarning)
1412 super(EmptyChangeset, self).__init__(
1408 super(EmptyChangeset, self).__init__(
1413 commit_id=cs, repo=repo, alias=alias, idx=revision,
1409 commit_id=cs, repo=repo, alias=alias, idx=revision,
1414 message=message, author=author, date=date)
1410 message=message, author=author, date=date)
1415
1411
1416 @property
1412 @property
1417 def revision(self):
1413 def revision(self):
1418 warnings.warn("Use idx instead", DeprecationWarning)
1414 warnings.warn("Use idx instead", DeprecationWarning)
1419 return self.idx
1415 return self.idx
1420
1416
1421 @revision.setter
1417 @revision.setter
1422 def revision(self, value):
1418 def revision(self, value):
1423 warnings.warn("Use idx instead", DeprecationWarning)
1419 warnings.warn("Use idx instead", DeprecationWarning)
1424 self.idx = value
1420 self.idx = value
1425
1421
1426
1422
1427 class CollectionGenerator(object):
1423 class CollectionGenerator(object):
1428
1424
1429 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1425 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1430 self.repo = repo
1426 self.repo = repo
1431 self.commit_ids = commit_ids
1427 self.commit_ids = commit_ids
1432 # TODO: (oliver) this isn't currently hooked up
1428 # TODO: (oliver) this isn't currently hooked up
1433 self.collection_size = None
1429 self.collection_size = None
1434 self.pre_load = pre_load
1430 self.pre_load = pre_load
1435
1431
1436 def __len__(self):
1432 def __len__(self):
1437 if self.collection_size is not None:
1433 if self.collection_size is not None:
1438 return self.collection_size
1434 return self.collection_size
1439 return self.commit_ids.__len__()
1435 return self.commit_ids.__len__()
1440
1436
1441 def __iter__(self):
1437 def __iter__(self):
1442 for commit_id in self.commit_ids:
1438 for commit_id in self.commit_ids:
1443 # TODO: johbo: Mercurial passes in commit indices or commit ids
1439 # TODO: johbo: Mercurial passes in commit indices or commit ids
1444 yield self._commit_factory(commit_id)
1440 yield self._commit_factory(commit_id)
1445
1441
1446 def _commit_factory(self, commit_id):
1442 def _commit_factory(self, commit_id):
1447 """
1443 """
1448 Allows backends to override the way commits are generated.
1444 Allows backends to override the way commits are generated.
1449 """
1445 """
1450 return self.repo.get_commit(commit_id=commit_id,
1446 return self.repo.get_commit(commit_id=commit_id,
1451 pre_load=self.pre_load)
1447 pre_load=self.pre_load)
1452
1448
1453 def __getslice__(self, i, j):
1449 def __getslice__(self, i, j):
1454 """
1450 """
1455 Returns an iterator over a slice of the repository
1451 Returns an iterator over a slice of the repository
1456 """
1452 """
1457 commit_ids = self.commit_ids[i:j]
1453 commit_ids = self.commit_ids[i:j]
1458 return self.__class__(
1454 return self.__class__(
1459 self.repo, commit_ids, pre_load=self.pre_load)
1455 self.repo, commit_ids, pre_load=self.pre_load)
1460
1456
1461 def __repr__(self):
1457 def __repr__(self):
1462 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1458 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1463
1459
1464
1460
1465 class Config(object):
1461 class Config(object):
1466 """
1462 """
1467 Represents the configuration for a repository.
1463 Represents the configuration for a repository.
1468
1464
1469 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1465 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1470 standard library. It implements only the needed subset.
1466 standard library. It implements only the needed subset.
1471 """
1467 """
1472
1468
1473 def __init__(self):
1469 def __init__(self):
1474 self._values = {}
1470 self._values = {}
1475
1471
1476 def copy(self):
1472 def copy(self):
1477 clone = Config()
1473 clone = Config()
1478 for section, values in self._values.items():
1474 for section, values in self._values.items():
1479 clone._values[section] = values.copy()
1475 clone._values[section] = values.copy()
1480 return clone
1476 return clone
1481
1477
1482 def __repr__(self):
1478 def __repr__(self):
1483 return '<Config(%s sections) at %s>' % (
1479 return '<Config(%s sections) at %s>' % (
1484 len(self._values), hex(id(self)))
1480 len(self._values), hex(id(self)))
1485
1481
1486 def items(self, section):
1482 def items(self, section):
1487 return self._values.get(section, {}).iteritems()
1483 return self._values.get(section, {}).iteritems()
1488
1484
1489 def get(self, section, option):
1485 def get(self, section, option):
1490 return self._values.get(section, {}).get(option)
1486 return self._values.get(section, {}).get(option)
1491
1487
1492 def set(self, section, option, value):
1488 def set(self, section, option, value):
1493 section_values = self._values.setdefault(section, {})
1489 section_values = self._values.setdefault(section, {})
1494 section_values[option] = value
1490 section_values[option] = value
1495
1491
1496 def clear_section(self, section):
1492 def clear_section(self, section):
1497 self._values[section] = {}
1493 self._values[section] = {}
1498
1494
1499 def serialize(self):
1495 def serialize(self):
1500 """
1496 """
1501 Creates a list of three tuples (section, key, value) representing
1497 Creates a list of three tuples (section, key, value) representing
1502 this config object.
1498 this config object.
1503 """
1499 """
1504 items = []
1500 items = []
1505 for section in self._values:
1501 for section in self._values:
1506 for option, value in self._values[section].items():
1502 for option, value in self._values[section].items():
1507 items.append(
1503 items.append(
1508 (safe_str(section), safe_str(option), safe_str(value)))
1504 (safe_str(section), safe_str(option), safe_str(value)))
1509 return items
1505 return items
1510
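
A small sketch of the ``Config`` API above, mirroring the subset of ConfigParser it implements; the section and option names are just examples.

    # Hedged sketch of Config usage.
    config = Config()
    config.set('web', 'description', 'example repository')
    assert config.get('web', 'description') == 'example repository'
    for section, option, value in config.serialize():
        print('%s.%s=%s' % (section, option, value))
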
1506
1511
1507
1512 class Diff(object):
1508 class Diff(object):
1513 """
1509 """
1514 Represents a diff result from a repository backend.
1510 Represents a diff result from a repository backend.
1515
1511
1516 Subclasses have to provide a backend specific value for :attr:`_header_re`.
1512 Subclasses have to provide a backend specific value for :attr:`_header_re`.
1517 """
1513 """
1518
1514
1519 _header_re = None
1515 _header_re = None
1520
1516
1521 def __init__(self, raw_diff):
1517 def __init__(self, raw_diff):
1522 self.raw = raw_diff
1518 self.raw = raw_diff
1523
1519
1524 def chunks(self):
1520 def chunks(self):
1525 """
1521 """
1526 Splits the diff into chunks of separate ``diff --git a/file b/file`` sections.
1522 Splits the diff into chunks of separate ``diff --git a/file b/file`` sections.
1527 To keep diffs consistent we must prepend each chunk with \n, and make sure
1523 To keep diffs consistent we must prepend each chunk with \n, and make sure
1528 we can detect the last chunk, as it also has a special rule.
1524 we can detect the last chunk, as it also has a special rule.
1529 """
1525 """
1530 chunks = ('\n' + self.raw).split('\ndiff --git')[1:]
1526 chunks = ('\n' + self.raw).split('\ndiff --git')[1:]
1531 total_chunks = len(chunks)
1527 total_chunks = len(chunks)
1532 return (DiffChunk(chunk, self, cur_chunk == total_chunks)
1528 return (DiffChunk(chunk, self, cur_chunk == total_chunks)
1533 for cur_chunk, chunk in enumerate(chunks, start=1))
1529 for cur_chunk, chunk in enumerate(chunks, start=1))
1534
1530
1535
1531
1536 class DiffChunk(object):
1532 class DiffChunk(object):
1537
1533
1538 def __init__(self, chunk, diff, last_chunk):
1534 def __init__(self, chunk, diff, last_chunk):
1539 self._diff = diff
1535 self._diff = diff
1540
1536
1541 # since we split by \ndiff --git that part is lost from original diff
1537 # since we split by \ndiff --git that part is lost from original diff
1542 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1538 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1543 if not last_chunk:
1539 if not last_chunk:
1544 chunk += '\n'
1540 chunk += '\n'
1545
1541
1546 match = self._diff._header_re.match(chunk)
1542 match = self._diff._header_re.match(chunk)
1547 self.header = match.groupdict()
1543 self.header = match.groupdict()
1548 self.diff = chunk[match.end():]
1544 self.diff = chunk[match.end():]
1549 self.raw = chunk
1545 self.raw = chunk
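
A hedged sketch of consuming a backend diff through the chunk API above. It assumes a concrete subclass that defines ``_header_re`` (such as ``MercurialDiff``, used further below) and that ``commit1`` and ``commit2`` are commits obtained elsewhere.

    # Hedged sketch: iterate per-file chunks of a backend diff.
    diff = repo.get_diff(commit1, commit2)   # returns e.g. a MercurialDiff
    for chunk in diff.chunks():
        print(chunk.header)                  # dict of groups matched by _header_re
        print(chunk.diff[:200])              # body of this file's diff
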
@@ -1,808 +1,803 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2016 RhodeCode GmbH
3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG repository module
22 HG repository module
23 """
23 """
24
24
25 import logging
25 import logging
26 import binascii
26 import binascii
27 import os
27 import os
28 import shutil
28 import shutil
29 import urllib
29 import urllib
30
30
31 from zope.cachedescriptors.property import Lazy as LazyProperty
31 from zope.cachedescriptors.property import Lazy as LazyProperty
32
32
33 from rhodecode.lib.compat import OrderedDict
33 from rhodecode.lib.compat import OrderedDict
34 from rhodecode.lib.datelib import (
34 from rhodecode.lib.datelib import (
35 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate,
35 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate,
36 date_astimestamp)
36 date_astimestamp)
37 from rhodecode.lib.utils import safe_unicode, safe_str
37 from rhodecode.lib.utils import safe_unicode, safe_str
38 from rhodecode.lib.vcs import connection
38 from rhodecode.lib.vcs import connection
39 from rhodecode.lib.vcs.backends.base import (
39 from rhodecode.lib.vcs.backends.base import (
40 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 BaseRepository, CollectionGenerator, Config, MergeResponse,
41 MergeFailureReason, Reference)
41 MergeFailureReason, Reference)
42 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
43 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
44 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
45 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
46 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
47 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
47 TagDoesNotExistError, CommitDoesNotExistError)
48
48
49 hexlify = binascii.hexlify
49 hexlify = binascii.hexlify
50 nullid = "\0" * 20
50 nullid = "\0" * 20
51
51
52 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
53
53
54
54
55 class MercurialRepository(BaseRepository):
55 class MercurialRepository(BaseRepository):
56 """
56 """
57 Mercurial repository backend
57 Mercurial repository backend
58 """
58 """
59 DEFAULT_BRANCH_NAME = 'default'
59 DEFAULT_BRANCH_NAME = 'default'
60
60
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 update_after_clone=False, with_wire=None):
62 update_after_clone=False, with_wire=None):
63 """
63 """
64 Raises RepositoryError if repository could not be found at the given
64 Raises RepositoryError if repository could not be found at the given
65 ``repo_path``.
65 ``repo_path``.
66
66
67 :param repo_path: local path of the repository
67 :param repo_path: local path of the repository
68 :param config: config object containing the repo configuration
68 :param config: config object containing the repo configuration
69 :param create=False: if set to True, would try to create repository if
69 :param create=False: if set to True, would try to create repository if
70 it does not exist rather than raising exception
70 it does not exist rather than raising exception
71 :param src_url=None: would try to clone repository from given location
71 :param src_url=None: would try to clone repository from given location
72 :param update_after_clone=False: sets update of working copy after
72 :param update_after_clone=False: sets update of working copy after
73 making a clone
73 making a clone
74 """
74 """
75 self.path = safe_str(os.path.abspath(repo_path))
75 self.path = safe_str(os.path.abspath(repo_path))
76 self.config = config if config else Config()
76 self.config = config if config else Config()
77 self._remote = connection.Hg(
77 self._remote = connection.Hg(
78 self.path, self.config, with_wire=with_wire)
78 self.path, self.config, with_wire=with_wire)
79
79
80 self._init_repo(create, src_url, update_after_clone)
80 self._init_repo(create, src_url, update_after_clone)
81
81
82 # caches
82 # caches
83 self._commit_ids = {}
83 self._commit_ids = {}
84
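
A hedged sketch of constructing the backend directly, based on the ``__init__`` signature above; the repository paths are assumptions of the example.

    # Hedged sketch: open an existing Mercurial repository, or create a new one.
    repo = MercurialRepository('/srv/repos/example-hg')           # must already exist
    new_repo = MercurialRepository('/srv/repos/new-hg', create=True)
    print(len(repo.commit_ids))                                   # number of commits
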
84
85 @LazyProperty
85 @LazyProperty
86 def commit_ids(self):
86 def commit_ids(self):
87 """
87 """
88 Returns list of commit ids, in ascending order. Being a lazy
88 Returns list of commit ids, in ascending order. Being a lazy
89 attribute allows external tools to inject shas from cache.
89 attribute allows external tools to inject shas from cache.
90 """
90 """
91 commit_ids = self._get_all_commit_ids()
91 commit_ids = self._get_all_commit_ids()
92 self._rebuild_cache(commit_ids)
92 self._rebuild_cache(commit_ids)
93 return commit_ids
93 return commit_ids
94
94
95 def _rebuild_cache(self, commit_ids):
95 def _rebuild_cache(self, commit_ids):
96 self._commit_ids = dict((commit_id, index)
96 self._commit_ids = dict((commit_id, index)
97 for index, commit_id in enumerate(commit_ids))
97 for index, commit_id in enumerate(commit_ids))
98
98
99 @LazyProperty
99 @LazyProperty
100 def branches(self):
100 def branches(self):
101 return self._get_branches()
101 return self._get_branches()
102
102
103 @LazyProperty
103 @LazyProperty
104 def branches_closed(self):
104 def branches_closed(self):
105 return self._get_branches(active=False, closed=True)
105 return self._get_branches(active=False, closed=True)
106
106
107 @LazyProperty
107 @LazyProperty
108 def branches_all(self):
108 def branches_all(self):
109 all_branches = {}
109 all_branches = {}
110 all_branches.update(self.branches)
110 all_branches.update(self.branches)
111 all_branches.update(self.branches_closed)
111 all_branches.update(self.branches_closed)
112 return all_branches
112 return all_branches
113
113
114 def _get_branches(self, active=True, closed=False):
114 def _get_branches(self, active=True, closed=False):
115 """
115 """
116 Gets branches for this repository
116 Gets branches for this repository
117 Returns only active, non-closed branches by default
117 Returns only active, non-closed branches by default
118
118
119 :param active: also return active branches
119 :param active: also return active branches
120 :param closed: also return closed branches
120 :param closed: also return closed branches
121
121
122 """
122 """
123 if self.is_empty():
123 if self.is_empty():
124 return {}
124 return {}
125
125
126 def get_name(ctx):
126 def get_name(ctx):
127 return ctx[0]
127 return ctx[0]
128
128
129 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
129 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
130 self._remote.branches(active, closed).items()]
130 self._remote.branches(active, closed).items()]
131
131
132 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
132 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
133
133
134 @LazyProperty
134 @LazyProperty
135 def tags(self):
135 def tags(self):
136 """
136 """
137 Gets tags for this repository
137 Gets tags for this repository
138 """
138 """
139 return self._get_tags()
139 return self._get_tags()
140
140
141 def _get_tags(self):
141 def _get_tags(self):
142 if self.is_empty():
142 if self.is_empty():
143 return {}
143 return {}
144
144
145 def get_name(ctx):
145 def get_name(ctx):
146 return ctx[0]
146 return ctx[0]
147
147
148 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
148 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
149 self._remote.tags().items()]
149 self._remote.tags().items()]
150
150
151 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
151 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
152
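
Reading refs with the lazy properties above; a minimal sketch assuming ``repo`` is a ``MercurialRepository``.

    # Hedged sketch: inspect branches and tags.
    for name, commit_id in repo.branches.items():   # open branches only
        print('%s %s' % (name, commit_id))
    print(repo.branches_all.keys())                 # open + closed branches
    print(repo.tags.get(u'tip'))                    # commit id for a tag, if present
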
152
153 def tag(self, name, user, commit_id=None, message=None, date=None,
153 def tag(self, name, user, commit_id=None, message=None, date=None,
154 **kwargs):
154 **kwargs):
155 """
155 """
156 Creates and returns a tag for the given ``commit_id``.
156 Creates and returns a tag for the given ``commit_id``.
157
157
158 :param name: name for new tag
158 :param name: name for new tag
159 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
159 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
160 :param commit_id: commit id for which new tag would be created
160 :param commit_id: commit id for which new tag would be created
161 :param message: message of the tag's commit
161 :param message: message of the tag's commit
162 :param date: date of tag's commit
162 :param date: date of tag's commit
163
163
164 :raises TagAlreadyExistError: if tag with same name already exists
164 :raises TagAlreadyExistError: if tag with same name already exists
165 """
165 """
166 if name in self.tags:
166 if name in self.tags:
167 raise TagAlreadyExistError("Tag %s already exists" % name)
167 raise TagAlreadyExistError("Tag %s already exists" % name)
168 commit = self.get_commit(commit_id=commit_id)
168 commit = self.get_commit(commit_id=commit_id)
169 local = kwargs.setdefault('local', False)
169 local = kwargs.setdefault('local', False)
170
170
171 if message is None:
171 if message is None:
172 message = "Added tag %s for commit %s" % (name, commit.short_id)
172 message = "Added tag %s for commit %s" % (name, commit.short_id)
173
173
174 date, tz = date_to_timestamp_plus_offset(date)
174 date, tz = date_to_timestamp_plus_offset(date)
175
175
176 self._remote.tag(
176 self._remote.tag(
177 name, commit.raw_id, message, local, user, date, tz)
177 name, commit.raw_id, message, local, user, date, tz)
178 self._remote.invalidate_vcs_cache()
178 self._remote.invalidate_vcs_cache()
179
179
180 # Reinitialize tags
180 # Reinitialize tags
181 self.tags = self._get_tags()
181 self.tags = self._get_tags()
182 tag_id = self.tags[name]
182 tag_id = self.tags[name]
183
183
184 return self.get_commit(commit_id=tag_id)
184 return self.get_commit(commit_id=tag_id)
185
185
186 def remove_tag(self, name, user, message=None, date=None):
186 def remove_tag(self, name, user, message=None, date=None):
187 """
187 """
188 Removes tag with the given `name`.
188 Removes tag with the given `name`.
189
189
190 :param name: name of the tag to be removed
190 :param name: name of the tag to be removed
191 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
191 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
192 :param message: message of the tag's removal commit
192 :param message: message of the tag's removal commit
193 :param date: date of tag's removal commit
193 :param date: date of tag's removal commit
194
194
195 :raises TagDoesNotExistError: if tag with given name does not exist
195 :raises TagDoesNotExistError: if tag with given name does not exist
196 """
196 """
197 if name not in self.tags:
197 if name not in self.tags:
198 raise TagDoesNotExistError("Tag %s does not exist" % name)
198 raise TagDoesNotExistError("Tag %s does not exist" % name)
199 if message is None:
199 if message is None:
200 message = "Removed tag %s" % name
200 message = "Removed tag %s" % name
201 local = False
201 local = False
202
202
203 date, tz = date_to_timestamp_plus_offset(date)
203 date, tz = date_to_timestamp_plus_offset(date)
204
204
205 self._remote.tag(name, nullid, message, local, user, date, tz)
205 self._remote.tag(name, nullid, message, local, user, date, tz)
206 self._remote.invalidate_vcs_cache()
206 self._remote.invalidate_vcs_cache()
207 self.tags = self._get_tags()
207 self.tags = self._get_tags()
208
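
A hedged sketch of the tagging round trip using the two methods above; the user string and tag name are assumptions of the example.

    # Hedged sketch: create and then remove a tag.
    tagged_commit = repo.tag(
        name=u'v1.0.0',
        user='Joe Doe <joe.doe@example.com>',
        commit_id=repo.commit_ids[-1],              # tag the newest commit
        message='Tagging v1.0.0')
    repo.remove_tag(u'v1.0.0', user='Joe Doe <joe.doe@example.com>')
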
208
209 @LazyProperty
209 @LazyProperty
210 def bookmarks(self):
210 def bookmarks(self):
211 """
211 """
212 Gets bookmarks for this repository
212 Gets bookmarks for this repository
213 """
213 """
214 return self._get_bookmarks()
214 return self._get_bookmarks()
215
215
216 def _get_bookmarks(self):
216 def _get_bookmarks(self):
217 if self.is_empty():
217 if self.is_empty():
218 return {}
218 return {}
219
219
220 def get_name(ctx):
220 def get_name(ctx):
221 return ctx[0]
221 return ctx[0]
222
222
223 _bookmarks = [
223 _bookmarks = [
224 (safe_unicode(n), hexlify(h)) for n, h in
224 (safe_unicode(n), hexlify(h)) for n, h in
225 self._remote.bookmarks().items()]
225 self._remote.bookmarks().items()]
226
226
227 return OrderedDict(sorted(_bookmarks, key=get_name))
227 return OrderedDict(sorted(_bookmarks, key=get_name))
228
228
229 def _get_all_commit_ids(self):
229 def _get_all_commit_ids(self):
230 return self._remote.get_all_commit_ids('visible')
230 return self._remote.get_all_commit_ids('visible')
231
231
232 def get_diff(
232 def get_diff(
233 self, commit1, commit2, path='', ignore_whitespace=False,
233 self, commit1, commit2, path='', ignore_whitespace=False,
234 context=3, path1=None):
234 context=3, path1=None):
235 """
235 """
236 Returns (git like) *diff*, as plain text. Shows changes introduced by
236 Returns (git like) *diff*, as plain text. Shows changes introduced by
237 `commit2` since `commit1`.
237 `commit2` since `commit1`.
238
238
239 :param commit1: Entry point from which diff is shown. Can be
239 :param commit1: Entry point from which diff is shown. Can be
240 ``self.EMPTY_COMMIT`` - in this case, patch showing all
240 ``self.EMPTY_COMMIT`` - in this case, patch showing all
241 the changes since empty state of the repository until `commit2`
241 the changes since empty state of the repository until `commit2`
242 :param commit2: Until which commit changes should be shown.
242 :param commit2: Until which commit changes should be shown.
243 :param ignore_whitespace: If set to ``True``, would not show whitespace
243 :param ignore_whitespace: If set to ``True``, would not show whitespace
244 changes. Defaults to ``False``.
244 changes. Defaults to ``False``.
245 :param context: How many lines before/after changed lines should be
245 :param context: How many lines before/after changed lines should be
246 shown. Defaults to ``3``.
246 shown. Defaults to ``3``.
247 """
247 """
248 self._validate_diff_commits(commit1, commit2)
248 self._validate_diff_commits(commit1, commit2)
249 if path1 is not None and path1 != path:
249 if path1 is not None and path1 != path:
250 raise ValueError("Diff of two different paths not supported.")
250 raise ValueError("Diff of two different paths not supported.")
251
251
252 if path:
252 if path:
253 file_filter = [self.path, path]
253 file_filter = [self.path, path]
254 else:
254 else:
255 file_filter = None
255 file_filter = None
256
256
257 diff = self._remote.diff(
257 diff = self._remote.diff(
258 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
258 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
259 opt_git=True, opt_ignorews=ignore_whitespace,
259 opt_git=True, opt_ignorews=ignore_whitespace,
260 context=context)
260 context=context)
261 return MercurialDiff(diff)
261 return MercurialDiff(diff)
262
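
Using ``get_diff`` above, a small hedged sketch that diffs the newest commit against its first parent (or against the empty state when it has no parent); ``repo`` is assumed.

    # Hedged sketch: textual diff between two commits.
    commit2 = repo.get_commit()                     # newest commit
    commit1 = commit2.parents[0] if commit2.parents else repo.EMPTY_COMMIT
    diff = repo.get_diff(commit1, commit2, ignore_whitespace=True, context=5)
    print(diff.raw[:500])                           # first part of the raw diff text
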
262
263 def strip(self, commit_id, branch=None):
263 def strip(self, commit_id, branch=None):
264 self._remote.strip(commit_id, update=False, backup="none")
264 self._remote.strip(commit_id, update=False, backup="none")
265
265
266 self._remote.invalidate_vcs_cache()
266 self._remote.invalidate_vcs_cache()
267 self.commit_ids = self._get_all_commit_ids()
267 self.commit_ids = self._get_all_commit_ids()
268 self._rebuild_cache(self.commit_ids)
268 self._rebuild_cache(self.commit_ids)
269
269
270 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
270 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
271 if commit_id1 == commit_id2:
271 if commit_id1 == commit_id2:
272 return commit_id1
272 return commit_id1
273
273
274 ancestors = self._remote.revs_from_revspec(
274 ancestors = self._remote.revs_from_revspec(
275 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
275 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
276 other_path=repo2.path)
276 other_path=repo2.path)
277 return repo2[ancestors[0]].raw_id if ancestors else None
277 return repo2[ancestors[0]].raw_id if ancestors else None
278
278
279 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
279 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
280 if commit_id1 == commit_id2:
280 if commit_id1 == commit_id2:
281 commits = []
281 commits = []
282 else:
282 else:
283 if merge:
283 if merge:
284 indexes = self._remote.revs_from_revspec(
284 indexes = self._remote.revs_from_revspec(
285 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
285 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
286 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
286 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
287 else:
287 else:
288 indexes = self._remote.revs_from_revspec(
288 indexes = self._remote.revs_from_revspec(
289 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
289 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
290 commit_id1, other_path=repo2.path)
290 commit_id1, other_path=repo2.path)
291
291
292 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
292 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
293 for idx in indexes]
293 for idx in indexes]
294
294
295 return commits
295 return commits
296
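
A hedged sketch of the two comparison helpers above, as used for pull-request style views; ``repo`` and ``other_repo`` are assumed to be ``MercurialRepository`` instances.

    # Hedged sketch: common ancestor and commit range between two repositories.
    ancestor_id = repo.get_common_ancestor(
        repo.commit_ids[-1], other_repo.commit_ids[-1], other_repo)
    commits = repo.compare(
        repo.commit_ids[-1], other_repo.commit_ids[-1], other_repo, merge=True)
    for commit in commits:
        print(commit.short_id)
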
296
297 @staticmethod
297 @staticmethod
298 def check_url(url, config):
298 def check_url(url, config):
299 """
299 """
300 Function will check the given url and try to verify if it's a valid
300 Function will check the given url and try to verify if it's a valid
301 link. Sometimes it may happen that mercurial will issue a basic
301 link. Sometimes it may happen that mercurial will issue a basic
302 auth request that can cause the whole API to hang when used from python
302 auth request that can cause the whole API to hang when used from python
303 or other external calls.
303 or other external calls.
304
304
305 On failure it'll raise urllib2.HTTPError; the exception is also raised
305 On failure it'll raise urllib2.HTTPError; the exception is also raised
306 when the return code is not 200
306 when the return code is not 200
307 """
307 """
308 # check first if it's not a local url
308 # check first if it's not a local url
309 if os.path.isdir(url) or url.startswith('file:'):
309 if os.path.isdir(url) or url.startswith('file:'):
310 return True
310 return True
311
311
312 # Request the _remote to verify the url
312 # Request the _remote to verify the url
313 return connection.Hg.check_url(url, config.serialize())
313 return connection.Hg.check_url(url, config.serialize())
314
314
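# A minimal standalone sketch of the same guard, assuming only the standard
# library; `looks_local` is a hypothetical helper name, not RhodeCode API.
import os

def looks_local(url):
    # Local directories and file: urls are accepted without contacting
    # the remote verifier.
    return os.path.isdir(url) or url.startswith('file:')

# looks_local('/srv/repos/foo')      -> True when the directory exists
# looks_local('https://host/repo')   -> False, so the remote check is needed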
315 @staticmethod
315 @staticmethod
316 def is_valid_repository(path):
316 def is_valid_repository(path):
317 return os.path.isdir(os.path.join(path, '.hg'))
317 return os.path.isdir(os.path.join(path, '.hg'))
318
318
319 def _init_repo(self, create, src_url=None, update_after_clone=False):
319 def _init_repo(self, create, src_url=None, update_after_clone=False):
320 """
320 """
321 Check for a mercurial repository in the given path. If there
321 Check for a mercurial repository in the given path. If there
322 is no repository in that path, an exception is raised unless the
322 is no repository in that path, an exception is raised unless the
323 `create` parameter is set to True - in that case the repository
323 `create` parameter is set to True - in that case the repository
324 is created.
324 is created.
325
325
326 If `src_url` is given, the repository is cloned from that
326 If `src_url` is given, the repository is cloned from that
327 location. Additionally the working copy is updated according
327 location. Additionally the working copy is updated according
328 to the `update_after_clone` flag.
328 to the `update_after_clone` flag.
329 """
329 """
330 if create and os.path.exists(self.path):
330 if create and os.path.exists(self.path):
331 raise RepositoryError(
331 raise RepositoryError(
332 "Cannot create repository at %s, location already exists"
332 "Cannot create repository at %s, location already exists"
333 % self.path)
333 % self.path)
334
334
335 if src_url:
335 if src_url:
336 url = str(self._get_url(src_url))
336 url = str(self._get_url(src_url))
337 MercurialRepository.check_url(url, self.config)
337 MercurialRepository.check_url(url, self.config)
338
338
339 self._remote.clone(url, self.path, update_after_clone)
339 self._remote.clone(url, self.path, update_after_clone)
340
340
341 # Don't try to create if we've already cloned repo
341 # Don't try to create if we've already cloned repo
342 create = False
342 create = False
343
343
344 if create:
344 if create:
345 os.makedirs(self.path, mode=0755)
345 os.makedirs(self.path, mode=0755)
346
346
347 self._remote.localrepository(create)
347 self._remote.localrepository(create)
348
348
349 @LazyProperty
349 @LazyProperty
350 def in_memory_commit(self):
350 def in_memory_commit(self):
351 return MercurialInMemoryCommit(self)
351 return MercurialInMemoryCommit(self)
352
352
353 @LazyProperty
353 @LazyProperty
354 def description(self):
354 def description(self):
355 description = self._remote.get_config_value(
355 description = self._remote.get_config_value(
356 'web', 'description', untrusted=True)
356 'web', 'description', untrusted=True)
357 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
357 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
358
358
359 @LazyProperty
359 @LazyProperty
360 def contact(self):
360 def contact(self):
361 contact = (
361 contact = (
362 self._remote.get_config_value("web", "contact") or
362 self._remote.get_config_value("web", "contact") or
363 self._remote.get_config_value("ui", "username"))
363 self._remote.get_config_value("ui", "username"))
364 return safe_unicode(contact or self.DEFAULT_CONTACT)
364 return safe_unicode(contact or self.DEFAULT_CONTACT)
365
365
366 @LazyProperty
366 @LazyProperty
367 def last_change(self):
367 def last_change(self):
368 """
368 """
369 Returns the last change made on this repository as a
369 Returns the last change made on this repository as a
370 `datetime.datetime` object
370 `datetime.datetime` object
371 """
371 """
372 return utcdate_fromtimestamp(self._get_mtime(), makedate()[1])
372 return utcdate_fromtimestamp(self._get_mtime(), makedate()[1])
373
373
374 def _get_mtime(self):
374 def _get_mtime(self):
375 try:
375 try:
376 return date_astimestamp(self.get_commit().date)
376 return date_astimestamp(self.get_commit().date)
377 except RepositoryError:
377 except RepositoryError:
378 # fallback to filesystem
378 # fallback to filesystem
379 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
379 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
380 st_path = os.path.join(self.path, '.hg', "store")
380 st_path = os.path.join(self.path, '.hg', "store")
381 if os.path.exists(cl_path):
381 if os.path.exists(cl_path):
382 return os.stat(cl_path).st_mtime
382 return os.stat(cl_path).st_mtime
383 else:
383 else:
384 return os.stat(st_path).st_mtime
384 return os.stat(st_path).st_mtime
385
385
386 def _sanitize_commit_idx(self, idx):
386 def _sanitize_commit_idx(self, idx):
387 # Note: Mercurial has ``int(-1)`` reserved as the id_or_idx of a
387 # Note: Mercurial has ``int(-1)`` reserved as the id_or_idx of a
388 # non-existing commit. A `long` is treated correctly though, so we
388 # non-existing commit. A `long` is treated correctly though, so we
389 # convert `int` to `long` here to make sure it is handled correctly.
389 # convert `int` to `long` here to make sure it is handled correctly.
390 if isinstance(idx, int):
390 if isinstance(idx, int):
391 return long(idx)
391 return long(idx)
392 return idx
392 return idx
393
393
394 def _get_url(self, url):
394 def _get_url(self, url):
395 """
395 """
396 Returns the normalized url. If no schema is given, it falls
396 Returns the normalized url. If no schema is given, it falls
397 back to the filesystem
397 back to the filesystem
398 (``file:///``) schema.
398 (``file:///``) schema.
399 """
399 """
400 url = url.encode('utf8')
400 url = url.encode('utf8')
401 if url != 'default' and '://' not in url:
401 if url != 'default' and '://' not in url:
402 url = "file:" + urllib.pathname2url(url)
402 url = "file:" + urllib.pathname2url(url)
403 return url
403 return url
404
404
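# A small standalone sketch of the same normalization, assuming Python 2's
# urllib (matching the code above); `normalize_url` is a hypothetical name.
import urllib

def normalize_url(url):
    # Anything without a scheme, except the special name 'default',
    # is treated as a filesystem path.
    if url != 'default' and '://' not in url:
        url = "file:" + urllib.pathname2url(url)
    return url

# normalize_url('/srv/repos/foo')   -> 'file:/srv/repos/foo'
# normalize_url('http://host/r')    -> returned unchanged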
405 def get_hook_location(self):
405 def get_hook_location(self):
406 """
406 """
407 Returns the absolute path to the location where hooks are stored.
407 Returns the absolute path to the location where hooks are stored.
408 """
408 """
409 return os.path.join(self.path, '.hg', '.hgrc')
409 return os.path.join(self.path, '.hg', '.hgrc')
410
410
411 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
411 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
412 """
412 """
413 Returns ``MercurialCommit`` object representing repository's
413 Returns ``MercurialCommit`` object representing repository's
414 commit at the given `commit_id` or `commit_idx`.
414 commit at the given `commit_id` or `commit_idx`.
415 """
415 """
416 if self.is_empty():
416 if self.is_empty():
417 raise EmptyRepositoryError("There are no commits yet")
417 raise EmptyRepositoryError("There are no commits yet")
418
418
419 if commit_id is not None:
419 if commit_id is not None:
420 self._validate_commit_id(commit_id)
420 self._validate_commit_id(commit_id)
421 try:
421 try:
422 idx = self._commit_ids[commit_id]
422 idx = self._commit_ids[commit_id]
423 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
423 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
424 except KeyError:
424 except KeyError:
425 pass
425 pass
426 elif commit_idx is not None:
426 elif commit_idx is not None:
427 self._validate_commit_idx(commit_idx)
427 self._validate_commit_idx(commit_idx)
428 commit_idx = self._sanitize_commit_idx(commit_idx)
428 commit_idx = self._sanitize_commit_idx(commit_idx)
429 try:
429 try:
430 id_ = self.commit_ids[commit_idx]
430 id_ = self.commit_ids[commit_idx]
431 if commit_idx < 0:
431 if commit_idx < 0:
432 commit_idx += len(self.commit_ids)
432 commit_idx += len(self.commit_ids)
433 return MercurialCommit(
433 return MercurialCommit(
434 self, id_, commit_idx, pre_load=pre_load)
434 self, id_, commit_idx, pre_load=pre_load)
435 except IndexError:
435 except IndexError:
436 commit_id = commit_idx
436 commit_id = commit_idx
437 else:
437 else:
438 commit_id = "tip"
438 commit_id = "tip"
439
439
440 # TODO Paris: Ugly hack to "serialize" long for msgpack
440 # TODO Paris: Ugly hack to "serialize" long for msgpack
441 if isinstance(commit_id, long):
441 if isinstance(commit_id, long):
442 commit_id = float(commit_id)
442 commit_id = float(commit_id)
443
443
444 if isinstance(commit_id, unicode):
444 if isinstance(commit_id, unicode):
445 commit_id = safe_str(commit_id)
445 commit_id = safe_str(commit_id)
446
446
447 raw_id, idx = self._remote.lookup(commit_id, both=True)
447 raw_id, idx = self._remote.lookup(commit_id, both=True)
448
448
449 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
449 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
450
450
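# A standalone sketch of the lookup precedence above: an explicit commit_id
# wins over commit_idx, and "tip" is the fallback. `resolve_lookup` is a
# hypothetical helper, not part of the MercurialRepository API.
def resolve_lookup(commit_id=None, commit_idx=None):
    if commit_id is not None:
        return ('id', commit_id)
    if commit_idx is not None:
        return ('idx', commit_idx)
    return ('id', 'tip')

# resolve_lookup()                   -> ('id', 'tip')
# resolve_lookup(commit_idx=0)       -> ('idx', 0)
# resolve_lookup(commit_id='abc123') -> ('id', 'abc123')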
451 def get_commits(
451 def get_commits(
452 self, start_id=None, end_id=None, start_date=None, end_date=None,
452 self, start_id=None, end_id=None, start_date=None, end_date=None,
453 branch_name=None, pre_load=None):
453 branch_name=None, pre_load=None):
454 """
454 """
455 Returns generator of ``MercurialCommit`` objects from start to end
455 Returns generator of ``MercurialCommit`` objects from start to end
456 (both are inclusive)
456 (both are inclusive)
457
457
458 :param start_id: None, str(commit_id)
458 :param start_id: None, str(commit_id)
459 :param end_id: None, str(commit_id)
459 :param end_id: None, str(commit_id)
460 :param start_date: if specified, commits with commit date less than
460 :param start_date: if specified, commits with commit date less than
461 ``start_date`` are filtered out of the returned set
461 ``start_date`` are filtered out of the returned set
462 :param end_date: if specified, commits with commit date greater than
462 :param end_date: if specified, commits with commit date greater than
463 ``end_date`` are filtered out of the returned set
463 ``end_date`` are filtered out of the returned set
464 :param branch_name: if specified, commits not reachable from the given
464 :param branch_name: if specified, commits not reachable from the given
465 branch are filtered out of the returned set
465 branch are filtered out of the returned set
466
466
467 :raise BranchDoesNotExistError: If given ``branch_name`` does not
467 :raise BranchDoesNotExistError: If given ``branch_name`` does not
468 exist.
468 exist.
469 :raise CommitDoesNotExistError: If commit for given ``start`` or
469 :raise CommitDoesNotExistError: If commit for given ``start`` or
470 ``end`` could not be found.
470 ``end`` could not be found.
471 """
471 """
472 # actually we should check now if it's not an empty repo
472 # actually we should check now if it's not an empty repo
473 branch_ancestors = False
473 branch_ancestors = False
474 if self.is_empty():
474 if self.is_empty():
475 raise EmptyRepositoryError("There are no commits yet")
475 raise EmptyRepositoryError("There are no commits yet")
476 self._validate_branch_name(branch_name)
476 self._validate_branch_name(branch_name)
477
477
478 if start_id is not None:
478 if start_id is not None:
479 self._validate_commit_id(start_id)
479 self._validate_commit_id(start_id)
480 c_start = self.get_commit(commit_id=start_id)
480 c_start = self.get_commit(commit_id=start_id)
481 start_pos = self._commit_ids[c_start.raw_id]
481 start_pos = self._commit_ids[c_start.raw_id]
482 else:
482 else:
483 start_pos = None
483 start_pos = None
484
484
485 if end_id is not None:
485 if end_id is not None:
486 self._validate_commit_id(end_id)
486 self._validate_commit_id(end_id)
487 c_end = self.get_commit(commit_id=end_id)
487 c_end = self.get_commit(commit_id=end_id)
488 end_pos = max(0, self._commit_ids[c_end.raw_id])
488 end_pos = max(0, self._commit_ids[c_end.raw_id])
489 else:
489 else:
490 end_pos = None
490 end_pos = None
491
491
492 if None not in [start_id, end_id] and start_pos > end_pos:
492 if None not in [start_id, end_id] and start_pos > end_pos:
493 raise RepositoryError(
493 raise RepositoryError(
494 "Start commit '%s' cannot be after end commit '%s'" %
494 "Start commit '%s' cannot be after end commit '%s'" %
495 (start_id, end_id))
495 (start_id, end_id))
496
496
497 if end_pos is not None:
497 if end_pos is not None:
498 end_pos += 1
498 end_pos += 1
499
499
500 commit_filter = []
500 commit_filter = []
501 if branch_name and not branch_ancestors:
501 if branch_name and not branch_ancestors:
502 commit_filter.append('branch("%s")' % branch_name)
502 commit_filter.append('branch("%s")' % branch_name)
503 elif branch_name and branch_ancestors:
503 elif branch_name and branch_ancestors:
504 commit_filter.append('ancestors(branch("%s"))' % branch_name)
504 commit_filter.append('ancestors(branch("%s"))' % branch_name)
505 if start_date and not end_date:
505 if start_date and not end_date:
506 commit_filter.append('date(">%s")' % start_date)
506 commit_filter.append('date(">%s")' % start_date)
507 if end_date and not start_date:
507 if end_date and not start_date:
508 commit_filter.append('date("<%s")' % end_date)
508 commit_filter.append('date("<%s")' % end_date)
509 if start_date and end_date:
509 if start_date and end_date:
510 commit_filter.append(
510 commit_filter.append(
511 'date(">%s") and date("<%s")' % (start_date, end_date))
511 'date(">%s") and date("<%s")' % (start_date, end_date))
512
512
513 # TODO: johbo: Figure out a simpler way for this solution
513 # TODO: johbo: Figure out a simpler way for this solution
514 collection_generator = CollectionGenerator
514 collection_generator = CollectionGenerator
515 if commit_filter:
515 if commit_filter:
516 commit_filter = map(safe_str, commit_filter)
516 commit_filter = map(safe_str, commit_filter)
517 revisions = self._remote.rev_range(commit_filter)
517 revisions = self._remote.rev_range(commit_filter)
518 collection_generator = MercurialIndexBasedCollectionGenerator
518 collection_generator = MercurialIndexBasedCollectionGenerator
519 else:
519 else:
520 revisions = self.commit_ids
520 revisions = self.commit_ids
521
521
522 if start_pos or end_pos:
522 if start_pos or end_pos:
523 revisions = revisions[start_pos:end_pos]
523 revisions = revisions[start_pos:end_pos]
524
524
525 return collection_generator(self, revisions, pre_load=pre_load)
525 return collection_generator(self, revisions, pre_load=pre_load)
526
526
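# A standalone sketch of how the Mercurial revset filter above is assembled
# from the optional arguments; `build_filter` is a hypothetical helper name.
def build_filter(branch_name=None, start_date=None, end_date=None):
    commit_filter = []
    if branch_name:
        commit_filter.append('branch("%s")' % branch_name)
    if start_date and not end_date:
        commit_filter.append('date(">%s")' % start_date)
    if end_date and not start_date:
        commit_filter.append('date("<%s")' % end_date)
    if start_date and end_date:
        commit_filter.append(
            'date(">%s") and date("<%s")' % (start_date, end_date))
    return commit_filter

# build_filter(branch_name='default', start_date='2016-01-01')
#   -> ['branch("default")', 'date(">2016-01-01")']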
527 def pull(self, url, commit_ids=None):
527 def pull(self, url, commit_ids=None):
528 """
528 """
529 Tries to pull changes from an external location.
529 Tries to pull changes from an external location.
530
530
531 :param commit_ids: Optional. Can be set to a list of commit ids
531 :param commit_ids: Optional. Can be set to a list of commit ids
532 which shall be pulled from the other repository.
532 which shall be pulled from the other repository.
533 """
533 """
534 url = self._get_url(url)
534 url = self._get_url(url)
535 self._remote.pull(url, commit_ids=commit_ids)
535 self._remote.pull(url, commit_ids=commit_ids)
536 self._remote.invalidate_vcs_cache()
536 self._remote.invalidate_vcs_cache()
537
537
538 def _local_clone(self, clone_path):
538 def _local_clone(self, clone_path):
539 """
539 """
540 Create a local clone of the current repo.
540 Create a local clone of the current repo.
541 """
541 """
542 self._remote.clone(self.path, clone_path, update_after_clone=True,
542 self._remote.clone(self.path, clone_path, update_after_clone=True,
543 hooks=False)
543 hooks=False)
544
544
545 def _update(self, revision, clean=False):
545 def _update(self, revision, clean=False):
546 """
546 """
547 Update the working copy to the specified revision.
547 Update the working copy to the specified revision.
548 """
548 """
549 self._remote.update(revision, clean=clean)
549 self._remote.update(revision, clean=clean)
550
550
551 def _identify(self):
551 def _identify(self):
552 """
552 """
553 Return the current state of the working directory.
553 Return the current state of the working directory.
554 """
554 """
555 return self._remote.identify().strip().rstrip('+')
555 return self._remote.identify().strip().rstrip('+')
556
556
557 def _heads(self, branch=None):
557 def _heads(self, branch=None):
558 """
558 """
559 Return the commit ids of the repository heads.
559 Return the commit ids of the repository heads.
560 """
560 """
561 return self._remote.heads(branch=branch).strip().split(' ')
561 return self._remote.heads(branch=branch).strip().split(' ')
562
562
563 def _ancestor(self, revision1, revision2):
563 def _ancestor(self, revision1, revision2):
564 """
564 """
565 Return the common ancestor of the two revisions.
565 Return the common ancestor of the two revisions.
566 """
566 """
567 return self._remote.ancestor(
567 return self._remote.ancestor(
568 revision1, revision2).strip().split(':')[-1]
568 revision1, revision2).strip().split(':')[-1]
569
569
570 def _local_push(
570 def _local_push(
571 self, revision, repository_path, push_branches=False,
571 self, revision, repository_path, push_branches=False,
572 enable_hooks=False):
572 enable_hooks=False):
573 """
573 """
574 Push the given revision to the specified repository.
574 Push the given revision to the specified repository.
575
575
576 :param push_branches: allow creating branches in the target repo.
576 :param push_branches: allow creating branches in the target repo.
577 """
577 """
578 self._remote.push(
578 self._remote.push(
579 [revision], repository_path, hooks=enable_hooks,
579 [revision], repository_path, hooks=enable_hooks,
580 push_branches=push_branches)
580 push_branches=push_branches)
581
581
582 def _local_merge(self, target_ref, merge_message, user_name, user_email,
582 def _local_merge(self, target_ref, merge_message, user_name, user_email,
583 source_ref, use_rebase=False):
583 source_ref, use_rebase=False):
584 """
584 """
585 Merge the given source_revision into the checked out revision.
585 Merge the given source_revision into the checked out revision.
586
586
587 Returns the commit id of the merge and a boolean indicating if the
587 Returns the commit id of the merge and a boolean indicating if the
588 commit needs to be pushed.
588 commit needs to be pushed.
589 """
589 """
590 self._update(target_ref.commit_id)
590 self._update(target_ref.commit_id)
591
591
592 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
592 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
593 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
593 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
594
594
595 if ancestor == source_ref.commit_id:
595 if ancestor == source_ref.commit_id:
596 # Nothing to do, the changes were already integrated
596 # Nothing to do, the changes were already integrated
597 return target_ref.commit_id, False
597 return target_ref.commit_id, False
598
598
599 elif ancestor == target_ref.commit_id and is_the_same_branch:
599 elif ancestor == target_ref.commit_id and is_the_same_branch:
600 # In this case we should force a commit message
600 # In this case we should force a commit message
601 return source_ref.commit_id, True
601 return source_ref.commit_id, True
602
602
603 if use_rebase:
603 if use_rebase:
604 try:
604 try:
605 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
605 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
606 target_ref.commit_id)
606 target_ref.commit_id)
607 self.bookmark(bookmark_name, revision=source_ref.commit_id)
607 self.bookmark(bookmark_name, revision=source_ref.commit_id)
608 self._remote.rebase(
608 self._remote.rebase(
609 source=source_ref.commit_id, dest=target_ref.commit_id)
609 source=source_ref.commit_id, dest=target_ref.commit_id)
610 self._remote.invalidate_vcs_cache()
610 self._remote.invalidate_vcs_cache()
611 self._update(bookmark_name)
611 self._update(bookmark_name)
612 return self._identify(), True
612 return self._identify(), True
613 except RepositoryError:
613 except RepositoryError:
614 # The rebase-abort may raise another exception which 'hides'
614 # The rebase-abort may raise another exception which 'hides'
615 # the original one, therefore we log it here.
615 # the original one, therefore we log it here.
616 log.exception('Error while rebasing shadow repo during merge.')
616 log.exception('Error while rebasing shadow repo during merge.')
617
617
618 # Cleanup any rebase leftovers
618 # Cleanup any rebase leftovers
619 self._remote.invalidate_vcs_cache()
619 self._remote.invalidate_vcs_cache()
620 self._remote.rebase(abort=True)
620 self._remote.rebase(abort=True)
621 self._remote.invalidate_vcs_cache()
621 self._remote.invalidate_vcs_cache()
622 self._remote.update(clean=True)
622 self._remote.update(clean=True)
623 raise
623 raise
624 else:
624 else:
625 try:
625 try:
626 self._remote.merge(source_ref.commit_id)
626 self._remote.merge(source_ref.commit_id)
627 self._remote.invalidate_vcs_cache()
627 self._remote.invalidate_vcs_cache()
628 self._remote.commit(
628 self._remote.commit(
629 message=safe_str(merge_message),
629 message=safe_str(merge_message),
630 username=safe_str('%s <%s>' % (user_name, user_email)))
630 username=safe_str('%s <%s>' % (user_name, user_email)))
631 self._remote.invalidate_vcs_cache()
631 self._remote.invalidate_vcs_cache()
632 return self._identify(), True
632 return self._identify(), True
633 except RepositoryError:
633 except RepositoryError:
634 # Cleanup any merge leftovers
634 # Cleanup any merge leftovers
635 self._remote.update(clean=True)
635 self._remote.update(clean=True)
636 raise
636 raise
637
637
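# A standalone sketch of the three outcomes decided in _local_merge above;
# the helper and return labels are hypothetical, not part of the backend API.
def plan_merge(ancestor, source_id, target_id, same_branch):
    if ancestor == source_id:
        return 'nothing-to-do'      # changes already integrated in the target
    if ancestor == target_id and same_branch:
        return 'fast-forward'       # target is strictly behind the source
    return 'merge-or-rebase'        # a real merge (or rebase) commit is needed

# plan_merge('a', 'a', 'b', True)   -> 'nothing-to-do'
# plan_merge('b', 'a', 'b', True)   -> 'fast-forward'
# plan_merge('c', 'a', 'b', False)  -> 'merge-or-rebase'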
638 def _is_the_same_branch(self, target_ref, source_ref):
638 def _is_the_same_branch(self, target_ref, source_ref):
639 return (
639 return (
640 self._get_branch_name(target_ref) ==
640 self._get_branch_name(target_ref) ==
641 self._get_branch_name(source_ref))
641 self._get_branch_name(source_ref))
642
642
643 def _get_branch_name(self, ref):
643 def _get_branch_name(self, ref):
644 if ref.type == 'branch':
644 if ref.type == 'branch':
645 return ref.name
645 return ref.name
646 return self._remote.ctx_branch(ref.commit_id)
646 return self._remote.ctx_branch(ref.commit_id)
647
647
648 def _get_shadow_repository_path(self, workspace_id):
648 def _get_shadow_repository_path(self, workspace_id):
649 # The name of the shadow repository must start with '.', so it is
649 # The name of the shadow repository must start with '.', so it is
650 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
650 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
651 return os.path.join(
651 return os.path.join(
652 os.path.dirname(self.path),
652 os.path.dirname(self.path),
653 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
653 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
654
654
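# A standalone mirror of the shadow-repository naming scheme above;
# `shadow_path` is a hypothetical helper name.
import os

def shadow_path(repo_path, workspace_id):
    # Dot-prefixed sibling directory, so repository scanners skip it.
    return os.path.join(
        os.path.dirname(repo_path),
        '.__shadow_%s_%s' % (os.path.basename(repo_path), workspace_id))

# shadow_path('/srv/repos/project', 'pr-42')
#   -> '/srv/repos/.__shadow_project_pr-42'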
655 def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref):
655 def _maybe_prepare_merge_workspace(self, workspace_id, unused_target_ref):
656 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
656 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
657 if not os.path.exists(shadow_repository_path):
657 if not os.path.exists(shadow_repository_path):
658 self._local_clone(shadow_repository_path)
658 self._local_clone(shadow_repository_path)
659 log.debug(
659 log.debug(
660 'Prepared shadow repository in %s', shadow_repository_path)
660 'Prepared shadow repository in %s', shadow_repository_path)
661
661
662 return shadow_repository_path
662 return shadow_repository_path
663
663
664 def cleanup_merge_workspace(self, workspace_id):
664 def cleanup_merge_workspace(self, workspace_id):
665 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
665 shadow_repository_path = self._get_shadow_repository_path(workspace_id)
666 shutil.rmtree(shadow_repository_path, ignore_errors=True)
666 shutil.rmtree(shadow_repository_path, ignore_errors=True)
667
667
668 def _merge_repo(self, shadow_repository_path, target_ref,
668 def _merge_repo(self, shadow_repository_path, target_ref,
669 source_repo, source_ref, merge_message,
669 source_repo, source_ref, merge_message,
670 merger_name, merger_email, dry_run=False,
670 merger_name, merger_email, dry_run=False,
671 use_rebase=False):
671 use_rebase=False):
672 if target_ref.commit_id not in self._heads():
672 if target_ref.commit_id not in self._heads():
673 return MergeResponse(
673 return MergeResponse(
674 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
674 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD)
675
675
676 try:
676 try:
677 if (target_ref.type == 'branch' and
677 if (target_ref.type == 'branch' and
678 len(self._heads(target_ref.name)) != 1):
678 len(self._heads(target_ref.name)) != 1):
679 return MergeResponse(
679 return MergeResponse(
680 False, False, None,
680 False, False, None,
681 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
681 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS)
682 except CommitDoesNotExistError as e:
682 except CommitDoesNotExistError as e:
683 log.exception('Failure when looking up branch heads on hg target')
683 log.exception('Failure when looking up branch heads on hg target')
684 return MergeResponse(
684 return MergeResponse(
685 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
685 False, False, None, MergeFailureReason.MISSING_TARGET_REF)
686
686
687 shadow_repo = self._get_shadow_instance(shadow_repository_path)
687 shadow_repo = self._get_shadow_instance(shadow_repository_path)
688
688
689 log.debug('Pulling in target reference %s', target_ref)
689 log.debug('Pulling in target reference %s', target_ref)
690 self._validate_pull_reference(target_ref)
690 self._validate_pull_reference(target_ref)
691 shadow_repo._local_pull(self.path, target_ref)
691 shadow_repo._local_pull(self.path, target_ref)
692 try:
692 try:
693 log.debug('Pulling in source reference %s', source_ref)
693 log.debug('Pulling in source reference %s', source_ref)
694 source_repo._validate_pull_reference(source_ref)
694 source_repo._validate_pull_reference(source_ref)
695 shadow_repo._local_pull(source_repo.path, source_ref)
695 shadow_repo._local_pull(source_repo.path, source_ref)
696 except CommitDoesNotExistError:
696 except CommitDoesNotExistError:
697 log.exception('Failure when doing local pull on hg shadow repo')
697 log.exception('Failure when doing local pull on hg shadow repo')
698 return MergeResponse(
698 return MergeResponse(
699 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
699 False, False, None, MergeFailureReason.MISSING_SOURCE_REF)
700
700
701 merge_ref = None
701 merge_ref = None
702 merge_failure_reason = MergeFailureReason.NONE
702 merge_failure_reason = MergeFailureReason.NONE
703
703
704 try:
704 try:
705 merge_commit_id, needs_push = shadow_repo._local_merge(
705 merge_commit_id, needs_push = shadow_repo._local_merge(
706 target_ref, merge_message, merger_name, merger_email,
706 target_ref, merge_message, merger_name, merger_email,
707 source_ref, use_rebase=use_rebase)
707 source_ref, use_rebase=use_rebase)
708 merge_possible = True
708 merge_possible = True
709
709
710 # Set a bookmark pointing to the merge commit. This bookmark may be
710 # Set a bookmark pointing to the merge commit. This bookmark may be
711 # used to easily identify the last successful merge commit in the
711 # used to easily identify the last successful merge commit in the
712 # shadow repository.
712 # shadow repository.
713 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
713 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
714 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
714 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
715 except SubrepoMergeError:
716 log.exception(
717 'Subrepo merge error during local merge on hg shadow repo.')
718 merge_possible = False
719 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
720 except RepositoryError:
715 except RepositoryError:
721 log.exception('Failure when doing local merge on hg shadow repo')
716 log.exception('Failure when doing local merge on hg shadow repo')
722 merge_possible = False
717 merge_possible = False
723 merge_failure_reason = MergeFailureReason.MERGE_FAILED
718 merge_failure_reason = MergeFailureReason.MERGE_FAILED
724
719
725 if merge_possible and not dry_run:
720 if merge_possible and not dry_run:
726 if needs_push:
721 if needs_push:
727 # In case the target is a bookmark, update it, so after pushing
722 # In case the target is a bookmark, update it, so after pushing
728 # the bookmark is also updated in the target.
723 # the bookmark is also updated in the target.
729 if target_ref.type == 'book':
724 if target_ref.type == 'book':
730 shadow_repo.bookmark(
725 shadow_repo.bookmark(
731 target_ref.name, revision=merge_commit_id)
726 target_ref.name, revision=merge_commit_id)
732
727
733 try:
728 try:
734 shadow_repo_with_hooks = self._get_shadow_instance(
729 shadow_repo_with_hooks = self._get_shadow_instance(
735 shadow_repository_path,
730 shadow_repository_path,
736 enable_hooks=True)
731 enable_hooks=True)
737 # Note: the push_branches option will push any new branch
732 # Note: the push_branches option will push any new branch
738 # defined in the source repository to the target. This may
733 # defined in the source repository to the target. This may
739 # be dangerous as branches are permanent in Mercurial.
734 # be dangerous as branches are permanent in Mercurial.
740 # This feature was requested in issue #441.
735 # This feature was requested in issue #441.
741 shadow_repo_with_hooks._local_push(
736 shadow_repo_with_hooks._local_push(
742 merge_commit_id, self.path, push_branches=True,
737 merge_commit_id, self.path, push_branches=True,
743 enable_hooks=True)
738 enable_hooks=True)
744 merge_succeeded = True
739 merge_succeeded = True
745 except RepositoryError:
740 except RepositoryError:
746 log.exception(
741 log.exception(
747 'Failure when doing local push from the shadow '
742 'Failure when doing local push from the shadow '
748 'repository to the target repository.')
743 'repository to the target repository.')
749 merge_succeeded = False
744 merge_succeeded = False
750 merge_failure_reason = MergeFailureReason.PUSH_FAILED
745 merge_failure_reason = MergeFailureReason.PUSH_FAILED
751 else:
746 else:
752 merge_succeeded = True
747 merge_succeeded = True
753 else:
748 else:
754 merge_succeeded = False
749 merge_succeeded = False
755
750
756 return MergeResponse(
751 return MergeResponse(
757 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
752 merge_possible, merge_succeeded, merge_ref, merge_failure_reason)
758
753
759 def _get_shadow_instance(
754 def _get_shadow_instance(
760 self, shadow_repository_path, enable_hooks=False):
755 self, shadow_repository_path, enable_hooks=False):
761 config = self.config.copy()
756 config = self.config.copy()
762 if not enable_hooks:
757 if not enable_hooks:
763 config.clear_section('hooks')
758 config.clear_section('hooks')
764 return MercurialRepository(shadow_repository_path, config)
759 return MercurialRepository(shadow_repository_path, config)
765
760
766 def _validate_pull_reference(self, reference):
761 def _validate_pull_reference(self, reference):
767 if not (reference.name in self.bookmarks or
762 if not (reference.name in self.bookmarks or
768 reference.name in self.branches or
763 reference.name in self.branches or
769 self.get_commit(reference.commit_id)):
764 self.get_commit(reference.commit_id)):
770 raise CommitDoesNotExistError(
765 raise CommitDoesNotExistError(
771 'Unknown branch, bookmark or commit id')
766 'Unknown branch, bookmark or commit id')
772
767
773 def _local_pull(self, repository_path, reference):
768 def _local_pull(self, repository_path, reference):
774 """
769 """
775 Fetch a branch, bookmark or commit from a local repository.
770 Fetch a branch, bookmark or commit from a local repository.
776 """
771 """
777 repository_path = os.path.abspath(repository_path)
772 repository_path = os.path.abspath(repository_path)
778 if repository_path == self.path:
773 if repository_path == self.path:
779 raise ValueError('Cannot pull from the same repository')
774 raise ValueError('Cannot pull from the same repository')
780
775
781 reference_type_to_option_name = {
776 reference_type_to_option_name = {
782 'book': 'bookmark',
777 'book': 'bookmark',
783 'branch': 'branch',
778 'branch': 'branch',
784 }
779 }
785 option_name = reference_type_to_option_name.get(
780 option_name = reference_type_to_option_name.get(
786 reference.type, 'revision')
781 reference.type, 'revision')
787
782
788 if option_name == 'revision':
783 if option_name == 'revision':
789 ref = reference.commit_id
784 ref = reference.commit_id
790 else:
785 else:
791 ref = reference.name
786 ref = reference.name
792
787
793 options = {option_name: [ref]}
788 options = {option_name: [ref]}
794 self._remote.pull_cmd(repository_path, hooks=False, **options)
789 self._remote.pull_cmd(repository_path, hooks=False, **options)
795 self._remote.invalidate_vcs_cache()
790 self._remote.invalidate_vcs_cache()
796
791
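# A standalone sketch of the reference-type-to-option mapping used in
# _local_pull above; `pull_options` is a hypothetical helper, not the API.
def pull_options(ref_type, ref_name, commit_id):
    option_name = {'book': 'bookmark', 'branch': 'branch'}.get(
        ref_type, 'revision')
    ref = commit_id if option_name == 'revision' else ref_name
    return {option_name: [ref]}

# pull_options('book', 'feature-x', 'abc123')  -> {'bookmark': ['feature-x']}
# pull_options('tag', 'v1.0', 'abc123')        -> {'revision': ['abc123']}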
797 def bookmark(self, bookmark, revision=None):
792 def bookmark(self, bookmark, revision=None):
798 if isinstance(bookmark, unicode):
793 if isinstance(bookmark, unicode):
799 bookmark = safe_str(bookmark)
794 bookmark = safe_str(bookmark)
800 self._remote.bookmark(bookmark, revision=revision)
795 self._remote.bookmark(bookmark, revision=revision)
801 self._remote.invalidate_vcs_cache()
796 self._remote.invalidate_vcs_cache()
802
797
803
798
804 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
799 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
805
800
806 def _commit_factory(self, commit_id):
801 def _commit_factory(self, commit_id):
807 return self.repo.get_commit(
802 return self.repo.get_commit(
808 commit_idx=commit_id, pre_load=self.pre_load)
803 commit_idx=commit_id, pre_load=self.pre_load)
@@ -1,205 +1,196 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2016 RhodeCode GmbH
3 # Copyright (C) 2014-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Custom vcs exceptions module.
22 Custom vcs exceptions module.
23 """
23 """
24
24
25 import functools
25 import functools
26 import urllib2
26 import urllib2
27
27
28
28
29 class VCSCommunicationError(Exception):
29 class VCSCommunicationError(Exception):
30 pass
30 pass
31
31
32
32
33 class PyroVCSCommunicationError(VCSCommunicationError):
33 class PyroVCSCommunicationError(VCSCommunicationError):
34 pass
34 pass
35
35
36
36
37 class HttpVCSCommunicationError(VCSCommunicationError):
37 class HttpVCSCommunicationError(VCSCommunicationError):
38 pass
38 pass
39
39
40
40
41 class VCSError(Exception):
41 class VCSError(Exception):
42 pass
42 pass
43
43
44
44
45 class RepositoryError(VCSError):
45 class RepositoryError(VCSError):
46 pass
46 pass
47
47
48
48
49 class RepositoryRequirementError(RepositoryError):
49 class RepositoryRequirementError(RepositoryError):
50 pass
50 pass
51
51
52
52
53 class VCSBackendNotSupportedError(VCSError):
53 class VCSBackendNotSupportedError(VCSError):
54 """
54 """
55 Exception raised when VCSServer does not support requested backend
55 Exception raised when VCSServer does not support requested backend
56 """
56 """
57
57
58
58
59 class EmptyRepositoryError(RepositoryError):
59 class EmptyRepositoryError(RepositoryError):
60 pass
60 pass
61
61
62
62
63 class TagAlreadyExistError(RepositoryError):
63 class TagAlreadyExistError(RepositoryError):
64 pass
64 pass
65
65
66
66
67 class TagDoesNotExistError(RepositoryError):
67 class TagDoesNotExistError(RepositoryError):
68 pass
68 pass
69
69
70
70
71 class BranchAlreadyExistError(RepositoryError):
71 class BranchAlreadyExistError(RepositoryError):
72 pass
72 pass
73
73
74
74
75 class BranchDoesNotExistError(RepositoryError):
75 class BranchDoesNotExistError(RepositoryError):
76 pass
76 pass
77
77
78
78
79 class CommitError(RepositoryError):
79 class CommitError(RepositoryError):
80 """
80 """
81 Exceptions related to an existing commit
81 Exceptions related to an existing commit
82 """
82 """
83
83
84
84
85 class CommitDoesNotExistError(CommitError):
85 class CommitDoesNotExistError(CommitError):
86 pass
86 pass
87
87
88
88
89 class CommittingError(RepositoryError):
89 class CommittingError(RepositoryError):
90 """
90 """
91 Exceptions happening while creating a new commit
91 Exceptions happening while creating a new commit
92 """
92 """
93
93
94
94
95 class NothingChangedError(CommittingError):
95 class NothingChangedError(CommittingError):
96 pass
96 pass
97
97
98
98
99 class NodeError(VCSError):
99 class NodeError(VCSError):
100 pass
100 pass
101
101
102
102
103 class RemovedFileNodeError(NodeError):
103 class RemovedFileNodeError(NodeError):
104 pass
104 pass
105
105
106
106
107 class NodeAlreadyExistsError(CommittingError):
107 class NodeAlreadyExistsError(CommittingError):
108 pass
108 pass
109
109
110
110
111 class NodeAlreadyChangedError(CommittingError):
111 class NodeAlreadyChangedError(CommittingError):
112 pass
112 pass
113
113
114
114
115 class NodeDoesNotExistError(CommittingError):
115 class NodeDoesNotExistError(CommittingError):
116 pass
116 pass
117
117
118
118
119 class NodeNotChangedError(CommittingError):
119 class NodeNotChangedError(CommittingError):
120 pass
120 pass
121
121
122
122
123 class NodeAlreadyAddedError(CommittingError):
123 class NodeAlreadyAddedError(CommittingError):
124 pass
124 pass
125
125
126
126
127 class NodeAlreadyRemovedError(CommittingError):
127 class NodeAlreadyRemovedError(CommittingError):
128 pass
128 pass
129
129
130
130
131 class SubrepoMergeError(RepositoryError):
132 """
133 This happens if we try to merge a repository which contains subrepos and
134 the subrepos cannot be merged. The subrepos are not merged itself but
135 their references in the root repo are merged.
136 """
137
138
139 class ImproperArchiveTypeError(VCSError):
131 class ImproperArchiveTypeError(VCSError):
140 pass
132 pass
141
133
142
134
143 class CommandError(VCSError):
135 class CommandError(VCSError):
144 pass
136 pass
145
137
146
138
147 class UnhandledException(VCSError):
139 class UnhandledException(VCSError):
148 """
140 """
149 Signals that something unexpected went wrong.
141 Signals that something unexpected went wrong.
150
142
151 This usually means we have a programming error on the side of the VCSServer
143 This usually means we have a programming error on the side of the VCSServer
152 and should inspect the logfile of the VCSServer to find more details.
144 and should inspect the logfile of the VCSServer to find more details.
153 """
145 """
154
146
155
147
156 _EXCEPTION_MAP = {
148 _EXCEPTION_MAP = {
157 'abort': RepositoryError,
149 'abort': RepositoryError,
158 'archive': ImproperArchiveTypeError,
150 'archive': ImproperArchiveTypeError,
159 'error': RepositoryError,
151 'error': RepositoryError,
160 'lookup': CommitDoesNotExistError,
152 'lookup': CommitDoesNotExistError,
161 'repo_locked': RepositoryError,
153 'repo_locked': RepositoryError,
162 'requirement': RepositoryRequirementError,
154 'requirement': RepositoryRequirementError,
163 'unhandled': UnhandledException,
155 'unhandled': UnhandledException,
164 # TODO: johbo: Define our own exception for this and stop abusing
156 # TODO: johbo: Define our own exception for this and stop abusing
165 # urllib's exception class.
157 # urllib's exception class.
166 'url_error': urllib2.URLError,
158 'url_error': urllib2.URLError,
167 'subrepo_merge_error': SubrepoMergeError,
168 }
159 }
169
160
170
161
171 def map_vcs_exceptions(func):
162 def map_vcs_exceptions(func):
172 """
163 """
173 Utility to decorate functions so that plain exceptions are translated.
164 Utility to decorate functions so that plain exceptions are translated.
174
165
175 The translation is based on `_EXCEPTION_MAP`, which maps a `str` naming
166 The translation is based on `_EXCEPTION_MAP`, which maps a `str` naming
176 the error type to an exception class representing this error inside
167 the error type to an exception class representing this error inside
177 the vcs layer.
168 the vcs layer.
178 """
169 """
179
170
180 @functools.wraps(func)
171 @functools.wraps(func)
181 def wrapper(*args, **kwargs):
172 def wrapper(*args, **kwargs):
182 try:
173 try:
183 return func(*args, **kwargs)
174 return func(*args, **kwargs)
184 except Exception as e:
175 except Exception as e:
185 # The error middleware adds information if it finds
176 # The error middleware adds information if it finds
186 # __traceback_info__ in a frame object. This way the remote
177 # __traceback_info__ in a frame object. This way the remote
187 # traceback information is made available in error reports.
178 # traceback information is made available in error reports.
188 remote_tb = getattr(e, '_pyroTraceback', None)
179 remote_tb = getattr(e, '_pyroTraceback', None)
189 if remote_tb:
180 if remote_tb:
190 __traceback_info__ = (
181 __traceback_info__ = (
191 'Found Pyro4 remote traceback information:\n\n' +
182 'Found Pyro4 remote traceback information:\n\n' +
192 '\n'.join(remote_tb))
183 '\n'.join(remote_tb))
193
184
194 # Avoid that remote_tb also appears in the frame
185 # Avoid that remote_tb also appears in the frame
195 del remote_tb
186 del remote_tb
196
187
197 # Special vcs errors have an attribute "_vcs_kind" which is used
188 # Special vcs errors have an attribute "_vcs_kind" which is used
198 # to translate them to the proper exception class in the vcs
189 # to translate them to the proper exception class in the vcs
199 # client layer.
190 # client layer.
200 kind = getattr(e, '_vcs_kind', None)
191 kind = getattr(e, '_vcs_kind', None)
201 if kind:
192 if kind:
202 raise _EXCEPTION_MAP[kind](*e.args)
193 raise _EXCEPTION_MAP[kind](*e.args)
203 else:
194 else:
204 raise
195 raise
205 return wrapper
196 return wrapper
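# A reduced, standalone version of the same decorator pattern, using a toy
# mapping in place of _EXCEPTION_MAP; the names below are illustrative only.
import functools

TOY_EXCEPTION_MAP = {'lookup': LookupError}

def map_errors(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            # Errors tagged with `_vcs_kind` are re-raised as the mapped class.
            kind = getattr(e, '_vcs_kind', None)
            if kind:
                raise TOY_EXCEPTION_MAP[kind](*e.args)
            raise
    return wrapper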
@@ -1,1317 +1,1314 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2012-2016 RhodeCode GmbH
3 # Copyright (C) 2012-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26 from collections import namedtuple
26 from collections import namedtuple
27 import json
27 import json
28 import logging
28 import logging
29 import datetime
29 import datetime
30 import urllib
30 import urllib
31
31
32 from pylons.i18n.translation import _
32 from pylons.i18n.translation import _
33 from pylons.i18n.translation import lazy_ugettext
33 from pylons.i18n.translation import lazy_ugettext
34 from sqlalchemy import or_
34 from sqlalchemy import or_
35
35
36 from rhodecode.lib import helpers as h, hooks_utils, diffs
36 from rhodecode.lib import helpers as h, hooks_utils, diffs
37 from rhodecode.lib.compat import OrderedDict
37 from rhodecode.lib.compat import OrderedDict
38 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
38 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
39 from rhodecode.lib.markup_renderer import (
39 from rhodecode.lib.markup_renderer import (
40 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
40 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
41 from rhodecode.lib.utils import action_logger
41 from rhodecode.lib.utils import action_logger
42 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
42 from rhodecode.lib.utils2 import safe_unicode, safe_str, md5_safe
43 from rhodecode.lib.vcs.backends.base import (
43 from rhodecode.lib.vcs.backends.base import (
44 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
44 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason)
45 from rhodecode.lib.vcs.conf import settings as vcs_settings
45 from rhodecode.lib.vcs.conf import settings as vcs_settings
46 from rhodecode.lib.vcs.exceptions import (
46 from rhodecode.lib.vcs.exceptions import (
47 CommitDoesNotExistError, EmptyRepositoryError)
47 CommitDoesNotExistError, EmptyRepositoryError)
48 from rhodecode.model import BaseModel
48 from rhodecode.model import BaseModel
49 from rhodecode.model.changeset_status import ChangesetStatusModel
49 from rhodecode.model.changeset_status import ChangesetStatusModel
50 from rhodecode.model.comment import ChangesetCommentsModel
50 from rhodecode.model.comment import ChangesetCommentsModel
51 from rhodecode.model.db import (
51 from rhodecode.model.db import (
52 PullRequest, PullRequestReviewers, ChangesetStatus,
52 PullRequest, PullRequestReviewers, ChangesetStatus,
53 PullRequestVersion, ChangesetComment)
53 PullRequestVersion, ChangesetComment)
54 from rhodecode.model.meta import Session
54 from rhodecode.model.meta import Session
55 from rhodecode.model.notification import NotificationModel, \
55 from rhodecode.model.notification import NotificationModel, \
56 EmailNotificationModel
56 EmailNotificationModel
57 from rhodecode.model.scm import ScmModel
57 from rhodecode.model.scm import ScmModel
58 from rhodecode.model.settings import VcsSettingsModel
58 from rhodecode.model.settings import VcsSettingsModel
59
59
60
60
61 log = logging.getLogger(__name__)
61 log = logging.getLogger(__name__)
62
62
63
63
64 # Data structure to hold the response data when updating commits during a pull
64 # Data structure to hold the response data when updating commits during a pull
65 # request update.
65 # request update.
66 UpdateResponse = namedtuple(
66 UpdateResponse = namedtuple(
67 'UpdateResponse', 'executed, reason, new, old, changes')
67 'UpdateResponse', 'executed, reason, new, old, changes')
68
68
69
69
70 class PullRequestModel(BaseModel):
70 class PullRequestModel(BaseModel):
71
71
72 cls = PullRequest
72 cls = PullRequest
73
73
74 DIFF_CONTEXT = 3
74 DIFF_CONTEXT = 3
75
75
76 MERGE_STATUS_MESSAGES = {
76 MERGE_STATUS_MESSAGES = {
77 MergeFailureReason.NONE: lazy_ugettext(
77 MergeFailureReason.NONE: lazy_ugettext(
78 'This pull request can be automatically merged.'),
78 'This pull request can be automatically merged.'),
79 MergeFailureReason.UNKNOWN: lazy_ugettext(
79 MergeFailureReason.UNKNOWN: lazy_ugettext(
80 'This pull request cannot be merged because of an unhandled'
80 'This pull request cannot be merged because of an unhandled'
81 ' exception.'),
81 ' exception.'),
82 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
82 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
83 'This pull request cannot be merged because of conflicts.'),
83 'This pull request cannot be merged because of conflicts.'),
84 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
84 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
85 'This pull request could not be merged because push to target'
85 'This pull request could not be merged because push to target'
86 ' failed.'),
86 ' failed.'),
87 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
87 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
88 'This pull request cannot be merged because the target is not a'
88 'This pull request cannot be merged because the target is not a'
89 ' head.'),
89 ' head.'),
90 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
90 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
91 'This pull request cannot be merged because the source contains'
91 'This pull request cannot be merged because the source contains'
92 ' more branches than the target.'),
92 ' more branches than the target.'),
93 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
93 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
94 'This pull request cannot be merged because the target has'
94 'This pull request cannot be merged because the target has'
95 ' multiple heads.'),
95 ' multiple heads.'),
96 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
96 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
97 'This pull request cannot be merged because the target repository'
97 'This pull request cannot be merged because the target repository'
98 ' is locked.'),
98 ' is locked.'),
99 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
99 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
100 'This pull request cannot be merged because the target or the '
100 'This pull request cannot be merged because the target or the '
101 'source reference is missing.'),
101 'source reference is missing.'),
102 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
102 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
103 'This pull request cannot be merged because the target '
103 'This pull request cannot be merged because the target '
104 'reference is missing.'),
104 'reference is missing.'),
105 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
105 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
106 'This pull request cannot be merged because the source '
106 'This pull request cannot be merged because the source '
107 'reference is missing.'),
107 'reference is missing.'),
108 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
109 'This pull request cannot be merged because of conflicts related '
110 'to sub repositories.'),
111 }
108 }
112
109
113 UPDATE_STATUS_MESSAGES = {
110 UPDATE_STATUS_MESSAGES = {
114 UpdateFailureReason.NONE: lazy_ugettext(
111 UpdateFailureReason.NONE: lazy_ugettext(
115 'Pull request update successful.'),
112 'Pull request update successful.'),
116 UpdateFailureReason.UNKNOWN: lazy_ugettext(
113 UpdateFailureReason.UNKNOWN: lazy_ugettext(
117 'Pull request update failed because of an unknown error.'),
114 'Pull request update failed because of an unknown error.'),
118 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
115 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
119 'No update needed because the source reference is already '
116 'No update needed because the source reference is already '
120 'up to date.'),
117 'up to date.'),
121 UpdateFailureReason.WRONG_REF_TPYE: lazy_ugettext(
118 UpdateFailureReason.WRONG_REF_TPYE: lazy_ugettext(
122 'Pull request cannot be updated because the reference type is '
119 'Pull request cannot be updated because the reference type is '
123 'not supported for an update.'),
120 'not supported for an update.'),
124 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
121 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
125 'This pull request cannot be updated because the target '
122 'This pull request cannot be updated because the target '
126 'reference is missing.'),
123 'reference is missing.'),
127 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
124 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
128 'This pull request cannot be updated because the source '
125 'This pull request cannot be updated because the source '
129 'reference is missing.'),
126 'reference is missing.'),
130 }
127 }
131
128
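# Hypothetical lookup against the mappings above; assumes a configured
# RhodeCode/Pylons runtime so the lazy_ugettext strings can be rendered:
#
#     reason = MergeFailureReason.MERGE_FAILED
#     message = PullRequestModel.MERGE_STATUS_MESSAGES[reason]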
132 def __get_pull_request(self, pull_request):
129 def __get_pull_request(self, pull_request):
133 return self._get_instance(PullRequest, pull_request)
130 return self._get_instance(PullRequest, pull_request)
134
131
135 def _check_perms(self, perms, pull_request, user, api=False):
132 def _check_perms(self, perms, pull_request, user, api=False):
136 if not api:
133 if not api:
137 return h.HasRepoPermissionAny(*perms)(
134 return h.HasRepoPermissionAny(*perms)(
138 user=user, repo_name=pull_request.target_repo.repo_name)
135 user=user, repo_name=pull_request.target_repo.repo_name)
139 else:
136 else:
140 return h.HasRepoPermissionAnyApi(*perms)(
137 return h.HasRepoPermissionAnyApi(*perms)(
141 user=user, repo_name=pull_request.target_repo.repo_name)
138 user=user, repo_name=pull_request.target_repo.repo_name)
142
139
143 def check_user_read(self, pull_request, user, api=False):
140 def check_user_read(self, pull_request, user, api=False):
144 _perms = ('repository.admin', 'repository.write', 'repository.read',)
141 _perms = ('repository.admin', 'repository.write', 'repository.read',)
145 return self._check_perms(_perms, pull_request, user, api)
142 return self._check_perms(_perms, pull_request, user, api)
146
143
147 def check_user_merge(self, pull_request, user, api=False):
144 def check_user_merge(self, pull_request, user, api=False):
148 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
145 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
149 return self._check_perms(_perms, pull_request, user, api)
146 return self._check_perms(_perms, pull_request, user, api)
150
147
151 def check_user_update(self, pull_request, user, api=False):
148 def check_user_update(self, pull_request, user, api=False):
152 owner = user.user_id == pull_request.user_id
149 owner = user.user_id == pull_request.user_id
153 return self.check_user_merge(pull_request, user, api) or owner
150 return self.check_user_merge(pull_request, user, api) or owner
154
151
155 def check_user_delete(self, pull_request, user):
152 def check_user_delete(self, pull_request, user):
156 owner = user.user_id == pull_request.user_id
153 owner = user.user_id == pull_request.user_id
157 _perms = ('repository.admin',)
154 _perms = ('repository.admin',)
158 return self._check_perms(_perms, pull_request, user) or owner
155 return self._check_perms(_perms, pull_request, user) or owner
159
156
160 def check_user_change_status(self, pull_request, user, api=False):
157 def check_user_change_status(self, pull_request, user, api=False):
161 reviewer = user.user_id in [x.user_id for x in
158 reviewer = user.user_id in [x.user_id for x in
162 pull_request.reviewers]
159 pull_request.reviewers]
163 return self.check_user_update(pull_request, user, api) or reviewer
160 return self.check_user_update(pull_request, user, api) or reviewer
164
161
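# Illustrative usage of the permission helpers above (a sketch, not part of the
# original module; `pr` and `cur_user` are assumed PullRequest/User objects):
#
#   model = PullRequestModel()
#   if model.check_user_read(pr, cur_user):
#       pass  # cur_user may view the pull request
#   if model.check_user_merge(pr, cur_user, api=True):
#       pass  # same check, but via the API permission helper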
165 def get(self, pull_request):
162 def get(self, pull_request):
166 return self.__get_pull_request(pull_request)
163 return self.__get_pull_request(pull_request)
167
164
168 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
165 def _prepare_get_all_query(self, repo_name, source=False, statuses=None,
169 opened_by=None, order_by=None,
166 opened_by=None, order_by=None,
170 order_dir='desc'):
167 order_dir='desc'):
171 repo = None
168 repo = None
172 if repo_name:
169 if repo_name:
173 repo = self._get_repo(repo_name)
170 repo = self._get_repo(repo_name)
174
171
175 q = PullRequest.query()
172 q = PullRequest.query()
176
173
177 # source or target
174 # source or target
178 if repo and source:
175 if repo and source:
179 q = q.filter(PullRequest.source_repo == repo)
176 q = q.filter(PullRequest.source_repo == repo)
180 elif repo:
177 elif repo:
181 q = q.filter(PullRequest.target_repo == repo)
178 q = q.filter(PullRequest.target_repo == repo)
182
179
183 # closed,opened
180 # closed,opened
184 if statuses:
181 if statuses:
185 q = q.filter(PullRequest.status.in_(statuses))
182 q = q.filter(PullRequest.status.in_(statuses))
186
183
187 # opened by filter
184 # opened by filter
188 if opened_by:
185 if opened_by:
189 q = q.filter(PullRequest.user_id.in_(opened_by))
186 q = q.filter(PullRequest.user_id.in_(opened_by))
190
187
191 if order_by:
188 if order_by:
192 order_map = {
189 order_map = {
193 'name_raw': PullRequest.pull_request_id,
190 'name_raw': PullRequest.pull_request_id,
194 'title': PullRequest.title,
191 'title': PullRequest.title,
195 'updated_on_raw': PullRequest.updated_on,
192 'updated_on_raw': PullRequest.updated_on,
196 'target_repo': PullRequest.target_repo_id
193 'target_repo': PullRequest.target_repo_id
197 }
194 }
198 if order_dir == 'asc':
195 if order_dir == 'asc':
199 q = q.order_by(order_map[order_by].asc())
196 q = q.order_by(order_map[order_by].asc())
200 else:
197 else:
201 q = q.order_by(order_map[order_by].desc())
198 q = q.order_by(order_map[order_by].desc())
202
199
203 return q
200 return q
204
201
205 def count_all(self, repo_name, source=False, statuses=None,
202 def count_all(self, repo_name, source=False, statuses=None,
206 opened_by=None):
203 opened_by=None):
207 """
204 """
208 Count the number of pull requests for a specific repository.
205 Count the number of pull requests for a specific repository.
209
206
210 :param repo_name: target or source repo
207 :param repo_name: target or source repo
211 :param source: boolean flag to specify if repo_name refers to source
208 :param source: boolean flag to specify if repo_name refers to source
212 :param statuses: list of pull request statuses
209 :param statuses: list of pull request statuses
213 :param opened_by: author user of the pull request
210 :param opened_by: author user of the pull request
214 :returns: int number of pull requests
211 :returns: int number of pull requests
215 """
212 """
216 q = self._prepare_get_all_query(
213 q = self._prepare_get_all_query(
217 repo_name, source=source, statuses=statuses, opened_by=opened_by)
214 repo_name, source=source, statuses=statuses, opened_by=opened_by)
218
215
219 return q.count()
216 return q.count()
220
217
221 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
218 def get_all(self, repo_name, source=False, statuses=None, opened_by=None,
222 offset=0, length=None, order_by=None, order_dir='desc'):
219 offset=0, length=None, order_by=None, order_dir='desc'):
223 """
220 """
224 Get all pull requests for a specific repository.
221 Get all pull requests for a specific repository.
225
222
226 :param repo_name: target or source repo
223 :param repo_name: target or source repo
227 :param source: boolean flag to specify if repo_name refers to source
224 :param source: boolean flag to specify if repo_name refers to source
228 :param statuses: list of pull request statuses
225 :param statuses: list of pull request statuses
229 :param opened_by: author user of the pull request
226 :param opened_by: author user of the pull request
230 :param offset: pagination offset
227 :param offset: pagination offset
231 :param length: length of returned list
228 :param length: length of returned list
232 :param order_by: order of the returned list
229 :param order_by: order of the returned list
233 :param order_dir: 'asc' or 'desc' ordering direction
230 :param order_dir: 'asc' or 'desc' ordering direction
234 :returns: list of pull requests
231 :returns: list of pull requests
235 """
232 """
236 q = self._prepare_get_all_query(
233 q = self._prepare_get_all_query(
237 repo_name, source=source, statuses=statuses, opened_by=opened_by,
234 repo_name, source=source, statuses=statuses, opened_by=opened_by,
238 order_by=order_by, order_dir=order_dir)
235 order_by=order_by, order_dir=order_dir)
239
236
240 if length:
237 if length:
241 pull_requests = q.limit(length).offset(offset).all()
238 pull_requests = q.limit(length).offset(offset).all()
242 else:
239 else:
243 pull_requests = q.all()
240 pull_requests = q.all()
244
241
245 return pull_requests
242 return pull_requests
246
243
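# Illustrative call of get_all() above (repository name and paging values are
# made up; see the docstring for the accepted parameters):
#
#   prs = PullRequestModel().get_all(
#       'some/repo', source=False, statuses=None, opened_by=None,
#       offset=0, length=20, order_by='updated_on_raw', order_dir='desc')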
247 def count_awaiting_review(self, repo_name, source=False, statuses=None,
244 def count_awaiting_review(self, repo_name, source=False, statuses=None,
248 opened_by=None):
245 opened_by=None):
249 """
246 """
250 Count the number of pull requests for a specific repository that are
247 Count the number of pull requests for a specific repository that are
251 awaiting review.
248 awaiting review.
252
249
253 :param repo_name: target or source repo
250 :param repo_name: target or source repo
254 :param source: boolean flag to specify if repo_name refers to source
251 :param source: boolean flag to specify if repo_name refers to source
255 :param statuses: list of pull request statuses
252 :param statuses: list of pull request statuses
256 :param opened_by: author user of the pull request
253 :param opened_by: author user of the pull request
257 :returns: int number of pull requests
254 :returns: int number of pull requests
258 """
255 """
259 pull_requests = self.get_awaiting_review(
256 pull_requests = self.get_awaiting_review(
260 repo_name, source=source, statuses=statuses, opened_by=opened_by)
257 repo_name, source=source, statuses=statuses, opened_by=opened_by)
261
258
262 return len(pull_requests)
259 return len(pull_requests)
263
260
264 def get_awaiting_review(self, repo_name, source=False, statuses=None,
261 def get_awaiting_review(self, repo_name, source=False, statuses=None,
265 opened_by=None, offset=0, length=None,
262 opened_by=None, offset=0, length=None,
266 order_by=None, order_dir='desc'):
263 order_by=None, order_dir='desc'):
267 """
264 """
268 Get all pull requests for a specific repository that are awaiting
265 Get all pull requests for a specific repository that are awaiting
269 review.
266 review.
270
267
271 :param repo_name: target or source repo
268 :param repo_name: target or source repo
272 :param source: boolean flag to specify if repo_name refers to source
269 :param source: boolean flag to specify if repo_name refers to source
273 :param statuses: list of pull request statuses
270 :param statuses: list of pull request statuses
274 :param opened_by: author user of the pull request
271 :param opened_by: author user of the pull request
275 :param offset: pagination offset
272 :param offset: pagination offset
276 :param length: length of returned list
273 :param length: length of returned list
277 :param order_by: order of the returned list
274 :param order_by: order of the returned list
278 :param order_dir: 'asc' or 'desc' ordering direction
275 :param order_dir: 'asc' or 'desc' ordering direction
279 :returns: list of pull requests
276 :returns: list of pull requests
280 """
277 """
281 pull_requests = self.get_all(
278 pull_requests = self.get_all(
282 repo_name, source=source, statuses=statuses, opened_by=opened_by,
279 repo_name, source=source, statuses=statuses, opened_by=opened_by,
283 order_by=order_by, order_dir=order_dir)
280 order_by=order_by, order_dir=order_dir)
284
281
285 _filtered_pull_requests = []
282 _filtered_pull_requests = []
286 for pr in pull_requests:
283 for pr in pull_requests:
287 status = pr.calculated_review_status()
284 status = pr.calculated_review_status()
288 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
285 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
289 ChangesetStatus.STATUS_UNDER_REVIEW]:
286 ChangesetStatus.STATUS_UNDER_REVIEW]:
290 _filtered_pull_requests.append(pr)
287 _filtered_pull_requests.append(pr)
291 if length:
288 if length:
292 return _filtered_pull_requests[offset:offset+length]
289 return _filtered_pull_requests[offset:offset+length]
293 else:
290 else:
294 return _filtered_pull_requests
291 return _filtered_pull_requests
295
292
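# Note on get_awaiting_review() above: the review-status filter runs in Python
# after the database query, so `offset`/`length` slice the already filtered
# list rather than being applied as SQL LIMIT/OFFSET.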
296 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
293 def count_awaiting_my_review(self, repo_name, source=False, statuses=None,
297 opened_by=None, user_id=None):
294 opened_by=None, user_id=None):
298 """
295 """
299 Count the number of pull requests for a specific repository that are
296 Count the number of pull requests for a specific repository that are
300 awaiting review from a specific user.
297 awaiting review from a specific user.
301
298
302 :param repo_name: target or source repo
299 :param repo_name: target or source repo
303 :param source: boolean flag to specify if repo_name refers to source
300 :param source: boolean flag to specify if repo_name refers to source
304 :param statuses: list of pull request statuses
301 :param statuses: list of pull request statuses
305 :param opened_by: author user of the pull request
302 :param opened_by: author user of the pull request
306 :param user_id: reviewer user of the pull request
303 :param user_id: reviewer user of the pull request
307 :returns: int number of pull requests
304 :returns: int number of pull requests
308 """
305 """
309 pull_requests = self.get_awaiting_my_review(
306 pull_requests = self.get_awaiting_my_review(
310 repo_name, source=source, statuses=statuses, opened_by=opened_by,
307 repo_name, source=source, statuses=statuses, opened_by=opened_by,
311 user_id=user_id)
308 user_id=user_id)
312
309
313 return len(pull_requests)
310 return len(pull_requests)
314
311
315 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
312 def get_awaiting_my_review(self, repo_name, source=False, statuses=None,
316 opened_by=None, user_id=None, offset=0,
313 opened_by=None, user_id=None, offset=0,
317 length=None, order_by=None, order_dir='desc'):
314 length=None, order_by=None, order_dir='desc'):
318 """
315 """
319 Get all pull requests for a specific repository that are awaiting
316 Get all pull requests for a specific repository that are awaiting
320 review from a specific user.
317 review from a specific user.
321
318
322 :param repo_name: target or source repo
319 :param repo_name: target or source repo
323 :param source: boolean flag to specify if repo_name refers to source
320 :param source: boolean flag to specify if repo_name refers to source
324 :param statuses: list of pull request statuses
321 :param statuses: list of pull request statuses
325 :param opened_by: author user of the pull request
322 :param opened_by: author user of the pull request
326 :param user_id: reviewer user of the pull request
323 :param user_id: reviewer user of the pull request
327 :param offset: pagination offset
324 :param offset: pagination offset
328 :param length: length of returned list
325 :param length: length of returned list
329 :param order_by: order of the returned list
326 :param order_by: order of the returned list
330 :param order_dir: 'asc' or 'desc' ordering direction
327 :param order_dir: 'asc' or 'desc' ordering direction
331 :returns: list of pull requests
328 :returns: list of pull requests
332 """
329 """
333 pull_requests = self.get_all(
330 pull_requests = self.get_all(
334 repo_name, source=source, statuses=statuses, opened_by=opened_by,
331 repo_name, source=source, statuses=statuses, opened_by=opened_by,
335 order_by=order_by, order_dir=order_dir)
332 order_by=order_by, order_dir=order_dir)
336
333
337 _my = PullRequestModel().get_not_reviewed(user_id)
334 _my = PullRequestModel().get_not_reviewed(user_id)
338 my_participation = []
335 my_participation = []
339 for pr in pull_requests:
336 for pr in pull_requests:
340 if pr in _my:
337 if pr in _my:
341 my_participation.append(pr)
338 my_participation.append(pr)
342 _filtered_pull_requests = my_participation
339 _filtered_pull_requests = my_participation
343 if length:
340 if length:
344 return _filtered_pull_requests[offset:offset+length]
341 return _filtered_pull_requests[offset:offset+length]
345 else:
342 else:
346 return _filtered_pull_requests
343 return _filtered_pull_requests
347
344
348 def get_not_reviewed(self, user_id):
345 def get_not_reviewed(self, user_id):
349 return [
346 return [
350 x.pull_request for x in PullRequestReviewers.query().filter(
347 x.pull_request for x in PullRequestReviewers.query().filter(
351 PullRequestReviewers.user_id == user_id).all()
348 PullRequestReviewers.user_id == user_id).all()
352 ]
349 ]
353
350
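# Note: get_not_reviewed() above returns every pull request on which the given
# user is registered as a reviewer (no review-status filter is applied here);
# get_awaiting_my_review() uses it to restrict results to PRs the user reviews.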
354 def _prepare_participating_query(self, user_id=None, statuses=None,
351 def _prepare_participating_query(self, user_id=None, statuses=None,
355 order_by=None, order_dir='desc'):
352 order_by=None, order_dir='desc'):
356 q = PullRequest.query()
353 q = PullRequest.query()
357 if user_id:
354 if user_id:
358 reviewers_subquery = Session().query(
355 reviewers_subquery = Session().query(
359 PullRequestReviewers.pull_request_id).filter(
356 PullRequestReviewers.pull_request_id).filter(
360 PullRequestReviewers.user_id == user_id).subquery()
357 PullRequestReviewers.user_id == user_id).subquery()
361 user_filter = or_(
358 user_filter = or_(
362 PullRequest.user_id == user_id,
359 PullRequest.user_id == user_id,
363 PullRequest.pull_request_id.in_(reviewers_subquery)
360 PullRequest.pull_request_id.in_(reviewers_subquery)
364 )
361 )
365 q = PullRequest.query().filter(user_filter)
362 q = PullRequest.query().filter(user_filter)
366
363
367 # closed,opened
364 # closed,opened
368 if statuses:
365 if statuses:
369 q = q.filter(PullRequest.status.in_(statuses))
366 q = q.filter(PullRequest.status.in_(statuses))
370
367
371 if order_by:
368 if order_by:
372 order_map = {
369 order_map = {
373 'name_raw': PullRequest.pull_request_id,
370 'name_raw': PullRequest.pull_request_id,
374 'title': PullRequest.title,
371 'title': PullRequest.title,
375 'updated_on_raw': PullRequest.updated_on,
372 'updated_on_raw': PullRequest.updated_on,
376 'target_repo': PullRequest.target_repo_id
373 'target_repo': PullRequest.target_repo_id
377 }
374 }
378 if order_dir == 'asc':
375 if order_dir == 'asc':
379 q = q.order_by(order_map[order_by].asc())
376 q = q.order_by(order_map[order_by].asc())
380 else:
377 else:
381 q = q.order_by(order_map[order_by].desc())
378 q = q.order_by(order_map[order_by].desc())
382
379
383 return q
380 return q
384
381
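# The participating query above matches pull requests where the user is either
# the author or one of the reviewers; roughly equivalent SQL (table and column
# names are illustrative):
#
#   SELECT * FROM pull_requests
#    WHERE user_id = :user_id
#       OR pull_request_id IN (SELECT pull_request_id
#                                FROM pull_request_reviewers
#                               WHERE user_id = :user_id)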
385 def count_im_participating_in(self, user_id=None, statuses=None):
382 def count_im_participating_in(self, user_id=None, statuses=None):
386 q = self._prepare_participating_query(user_id, statuses=statuses)
383 q = self._prepare_participating_query(user_id, statuses=statuses)
387 return q.count()
384 return q.count()
388
385
389 def get_im_participating_in(
386 def get_im_participating_in(
390 self, user_id=None, statuses=None, offset=0,
387 self, user_id=None, statuses=None, offset=0,
391 length=None, order_by=None, order_dir='desc'):
388 length=None, order_by=None, order_dir='desc'):
392 """
389 """
393 Get all pull requests that I'm participating in or have opened.
390 Get all pull requests that I'm participating in or have opened.
394 """
391 """
395
392
396 q = self._prepare_participating_query(
393 q = self._prepare_participating_query(
397 user_id, statuses=statuses, order_by=order_by,
394 user_id, statuses=statuses, order_by=order_by,
398 order_dir=order_dir)
395 order_dir=order_dir)
399
396
400 if length:
397 if length:
401 pull_requests = q.limit(length).offset(offset).all()
398 pull_requests = q.limit(length).offset(offset).all()
402 else:
399 else:
403 pull_requests = q.all()
400 pull_requests = q.all()
404
401
405 return pull_requests
402 return pull_requests
406
403
407 def get_versions(self, pull_request):
404 def get_versions(self, pull_request):
408 """
405 """
409 Returns versions of the pull request, sorted by version ID ascending.
406 Returns versions of the pull request, sorted by version ID ascending.
410 """
407 """
411 return PullRequestVersion.query()\
408 return PullRequestVersion.query()\
412 .filter(PullRequestVersion.pull_request == pull_request)\
409 .filter(PullRequestVersion.pull_request == pull_request)\
413 .order_by(PullRequestVersion.pull_request_version_id.asc())\
410 .order_by(PullRequestVersion.pull_request_version_id.asc())\
414 .all()
411 .all()
415
412
416 def create(self, created_by, source_repo, source_ref, target_repo,
413 def create(self, created_by, source_repo, source_ref, target_repo,
417 target_ref, revisions, reviewers, title, description=None):
414 target_ref, revisions, reviewers, title, description=None):
418 created_by_user = self._get_user(created_by)
415 created_by_user = self._get_user(created_by)
419 source_repo = self._get_repo(source_repo)
416 source_repo = self._get_repo(source_repo)
420 target_repo = self._get_repo(target_repo)
417 target_repo = self._get_repo(target_repo)
421
418
422 pull_request = PullRequest()
419 pull_request = PullRequest()
423 pull_request.source_repo = source_repo
420 pull_request.source_repo = source_repo
424 pull_request.source_ref = source_ref
421 pull_request.source_ref = source_ref
425 pull_request.target_repo = target_repo
422 pull_request.target_repo = target_repo
426 pull_request.target_ref = target_ref
423 pull_request.target_ref = target_ref
427 pull_request.revisions = revisions
424 pull_request.revisions = revisions
428 pull_request.title = title
425 pull_request.title = title
429 pull_request.description = description
426 pull_request.description = description
430 pull_request.author = created_by_user
427 pull_request.author = created_by_user
431
428
432 Session().add(pull_request)
429 Session().add(pull_request)
433 Session().flush()
430 Session().flush()
434
431
435 reviewer_ids = set()
432 reviewer_ids = set()
436 # members / reviewers
433 # members / reviewers
437 for reviewer_object in reviewers:
434 for reviewer_object in reviewers:
438 if isinstance(reviewer_object, tuple):
435 if isinstance(reviewer_object, tuple):
439 user_id, reasons = reviewer_object
436 user_id, reasons = reviewer_object
440 else:
437 else:
441 user_id, reasons = reviewer_object, []
438 user_id, reasons = reviewer_object, []
442
439
443 user = self._get_user(user_id)
440 user = self._get_user(user_id)
444 reviewer_ids.add(user.user_id)
441 reviewer_ids.add(user.user_id)
445
442
446 reviewer = PullRequestReviewers(user, pull_request, reasons)
443 reviewer = PullRequestReviewers(user, pull_request, reasons)
447 Session().add(reviewer)
444 Session().add(reviewer)
448
445
449 # Set approval status to "Under Review" for all commits which are
446 # Set approval status to "Under Review" for all commits which are
450 # part of this pull request.
447 # part of this pull request.
451 ChangesetStatusModel().set_status(
448 ChangesetStatusModel().set_status(
452 repo=target_repo,
449 repo=target_repo,
453 status=ChangesetStatus.STATUS_UNDER_REVIEW,
450 status=ChangesetStatus.STATUS_UNDER_REVIEW,
454 user=created_by_user,
451 user=created_by_user,
455 pull_request=pull_request
452 pull_request=pull_request
456 )
453 )
457
454
458 self.notify_reviewers(pull_request, reviewer_ids)
455 self.notify_reviewers(pull_request, reviewer_ids)
459 self._trigger_pull_request_hook(
456 self._trigger_pull_request_hook(
460 pull_request, created_by_user, 'create')
457 pull_request, created_by_user, 'create')
461
458
462 return pull_request
459 return pull_request
463
460
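# Illustrative `reviewers` argument for create() above (values are made up);
# each entry is either a bare user id/username or a `(user, reasons)` tuple,
# matching the isinstance() handling in the loop:
#
#   reviewers = [
#       'john',                                     # plain user reference
#       (42, ['Repository owner', 'Changed files match review rule']),
#   ]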
464 def _trigger_pull_request_hook(self, pull_request, user, action):
461 def _trigger_pull_request_hook(self, pull_request, user, action):
465 pull_request = self.__get_pull_request(pull_request)
462 pull_request = self.__get_pull_request(pull_request)
466 target_scm = pull_request.target_repo.scm_instance()
463 target_scm = pull_request.target_repo.scm_instance()
467 if action == 'create':
464 if action == 'create':
468 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
465 trigger_hook = hooks_utils.trigger_log_create_pull_request_hook
469 elif action == 'merge':
466 elif action == 'merge':
470 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
467 trigger_hook = hooks_utils.trigger_log_merge_pull_request_hook
471 elif action == 'close':
468 elif action == 'close':
472 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
469 trigger_hook = hooks_utils.trigger_log_close_pull_request_hook
473 elif action == 'review_status_change':
470 elif action == 'review_status_change':
474 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
471 trigger_hook = hooks_utils.trigger_log_review_pull_request_hook
475 elif action == 'update':
472 elif action == 'update':
476 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
473 trigger_hook = hooks_utils.trigger_log_update_pull_request_hook
477 else:
474 else:
478 return
475 return
479
476
480 trigger_hook(
477 trigger_hook(
481 username=user.username,
478 username=user.username,
482 repo_name=pull_request.target_repo.repo_name,
479 repo_name=pull_request.target_repo.repo_name,
483 repo_alias=target_scm.alias,
480 repo_alias=target_scm.alias,
484 pull_request=pull_request)
481 pull_request=pull_request)
485
482
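# _trigger_pull_request_hook() above only knows the actions 'create', 'merge',
# 'close', 'review_status_change' and 'update'; any other action name is
# silently ignored (the method simply returns without triggering a hook).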
486 def _get_commit_ids(self, pull_request):
483 def _get_commit_ids(self, pull_request):
487 """
484 """
488 Return the commit ids of the merged pull request.
485 Return the commit ids of the merged pull request.
489
486
490 This method does not yet deal correctly with the lack of autoupdates
487 This method does not yet deal correctly with the lack of autoupdates
491 or with implicit target updates.
488 or with implicit target updates.
492 For example, if a commit in the source repo is already in the target,
489 For example, if a commit in the source repo is already in the target,
493 it will still be reported.
490 it will still be reported.
494 """
491 """
495 merge_rev = pull_request.merge_rev
492 merge_rev = pull_request.merge_rev
496 if merge_rev is None:
493 if merge_rev is None:
497 raise ValueError('This pull request was not merged yet')
494 raise ValueError('This pull request was not merged yet')
498
495
499 commit_ids = list(pull_request.revisions)
496 commit_ids = list(pull_request.revisions)
500 if merge_rev not in commit_ids:
497 if merge_rev not in commit_ids:
501 commit_ids.append(merge_rev)
498 commit_ids.append(merge_rev)
502
499
503 return commit_ids
500 return commit_ids
504
501
505 def merge(self, pull_request, user, extras):
502 def merge(self, pull_request, user, extras):
506 log.debug("Merging pull request %s", pull_request.pull_request_id)
503 log.debug("Merging pull request %s", pull_request.pull_request_id)
507 merge_state = self._merge_pull_request(pull_request, user, extras)
504 merge_state = self._merge_pull_request(pull_request, user, extras)
508 if merge_state.executed:
505 if merge_state.executed:
509 log.debug(
506 log.debug(
510 "Merge was successful, updating the pull request comments.")
507 "Merge was successful, updating the pull request comments.")
511 self._comment_and_close_pr(pull_request, user, merge_state)
508 self._comment_and_close_pr(pull_request, user, merge_state)
512 self._log_action('user_merged_pull_request', user, pull_request)
509 self._log_action('user_merged_pull_request', user, pull_request)
513 else:
510 else:
514 log.warn("Merge failed, not updating the pull request.")
511 log.warn("Merge failed, not updating the pull request.")
515 return merge_state
512 return merge_state
516
513
517 def _merge_pull_request(self, pull_request, user, extras):
514 def _merge_pull_request(self, pull_request, user, extras):
518 target_vcs = pull_request.target_repo.scm_instance()
515 target_vcs = pull_request.target_repo.scm_instance()
519 source_vcs = pull_request.source_repo.scm_instance()
516 source_vcs = pull_request.source_repo.scm_instance()
520 target_ref = self._refresh_reference(
517 target_ref = self._refresh_reference(
521 pull_request.target_ref_parts, target_vcs)
518 pull_request.target_ref_parts, target_vcs)
522
519
523 message = _(
520 message = _(
524 'Merge pull request #%(pr_id)s from '
521 'Merge pull request #%(pr_id)s from '
525 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
522 '%(source_repo)s %(source_ref_name)s\n\n %(pr_title)s') % {
526 'pr_id': pull_request.pull_request_id,
523 'pr_id': pull_request.pull_request_id,
527 'source_repo': source_vcs.name,
524 'source_repo': source_vcs.name,
528 'source_ref_name': pull_request.source_ref_parts.name,
525 'source_ref_name': pull_request.source_ref_parts.name,
529 'pr_title': pull_request.title
526 'pr_title': pull_request.title
530 }
527 }
531
528
532 workspace_id = self._workspace_id(pull_request)
529 workspace_id = self._workspace_id(pull_request)
533 use_rebase = self._use_rebase_for_merging(pull_request)
530 use_rebase = self._use_rebase_for_merging(pull_request)
534
531
535 callback_daemon, extras = prepare_callback_daemon(
532 callback_daemon, extras = prepare_callback_daemon(
536 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
533 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
537 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
534 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
538
535
539 with callback_daemon:
536 with callback_daemon:
540 # TODO: johbo: Implement a clean way to run a config_override
537 # TODO: johbo: Implement a clean way to run a config_override
541 # for a single call.
538 # for a single call.
542 target_vcs.config.set(
539 target_vcs.config.set(
543 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
540 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
544 merge_state = target_vcs.merge(
541 merge_state = target_vcs.merge(
545 target_ref, source_vcs, pull_request.source_ref_parts,
542 target_ref, source_vcs, pull_request.source_ref_parts,
546 workspace_id, user_name=user.username,
543 workspace_id, user_name=user.username,
547 user_email=user.email, message=message, use_rebase=use_rebase)
544 user_email=user.email, message=message, use_rebase=use_rebase)
548 return merge_state
545 return merge_state
549
546
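# The merge message template above renders to something like (all values are
# illustrative):
#
#   Merge pull request #7 from some/repo feature-branch
#
#    Fix login redirect after password reset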
550 def _comment_and_close_pr(self, pull_request, user, merge_state):
547 def _comment_and_close_pr(self, pull_request, user, merge_state):
551 pull_request.merge_rev = merge_state.merge_ref.commit_id
548 pull_request.merge_rev = merge_state.merge_ref.commit_id
552 pull_request.updated_on = datetime.datetime.now()
549 pull_request.updated_on = datetime.datetime.now()
553
550
554 ChangesetCommentsModel().create(
551 ChangesetCommentsModel().create(
555 text=unicode(_('Pull request merged and closed')),
552 text=unicode(_('Pull request merged and closed')),
556 repo=pull_request.target_repo.repo_id,
553 repo=pull_request.target_repo.repo_id,
557 user=user.user_id,
554 user=user.user_id,
558 pull_request=pull_request.pull_request_id,
555 pull_request=pull_request.pull_request_id,
559 f_path=None,
556 f_path=None,
560 line_no=None,
557 line_no=None,
561 closing_pr=True
558 closing_pr=True
562 )
559 )
563
560
564 Session().add(pull_request)
561 Session().add(pull_request)
565 Session().flush()
562 Session().flush()
566 # TODO: paris: replace invalidation with less radical solution
563 # TODO: paris: replace invalidation with less radical solution
567 ScmModel().mark_for_invalidation(
564 ScmModel().mark_for_invalidation(
568 pull_request.target_repo.repo_name)
565 pull_request.target_repo.repo_name)
569 self._trigger_pull_request_hook(pull_request, user, 'merge')
566 self._trigger_pull_request_hook(pull_request, user, 'merge')
570
567
571 def has_valid_update_type(self, pull_request):
568 def has_valid_update_type(self, pull_request):
572 source_ref_type = pull_request.source_ref_parts.type
569 source_ref_type = pull_request.source_ref_parts.type
573 return source_ref_type in ['book', 'branch', 'tag']
570 return source_ref_type in ['book', 'branch', 'tag']
574
571
575 def update_commits(self, pull_request):
572 def update_commits(self, pull_request):
576 """
573 """
577 Get the updated list of commits for the pull request
574 Get the updated list of commits for the pull request
578 and return the new pull request version and the list
575 and return the new pull request version and the list
579 of commits processed by this update action
576 of commits processed by this update action
580 """
577 """
581 pull_request = self.__get_pull_request(pull_request)
578 pull_request = self.__get_pull_request(pull_request)
582 source_ref_type = pull_request.source_ref_parts.type
579 source_ref_type = pull_request.source_ref_parts.type
583 source_ref_name = pull_request.source_ref_parts.name
580 source_ref_name = pull_request.source_ref_parts.name
584 source_ref_id = pull_request.source_ref_parts.commit_id
581 source_ref_id = pull_request.source_ref_parts.commit_id
585
582
586 if not self.has_valid_update_type(pull_request):
583 if not self.has_valid_update_type(pull_request):
587 log.debug(
584 log.debug(
588 "Skipping update of pull request %s due to ref type: %s",
585 "Skipping update of pull request %s due to ref type: %s",
589 pull_request, source_ref_type)
586 pull_request, source_ref_type)
590 return UpdateResponse(
587 return UpdateResponse(
591 executed=False,
588 executed=False,
592 reason=UpdateFailureReason.WRONG_REF_TPYE,
589 reason=UpdateFailureReason.WRONG_REF_TPYE,
593 old=pull_request, new=None, changes=None)
590 old=pull_request, new=None, changes=None)
594
591
595 source_repo = pull_request.source_repo.scm_instance()
592 source_repo = pull_request.source_repo.scm_instance()
596 try:
593 try:
597 source_commit = source_repo.get_commit(commit_id=source_ref_name)
594 source_commit = source_repo.get_commit(commit_id=source_ref_name)
598 except CommitDoesNotExistError:
595 except CommitDoesNotExistError:
599 return UpdateResponse(
596 return UpdateResponse(
600 executed=False,
597 executed=False,
601 reason=UpdateFailureReason.MISSING_SOURCE_REF,
598 reason=UpdateFailureReason.MISSING_SOURCE_REF,
602 old=pull_request, new=None, changes=None)
599 old=pull_request, new=None, changes=None)
603
600
604 if source_ref_id == source_commit.raw_id:
601 if source_ref_id == source_commit.raw_id:
605 log.debug("Nothing changed in pull request %s", pull_request)
602 log.debug("Nothing changed in pull request %s", pull_request)
606 return UpdateResponse(
603 return UpdateResponse(
607 executed=False,
604 executed=False,
608 reason=UpdateFailureReason.NO_CHANGE,
605 reason=UpdateFailureReason.NO_CHANGE,
609 old=pull_request, new=None, changes=None)
606 old=pull_request, new=None, changes=None)
610
607
611 # Finally there is a need for an update
608 # Finally there is a need for an update
612 pull_request_version = self._create_version_from_snapshot(pull_request)
609 pull_request_version = self._create_version_from_snapshot(pull_request)
613 self._link_comments_to_version(pull_request_version)
610 self._link_comments_to_version(pull_request_version)
614
611
615 target_ref_type = pull_request.target_ref_parts.type
612 target_ref_type = pull_request.target_ref_parts.type
616 target_ref_name = pull_request.target_ref_parts.name
613 target_ref_name = pull_request.target_ref_parts.name
617 target_ref_id = pull_request.target_ref_parts.commit_id
614 target_ref_id = pull_request.target_ref_parts.commit_id
618 target_repo = pull_request.target_repo.scm_instance()
615 target_repo = pull_request.target_repo.scm_instance()
619
616
620 try:
617 try:
621 if target_ref_type in ('tag', 'branch', 'book'):
618 if target_ref_type in ('tag', 'branch', 'book'):
622 target_commit = target_repo.get_commit(target_ref_name)
619 target_commit = target_repo.get_commit(target_ref_name)
623 else:
620 else:
624 target_commit = target_repo.get_commit(target_ref_id)
621 target_commit = target_repo.get_commit(target_ref_id)
625 except CommitDoesNotExistError:
622 except CommitDoesNotExistError:
626 return UpdateResponse(
623 return UpdateResponse(
627 executed=False,
624 executed=False,
628 reason=UpdateFailureReason.MISSING_TARGET_REF,
625 reason=UpdateFailureReason.MISSING_TARGET_REF,
629 old=pull_request, new=None, changes=None)
626 old=pull_request, new=None, changes=None)
630
627
631 # re-compute commit ids
628 # re-compute commit ids
632 old_commit_ids = set(pull_request.revisions)
629 old_commit_ids = set(pull_request.revisions)
633 pre_load = ["author", "branch", "date", "message"]
630 pre_load = ["author", "branch", "date", "message"]
634 commit_ranges = target_repo.compare(
631 commit_ranges = target_repo.compare(
635 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
632 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
636 pre_load=pre_load)
633 pre_load=pre_load)
637
634
638 ancestor = target_repo.get_common_ancestor(
635 ancestor = target_repo.get_common_ancestor(
639 target_commit.raw_id, source_commit.raw_id, source_repo)
636 target_commit.raw_id, source_commit.raw_id, source_repo)
640
637
641 pull_request.source_ref = '%s:%s:%s' % (
638 pull_request.source_ref = '%s:%s:%s' % (
642 source_ref_type, source_ref_name, source_commit.raw_id)
639 source_ref_type, source_ref_name, source_commit.raw_id)
643 pull_request.target_ref = '%s:%s:%s' % (
640 pull_request.target_ref = '%s:%s:%s' % (
644 target_ref_type, target_ref_name, ancestor)
641 target_ref_type, target_ref_name, ancestor)
645 pull_request.revisions = [
642 pull_request.revisions = [
646 commit.raw_id for commit in reversed(commit_ranges)]
643 commit.raw_id for commit in reversed(commit_ranges)]
647 pull_request.updated_on = datetime.datetime.now()
644 pull_request.updated_on = datetime.datetime.now()
648 Session().add(pull_request)
645 Session().add(pull_request)
649 new_commit_ids = set(pull_request.revisions)
646 new_commit_ids = set(pull_request.revisions)
650
647
651 changes = self._calculate_commit_id_changes(
648 changes = self._calculate_commit_id_changes(
652 old_commit_ids, new_commit_ids)
649 old_commit_ids, new_commit_ids)
653
650
654 old_diff_data, new_diff_data = self._generate_update_diffs(
651 old_diff_data, new_diff_data = self._generate_update_diffs(
655 pull_request, pull_request_version)
652 pull_request, pull_request_version)
656
653
657 ChangesetCommentsModel().outdate_comments(
654 ChangesetCommentsModel().outdate_comments(
658 pull_request, old_diff_data=old_diff_data,
655 pull_request, old_diff_data=old_diff_data,
659 new_diff_data=new_diff_data)
656 new_diff_data=new_diff_data)
660
657
661 file_changes = self._calculate_file_changes(
658 file_changes = self._calculate_file_changes(
662 old_diff_data, new_diff_data)
659 old_diff_data, new_diff_data)
663
660
664 # Add an automatic comment to the pull request
661 # Add an automatic comment to the pull request
665 update_comment = ChangesetCommentsModel().create(
662 update_comment = ChangesetCommentsModel().create(
666 text=self._render_update_message(changes, file_changes),
663 text=self._render_update_message(changes, file_changes),
667 repo=pull_request.target_repo,
664 repo=pull_request.target_repo,
668 user=pull_request.author,
665 user=pull_request.author,
669 pull_request=pull_request,
666 pull_request=pull_request,
670 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
667 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
671
668
672 # Update status to "Under Review" for added commits
669 # Update status to "Under Review" for added commits
673 for commit_id in changes.added:
670 for commit_id in changes.added:
674 ChangesetStatusModel().set_status(
671 ChangesetStatusModel().set_status(
675 repo=pull_request.source_repo,
672 repo=pull_request.source_repo,
676 status=ChangesetStatus.STATUS_UNDER_REVIEW,
673 status=ChangesetStatus.STATUS_UNDER_REVIEW,
677 comment=update_comment,
674 comment=update_comment,
678 user=pull_request.author,
675 user=pull_request.author,
679 pull_request=pull_request,
676 pull_request=pull_request,
680 revision=commit_id)
677 revision=commit_id)
681
678
682 log.debug(
679 log.debug(
683 'Updated pull request %s, added_ids: %s, common_ids: %s, '
680 'Updated pull request %s, added_ids: %s, common_ids: %s, '
684 'removed_ids: %s', pull_request.pull_request_id,
681 'removed_ids: %s', pull_request.pull_request_id,
685 changes.added, changes.common, changes.removed)
682 changes.added, changes.common, changes.removed)
686 log.debug('Updated pull request with the following file changes: %s',
683 log.debug('Updated pull request with the following file changes: %s',
687 file_changes)
684 file_changes)
688
685
689 log.info(
686 log.info(
690 "Updated pull request %s from commit %s to commit %s, "
687 "Updated pull request %s from commit %s to commit %s, "
691 "stored new version %s of this pull request.",
688 "stored new version %s of this pull request.",
692 pull_request.pull_request_id, source_ref_id,
689 pull_request.pull_request_id, source_ref_id,
693 pull_request.source_ref_parts.commit_id,
690 pull_request.source_ref_parts.commit_id,
694 pull_request_version.pull_request_version_id)
691 pull_request_version.pull_request_version_id)
695 Session().commit()
692 Session().commit()
696 self._trigger_pull_request_hook(pull_request, pull_request.author,
693 self._trigger_pull_request_hook(pull_request, pull_request.author,
697 'update')
694 'update')
698
695
699 return UpdateResponse(
696 return UpdateResponse(
700 executed=True, reason=UpdateFailureReason.NONE,
697 executed=True, reason=UpdateFailureReason.NONE,
701 old=pull_request, new=pull_request_version, changes=changes)
698 old=pull_request, new=pull_request_version, changes=changes)
702
699
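# Summary of the update_commits() flow above:
#   1. return early (executed=False) when the source ref type is unsupported,
#      the source ref is missing, or nothing has changed
#   2. snapshot the current state into a PullRequestVersion and link the
#      existing comments to that version
#   3. resolve the target commit (bailing out if the target ref is missing),
#      then recompute the commit range and the common ancestor
#   4. diff the old and new versions, outdate affected comments and post an
#      automatic update comment
#   5. set "Under Review" on newly added commits, commit the session and
#      trigger the 'update' pull request hook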
703 def _create_version_from_snapshot(self, pull_request):
700 def _create_version_from_snapshot(self, pull_request):
704 version = PullRequestVersion()
701 version = PullRequestVersion()
705 version.title = pull_request.title
702 version.title = pull_request.title
706 version.description = pull_request.description
703 version.description = pull_request.description
707 version.status = pull_request.status
704 version.status = pull_request.status
708 version.created_on = pull_request.created_on
705 version.created_on = pull_request.created_on
709 version.updated_on = pull_request.updated_on
706 version.updated_on = pull_request.updated_on
710 version.user_id = pull_request.user_id
707 version.user_id = pull_request.user_id
711 version.source_repo = pull_request.source_repo
708 version.source_repo = pull_request.source_repo
712 version.source_ref = pull_request.source_ref
709 version.source_ref = pull_request.source_ref
713 version.target_repo = pull_request.target_repo
710 version.target_repo = pull_request.target_repo
714 version.target_ref = pull_request.target_ref
711 version.target_ref = pull_request.target_ref
715
712
716 version._last_merge_source_rev = pull_request._last_merge_source_rev
713 version._last_merge_source_rev = pull_request._last_merge_source_rev
717 version._last_merge_target_rev = pull_request._last_merge_target_rev
714 version._last_merge_target_rev = pull_request._last_merge_target_rev
718 version._last_merge_status = pull_request._last_merge_status
715 version._last_merge_status = pull_request._last_merge_status
719 version.shadow_merge_ref = pull_request.shadow_merge_ref
716 version.shadow_merge_ref = pull_request.shadow_merge_ref
720 version.merge_rev = pull_request.merge_rev
717 version.merge_rev = pull_request.merge_rev
721
718
722 version.revisions = pull_request.revisions
719 version.revisions = pull_request.revisions
723 version.pull_request = pull_request
720 version.pull_request = pull_request
724 Session().add(version)
721 Session().add(version)
725 Session().flush()
722 Session().flush()
726
723
727 return version
724 return version
728
725
729 def _generate_update_diffs(self, pull_request, pull_request_version):
726 def _generate_update_diffs(self, pull_request, pull_request_version):
730 diff_context = (
727 diff_context = (
731 self.DIFF_CONTEXT +
728 self.DIFF_CONTEXT +
732 ChangesetCommentsModel.needed_extra_diff_context())
729 ChangesetCommentsModel.needed_extra_diff_context())
733 old_diff = self._get_diff_from_pr_or_version(
730 old_diff = self._get_diff_from_pr_or_version(
734 pull_request_version, context=diff_context)
731 pull_request_version, context=diff_context)
735 new_diff = self._get_diff_from_pr_or_version(
732 new_diff = self._get_diff_from_pr_or_version(
736 pull_request, context=diff_context)
733 pull_request, context=diff_context)
737
734
738 old_diff_data = diffs.DiffProcessor(old_diff)
735 old_diff_data = diffs.DiffProcessor(old_diff)
739 old_diff_data.prepare()
736 old_diff_data.prepare()
740 new_diff_data = diffs.DiffProcessor(new_diff)
737 new_diff_data = diffs.DiffProcessor(new_diff)
741 new_diff_data.prepare()
738 new_diff_data.prepare()
742
739
743 return old_diff_data, new_diff_data
740 return old_diff_data, new_diff_data
744
741
745 def _link_comments_to_version(self, pull_request_version):
742 def _link_comments_to_version(self, pull_request_version):
746 """
743 """
747 Link all unlinked comments of this pull request to the given version.
744 Link all unlinked comments of this pull request to the given version.
748
745
749 :param pull_request_version: The `PullRequestVersion` to which
746 :param pull_request_version: The `PullRequestVersion` to which
750 the comments shall be linked.
747 the comments shall be linked.
751
748
752 """
749 """
753 pull_request = pull_request_version.pull_request
750 pull_request = pull_request_version.pull_request
754 comments = ChangesetComment.query().filter(
751 comments = ChangesetComment.query().filter(
755 # TODO: johbo: Should we query for the repo at all here?
752 # TODO: johbo: Should we query for the repo at all here?
756 # Pending decision on how comments of PRs are to be related
753 # Pending decision on how comments of PRs are to be related
757 # to either the source repo, the target repo or no repo at all.
754 # to either the source repo, the target repo or no repo at all.
758 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
755 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
759 ChangesetComment.pull_request == pull_request,
756 ChangesetComment.pull_request == pull_request,
760 ChangesetComment.pull_request_version == None)
757 ChangesetComment.pull_request_version == None)
761
758
762 # TODO: johbo: Find out why this breaks if it is done in a bulk
759 # TODO: johbo: Find out why this breaks if it is done in a bulk
763 # operation.
760 # operation.
764 for comment in comments:
761 for comment in comments:
765 comment.pull_request_version_id = (
762 comment.pull_request_version_id = (
766 pull_request_version.pull_request_version_id)
763 pull_request_version.pull_request_version_id)
767 Session().add(comment)
764 Session().add(comment)
768
765
769 def _calculate_commit_id_changes(self, old_ids, new_ids):
766 def _calculate_commit_id_changes(self, old_ids, new_ids):
770 added = new_ids.difference(old_ids)
767 added = new_ids.difference(old_ids)
771 common = old_ids.intersection(new_ids)
768 common = old_ids.intersection(new_ids)
772 removed = old_ids.difference(new_ids)
769 removed = old_ids.difference(new_ids)
773 return ChangeTuple(added, common, removed)
770 return ChangeTuple(added, common, removed)
774
771
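# Worked example for _calculate_commit_id_changes() above (hashes made up):
#
#   old_ids = {'a1', 'b2', 'c3'}
#   new_ids = {'b2', 'c3', 'd4'}
#   -> added = {'d4'}, common = {'b2', 'c3'}, removed = {'a1'}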
775 def _calculate_file_changes(self, old_diff_data, new_diff_data):
772 def _calculate_file_changes(self, old_diff_data, new_diff_data):
776
773
777 old_files = OrderedDict()
774 old_files = OrderedDict()
778 for diff_data in old_diff_data.parsed_diff:
775 for diff_data in old_diff_data.parsed_diff:
779 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
776 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
780
777
781 added_files = []
778 added_files = []
782 modified_files = []
779 modified_files = []
783 removed_files = []
780 removed_files = []
784 for diff_data in new_diff_data.parsed_diff:
781 for diff_data in new_diff_data.parsed_diff:
785 new_filename = diff_data['filename']
782 new_filename = diff_data['filename']
786 new_hash = md5_safe(diff_data['raw_diff'])
783 new_hash = md5_safe(diff_data['raw_diff'])
787
784
788 old_hash = old_files.get(new_filename)
785 old_hash = old_files.get(new_filename)
789 if not old_hash:
786 if not old_hash:
790 # file is not present in the old diff, which means it was added
787 # file is not present in the old diff, which means it was added
791 added_files.append(new_filename)
788 added_files.append(new_filename)
792 else:
789 else:
793 if new_hash != old_hash:
790 if new_hash != old_hash:
794 modified_files.append(new_filename)
791 modified_files.append(new_filename)
795 # now remove a file from old, since we have seen it already
792 # now remove a file from old, since we have seen it already
796 del old_files[new_filename]
793 del old_files[new_filename]
797
794
798 # removed files are those present in the old diff but not in the new one;
795 # removed files are those present in the old diff but not in the new one;
799 # since entries that also appear in the new diff were deleted from old_files
796 # since entries that also appear in the new diff were deleted from old_files
800 # above, whatever is left over is the set of removed files
797 # above, whatever is left over is the set of removed files
801 removed_files.extend(old_files.keys())
798 removed_files.extend(old_files.keys())
802
799
803 return FileChangeTuple(added_files, modified_files, removed_files)
800 return FileChangeTuple(added_files, modified_files, removed_files)
804
801
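# Worked example for _calculate_file_changes() above (names and hashes made up):
#
#   old diff hashes: {'a.py': 'h1', 'b.py': 'h2', 'c.py': 'h3'}
#   new diff hashes: {'a.py': 'h1', 'b.py': 'h9', 'd.py': 'h4'}
#   -> added = ['d.py'], modified = ['b.py'], removed = ['c.py']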
805 def _render_update_message(self, changes, file_changes):
802 def _render_update_message(self, changes, file_changes):
806 """
803 """
807 Render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
804 Render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
808 so it always looks the same regardless of which default renderer
805 so it always looks the same regardless of which default renderer
809 the system is using.
806 the system is using.
810
807
811 :param changes: changes named tuple
808 :param changes: changes named tuple
812 :param file_changes: file changes named tuple
809 :param file_changes: file changes named tuple
813
810
814 """
811 """
815 new_status = ChangesetStatus.get_status_lbl(
812 new_status = ChangesetStatus.get_status_lbl(
816 ChangesetStatus.STATUS_UNDER_REVIEW)
813 ChangesetStatus.STATUS_UNDER_REVIEW)
817
814
818 changed_files = (
815 changed_files = (
819 file_changes.added + file_changes.modified + file_changes.removed)
816 file_changes.added + file_changes.modified + file_changes.removed)
820
817
821 params = {
818 params = {
822 'under_review_label': new_status,
819 'under_review_label': new_status,
823 'added_commits': changes.added,
820 'added_commits': changes.added,
824 'removed_commits': changes.removed,
821 'removed_commits': changes.removed,
825 'changed_files': changed_files,
822 'changed_files': changed_files,
826 'added_files': file_changes.added,
823 'added_files': file_changes.added,
827 'modified_files': file_changes.modified,
824 'modified_files': file_changes.modified,
828 'removed_files': file_changes.removed,
825 'removed_files': file_changes.removed,
829 }
826 }
830 renderer = RstTemplateRenderer()
827 renderer = RstTemplateRenderer()
831 return renderer.render('pull_request_update.mako', **params)
828 return renderer.render('pull_request_update.mako', **params)
832
829
833 def edit(self, pull_request, title, description):
830 def edit(self, pull_request, title, description):
834 pull_request = self.__get_pull_request(pull_request)
831 pull_request = self.__get_pull_request(pull_request)
835 if pull_request.is_closed():
832 if pull_request.is_closed():
836 raise ValueError('This pull request is closed')
833 raise ValueError('This pull request is closed')
837 if title:
834 if title:
838 pull_request.title = title
835 pull_request.title = title
839 pull_request.description = description
836 pull_request.description = description
840 pull_request.updated_on = datetime.datetime.now()
837 pull_request.updated_on = datetime.datetime.now()
841 Session().add(pull_request)
838 Session().add(pull_request)
842
839
843 def update_reviewers(self, pull_request, reviewer_data):
840 def update_reviewers(self, pull_request, reviewer_data):
844 """
841 """
845 Update the reviewers in the pull request
842 Update the reviewers in the pull request
846
843
847 :param pull_request: the pr to update
844 :param pull_request: the pr to update
848 :param reviewer_data: list of tuples [(user, ['reason1', 'reason2'])]
845 :param reviewer_data: list of tuples [(user, ['reason1', 'reason2'])]
849 """
846 """
850
847
851 reviewers_reasons = {}
848 reviewers_reasons = {}
852 for user_id, reasons in reviewer_data:
849 for user_id, reasons in reviewer_data:
853 if isinstance(user_id, (int, basestring)):
850 if isinstance(user_id, (int, basestring)):
854 user_id = self._get_user(user_id).user_id
851 user_id = self._get_user(user_id).user_id
855 reviewers_reasons[user_id] = reasons
852 reviewers_reasons[user_id] = reasons
856
853
857 reviewers_ids = set(reviewers_reasons.keys())
854 reviewers_ids = set(reviewers_reasons.keys())
858 pull_request = self.__get_pull_request(pull_request)
855 pull_request = self.__get_pull_request(pull_request)
859 current_reviewers = PullRequestReviewers.query()\
856 current_reviewers = PullRequestReviewers.query()\
860 .filter(PullRequestReviewers.pull_request ==
857 .filter(PullRequestReviewers.pull_request ==
861 pull_request).all()
858 pull_request).all()
862 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
859 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
863
860
864 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
861 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
865 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
862 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
866
863
867 log.debug("Adding %s reviewers", ids_to_add)
864 log.debug("Adding %s reviewers", ids_to_add)
868 log.debug("Removing %s reviewers", ids_to_remove)
865 log.debug("Removing %s reviewers", ids_to_remove)
869 changed = False
866 changed = False
870 for uid in ids_to_add:
867 for uid in ids_to_add:
871 changed = True
868 changed = True
872 _usr = self._get_user(uid)
869 _usr = self._get_user(uid)
873 reasons = reviewers_reasons[uid]
870 reasons = reviewers_reasons[uid]
874 reviewer = PullRequestReviewers(_usr, pull_request, reasons)
871 reviewer = PullRequestReviewers(_usr, pull_request, reasons)
875 Session().add(reviewer)
872 Session().add(reviewer)
876
873
877 self.notify_reviewers(pull_request, ids_to_add)
874 self.notify_reviewers(pull_request, ids_to_add)
878
875
879 for uid in ids_to_remove:
876 for uid in ids_to_remove:
880 changed = True
877 changed = True
881 reviewer = PullRequestReviewers.query()\
878 reviewer = PullRequestReviewers.query()\
882 .filter(PullRequestReviewers.user_id == uid,
879 .filter(PullRequestReviewers.user_id == uid,
883 PullRequestReviewers.pull_request == pull_request)\
880 PullRequestReviewers.pull_request == pull_request)\
884 .scalar()
881 .scalar()
885 if reviewer:
882 if reviewer:
886 Session().delete(reviewer)
883 Session().delete(reviewer)
887 if changed:
884 if changed:
888 pull_request.updated_on = datetime.datetime.now()
885 pull_request.updated_on = datetime.datetime.now()
889 Session().add(pull_request)
886 Session().add(pull_request)
890
887
891 return ids_to_add, ids_to_remove
888 return ids_to_add, ids_to_remove
892
889
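# Illustrative `reviewer_data` for update_reviewers() above (values made up);
# each entry is a `(user, reasons)` tuple where `user` may be an id or a name:
#
#   reviewer_data = [
#       (4, ['Repository owner']),
#       ('lisa', []),
#   ]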
893 def get_url(self, pull_request):
890 def get_url(self, pull_request):
894 return h.url('pullrequest_show',
891 return h.url('pullrequest_show',
895 repo_name=safe_str(pull_request.target_repo.repo_name),
892 repo_name=safe_str(pull_request.target_repo.repo_name),
896 pull_request_id=pull_request.pull_request_id,
893 pull_request_id=pull_request.pull_request_id,
897 qualified=True)
894 qualified=True)
898
895
899 def get_shadow_clone_url(self, pull_request):
896 def get_shadow_clone_url(self, pull_request):
900 """
897 """
901 Returns a qualified URL pointing to the shadow repository. If this pull
898 Returns a qualified URL pointing to the shadow repository. If this pull
902 request is closed, there is no shadow repository and ``None`` will be
899 request is closed, there is no shadow repository and ``None`` will be
903 returned.
900 returned.
904 """
901 """
905 if pull_request.is_closed():
902 if pull_request.is_closed():
906 return None
903 return None
907 else:
904 else:
908 pr_url = urllib.unquote(self.get_url(pull_request))
905 pr_url = urllib.unquote(self.get_url(pull_request))
909 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
906 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
910
907
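# get_shadow_clone_url() above simply appends '/repository' to the qualified
# pull request URL from get_url(), e.g. (URL shape illustrative):
#
#   <qualified pull request url>/repository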
    def notify_reviewers(self, pull_request, reviewers_ids):
        # notification to reviewers
        if not reviewers_ids:
            return

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = reviewers_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.url(
            'pullrequest_show',
            repo_name=pr_target_repo.repo_name,
            pull_request_id=pull_request_obj.pull_request_id,
            qualified=True,)

        # set some variables for email notification
        pr_target_repo_url = h.url(
            'summary_home',
            repo_name=pr_target_repo.repo_name,
            qualified=True)

        pr_source_repo_url = h.url(
            'summary_home',
            repo_name=pr_source_repo.repo_name,
            qualified=True)

        # pull request specifics
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        kwargs = {
            'user': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
        }

        # pre-generate the subject for notification itself
        (subject,
         _h, _e,  # we don't care about those
         body_plaintext) = EmailNotificationModel().render_email(
            notification_type, **kwargs)

        # create notification objects, and emails
        NotificationModel().create(
            created_by=pull_request.author,
            notification_subject=subject,
            notification_body=body_plaintext,
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )

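    # Illustrative sketch -- hypothetical, inferred only from the method above:
    # a caller that has just added reviewers would pass their user ids, e.g.
    #
    #   >>> PullRequestModel().notify_reviewers(pull_request, [2, 7])
    #
    # With an empty id list the method returns early and no notification or
    # email is created. The user ids shown are placeholders.
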
    def delete(self, pull_request):
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        Session().delete(pull_request)

    def close_pull_request(self, pull_request, user):
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self._trigger_pull_request_hook(
            pull_request, pull_request.author, 'close')
        self._log_action('user_closed_pull_request', user, pull_request)

    def close_pull_request_with_comment(self, pull_request, user, repo,
                                        message=None):
        status = ChangesetStatus.STATUS_REJECTED

        if not message:
            message = (
                _('Status change %(transition_icon)s %(status)s') % {
                    'transition_icon': '>',
                    'status': ChangesetStatus.get_status_lbl(status)})

        internal_message = _('Closing with') + ' ' + message

        comm = ChangesetCommentsModel().create(
            text=internal_message,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            status_change=ChangesetStatus.get_status_lbl(status),
            status_change_type=status,
            closing_pr=True
        )

        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comm,
            pull_request=pull_request.pull_request_id
        )
        Session().flush()

        PullRequestModel().close_pull_request(
            pull_request.pull_request_id, user)

    def merge_status(self, pull_request):
        if not self._is_merge_enabled(pull_request):
            return False, _('Server-side pull request merging is disabled.')
        if pull_request.is_closed():
            return False, _('This pull request is closed.')
        merge_possible, msg = self._check_repo_requirements(
            target=pull_request.target_repo, source=pull_request.source_repo)
        if not merge_possible:
            return merge_possible, msg

        try:
            resp = self._try_merge(pull_request)
            log.debug("Merge response: %s", resp)
            status = resp.possible, self.merge_status_message(
                resp.failure_reason)
        except NotImplementedError:
            status = False, _('Pull request merging is not supported.')

        return status

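    # Illustrative sketch -- hypothetical, inferred only from the method above:
    # callers consume merge_status() as a (possible, message) pair, e.g.
    #
    #   >>> can_merge, msg = PullRequestModel().merge_status(pull_request)
    #   >>> can_merge, msg
    #   (False, u'This pull request is closed.')
    #
    # The closed-PR outcome is only one possible result; for open requests the
    # message comes from merge_status_message().
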
    def _check_repo_requirements(self, target, source):
        """
        Check if `target` and `source` have compatible requirements.

        Currently this is just checking for largefiles.
        """
        target_has_largefiles = self._has_largefiles(target)
        source_has_largefiles = self._has_largefiles(source)
        merge_possible = True
        message = u''

        if target_has_largefiles != source_has_largefiles:
            merge_possible = False
            if source_has_largefiles:
                message = _(
                    'Target repository large files support is disabled.')
            else:
                message = _(
                    'Source repository large files support is disabled.')

        return merge_possible, message

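    # Illustrative sketch -- hypothetical, inferred only from the method above:
    # the only incompatible combination is a largefiles mismatch, e.g. with a
    # source repo that has largefiles enabled and a target repo that does not:
    #
    #   >>> self._check_repo_requirements(target=plain_repo, source=lf_repo)
    #   (False, u'Target repository large files support is disabled.')
    #
    # `plain_repo` and `lf_repo` are placeholder repository objects.
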
    def _has_largefiles(self, repo):
        largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
            'extensions', 'largefiles')
        return largefiles_ui and largefiles_ui[0].active

    def _try_merge(self, pull_request):
        """
        Try to merge the pull request and return the merge status.
        """
        log.debug(
            "Trying out if the pull request %s can be merged.",
            pull_request.pull_request_id)
        target_vcs = pull_request.target_repo.scm_instance()

        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF)
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            log.debug("The target repository is locked.")
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED)
        elif self._needs_merge_state_refresh(pull_request, target_ref):
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            possible = pull_request.\
                _last_merge_status == MergeFailureReason.NONE
            merge_state = MergeResponse(
                possible, False, None, pull_request._last_merge_status)

        return merge_state

    def _refresh_reference(self, reference, vcs_repository):
        if reference.type in ('branch', 'book'):
            name_or_id = reference.name
        else:
            name_or_id = reference.commit_id
        refreshed_commit = vcs_repository.get_commit(name_or_id)
        refreshed_reference = Reference(
            reference.type, reference.name, refreshed_commit.raw_id)
        return refreshed_reference

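    # Illustrative sketch -- hypothetical, inferred only from the method above:
    # for a 'branch' or 'book' reference the commit is re-resolved by name, so
    # a stale id is replaced by whatever the branch currently points at, e.g.
    #
    #   >>> ref = Reference('branch', 'default', 'aaaa1111')   # stale id
    #   >>> self._refresh_reference(ref, target_vcs).commit_id
    #   'bbbb2222'   # the current branch head as returned by get_commit()
    #
    # The commit ids are placeholders, not real hashes.
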
    def _needs_merge_state_refresh(self, pull_request, target_reference):
        return not (
            pull_request.revisions and
            pull_request.revisions[0] == pull_request._last_merge_source_rev and
            target_reference.commit_id == pull_request._last_merge_target_rev)

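    # Illustrative note -- hypothetical, inferred only from the check above:
    # the cached merge state is reused only while both ends are unchanged,
    # i.e. a refresh is needed unless revisions[0] still equals
    # _last_merge_source_rev and the target ref still points at
    # _last_merge_target_rev.
    #
    #   >>> self._needs_merge_state_refresh(pull_request, target_ref)
    #   False   # hypothetical: nothing moved since the last dry-run merge
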
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        use_rebase = self._use_rebase_for_merging(pull_request)
        merge_state = target_vcs.merge(
            target_reference, source_vcs, pull_request.source_ref_parts,
            workspace_id, dry_run=True, use_rebase=use_rebase)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request._last_merge_status = merge_state.failure_reason
            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state

    def _workspace_id(self, pull_request):
        workspace_id = 'pr-%s' % pull_request.pull_request_id
        return workspace_id

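    # Illustrative sketch -- hypothetical, inferred only from the method above:
    # the workspace id depends on the pull request id alone, so for a
    # hypothetical pull request #42:
    #
    #   >>> self._workspace_id(pull_request)
    #   'pr-42'
    #
    # The same 'pr-<id>' value is rebuilt by _cleanup_merge_workspace() below.
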
    def merge_status_message(self, status_code):
        """
        Return a human-friendly error message for the given merge status code.
        """
        return self.MERGE_STATUS_MESSAGES[status_code]

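    # Illustrative sketch -- hypothetical, inferred only from the method above:
    # the lookup is a plain dict access keyed by a MergeFailureReason value,
    # e.g.
    #
    #   >>> self.merge_status_message(MergeFailureReason.TARGET_IS_LOCKED)
    #   u'...'   # the locked-target message; exact wording not shown here
    #
    # A status code missing from MERGE_STATUS_MESSAGES would raise KeyError.
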
    def generate_repo_data(self, repo, commit_id=None, branch=None,
                           bookmark=None):
        all_refs, selected_ref = \
            self._get_repo_pullrequest_sources(
                repo.scm_instance(), commit_id=commit_id,
                branch=branch, bookmark=bookmark)

        refs_select2 = []
        for element in all_refs:
            children = [{'id': x[0], 'text': x[1]} for x in element[0]]
            refs_select2.append({'text': element[1], 'children': children})

        return {
            'user': {
                'user_id': repo.user.user_id,
                'username': repo.user.username,
                'firstname': repo.user.firstname,
                'lastname': repo.user.lastname,
                'gravatar_link': h.gravatar_url(repo.user.email, 14),
            },
            'description': h.chop_at_smart(repo.description, '\n'),
            'refs': {
                'all_refs': all_refs,
                'selected_ref': selected_ref,
                'select2_refs': refs_select2
            }
        }

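    # Illustrative sketch -- hypothetical, inferred only from the method above:
    # each 'select2_refs' group reshapes the (ref_key, ref_name) pairs from
    # _get_repo_pullrequest_sources() for a select2 widget, e.g.
    #
    #   {'text': u'Branches',
    #    'children': [{'id': 'branch:default:<commit id>', 'text': 'default'}]}
    #
    # The branch name and commit id are placeholders.
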
    def generate_pullrequest_title(self, source, source_ref, target):
        return u'{source}#{at_ref} to {target}'.format(
            source=source,
            at_ref=source_ref,
            target=target,
        )

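    # Illustrative sketch -- hypothetical values, inferred only from the
    # format string above:
    #
    #   >>> self.generate_pullrequest_title(
    #   ...     'my-repo', 'feature-x', 'my-repo-fork')
    #   u'my-repo#feature-x to my-repo-fork'
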
    def _cleanup_merge_workspace(self, pull_request):
        # Merging related cleanup
        target_scm = pull_request.target_repo.scm_instance()
        workspace_id = 'pr-%s' % pull_request.pull_request_id

        try:
            target_scm.cleanup_merge_workspace(workspace_id)
        except NotImplementedError:
            pass

    def _get_repo_pullrequest_sources(
            self, repo, commit_id=None, branch=None, bookmark=None):
        """
        Return a structure with repo's interesting commits, suitable for
        the selectors in the pull request controller

        :param commit_id: a commit that must be in the list somehow
            and selected by default
        :param branch: a branch that must be in the list and selected
            by default - even if closed
        :param bookmark: a bookmark that must be in the list and selected
        """

        commit_id = safe_str(commit_id) if commit_id else None
        branch = safe_str(branch) if branch else None
        bookmark = safe_str(bookmark) if bookmark else None

        selected = None

        # order matters: first source that has commit_id in it will be selected
        sources = []
        sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
        sources.append(('branch', repo.branches.items(), _('Branches'), branch))

        if commit_id:
            ref_commit = (h.short_id(commit_id), commit_id)
            sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))

        sources.append(
            ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
        )

        groups = []
        for group_key, ref_list, group_name, match in sources:
            group_refs = []
            for ref_name, ref_id in ref_list:
                ref_key = '%s:%s:%s' % (group_key, ref_name, ref_id)
                group_refs.append((ref_key, ref_name))

                if not selected:
                    if set([commit_id, match]) & set([ref_id, ref_name]):
                        selected = ref_key

            if group_refs:
                groups.append((group_refs, group_name))

        if not selected:
            ref = commit_id or branch or bookmark
            if ref:
                raise CommitDoesNotExistError(
                    'No commit refs could be found matching: %s' % ref)
            elif repo.DEFAULT_BRANCH_NAME in repo.branches:
                selected = 'branch:%s:%s' % (
                    repo.DEFAULT_BRANCH_NAME,
                    repo.branches[repo.DEFAULT_BRANCH_NAME]
                )
            elif repo.commit_ids:
                rev = repo.commit_ids[0]
                selected = 'rev:%s:%s' % (rev, rev)
            else:
                raise EmptyRepositoryError()
        return groups, selected

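    # Illustrative sketch -- hypothetical, inferred only from the method above:
    # `groups` is a list of ([(ref_key, ref_name), ...], group_label) tuples
    # and `selected` is a single ref_key, e.g. for a repo with one bookmark
    # and one branch:
    #
    #   groups = [
    #       ([('book:feature:<commit id>', 'feature')], u'Bookmarks'),
    #       ([('branch:default:<commit id>', 'default')], u'Branches'),
    #   ]
    #   selected = 'branch:default:<commit id>'
    #
    # The names and commit ids are placeholders.
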
    def get_diff(self, pull_request, context=DIFF_CONTEXT):
        pull_request = self.__get_pull_request(pull_request)
        return self._get_diff_from_pr_or_version(pull_request, context=context)

    def _get_diff_from_pr_or_version(self, pr_or_version, context):
        source_repo = pr_or_version.source_repo

        # we swap org/other ref since we run a simple diff on one repo
        target_ref_id = pr_or_version.target_ref_parts.commit_id
        source_ref_id = pr_or_version.source_ref_parts.commit_id
        target_commit = source_repo.get_commit(
            commit_id=safe_str(target_ref_id))
        source_commit = source_repo.get_commit(commit_id=safe_str(source_ref_id))
        vcs_repo = source_repo.scm_instance()

        # TODO: johbo: In the context of an update, we cannot reach
        # the old commit anymore with our normal mechanisms. It needs
        # some sort of special support in the vcs layer to avoid this
        # workaround.
        if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
                vcs_repo.alias == 'git'):
            source_commit.raw_id = safe_str(source_ref_id)

        log.debug('calculating diff between '
                  'source_ref:%s and target_ref:%s for repo `%s`',
                  target_ref_id, source_ref_id,
                  safe_unicode(vcs_repo.path))

        vcs_diff = vcs_repo.get_diff(
            commit1=target_commit, commit2=source_commit, context=context)
        return vcs_diff

    def _is_merge_enabled(self, pull_request):
        settings_model = VcsSettingsModel(repo=pull_request.target_repo)
        settings = settings_model.get_general_settings()
        return settings.get('rhodecode_pr_merge_enabled', False)

    def _use_rebase_for_merging(self, pull_request):
        settings_model = VcsSettingsModel(repo=pull_request.target_repo)
        settings = settings_model.get_general_settings()
        return settings.get('rhodecode_hg_use_rebase_for_merging', False)

    def _log_action(self, action, user, pull_request):
        action_logger(
            user,
            '{action}:{pr_id}'.format(
                action=action, pr_id=pull_request.pull_request_id),
            pull_request.target_repo)


ChangeTuple = namedtuple('ChangeTuple',
                         ['added', 'common', 'removed'])

FileChangeTuple = namedtuple('FileChangeTuple',
                             ['added', 'modified', 'removed'])
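
# Illustrative sketch -- hypothetical values, inferred only from the
# definitions above: both tuples are plain namedtuples, so consumers can
# unpack them positionally or access fields by name, e.g.
#
#   >>> changes = FileChangeTuple(added=['docs/index.rst'], modified=[],
#   ...                           removed=['old.txt'])
#   >>> changes.added
#   ['docs/index.rst']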