merges: fixed excessive data saved in merge metadata that could not fit inside the DB table....
milka
r4628:a7083868 stable
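The point of this change: the merge metadata persisted to the database (the list of target branch heads and the list of unresolved conflict files) is now truncated before it is stored, so a merge with many heads or conflicts no longer produces a value that cannot fit inside the DB table. Branch heads are capped at 10 entries and conflict paths at 20, with an "and N more." marker appended. A minimal sketch of that truncation idea, assuming a hypothetical truncate_with_summary helper (the helper name and sample data are illustrative, not part of the diff):

def truncate_with_summary(items, limit):
    # Keep at most `limit` entries and summarize the rest, so the joined
    # string stays small enough for a bounded database column.
    items = list(items)
    if len(items) > limit:
        return items[:limit] + ['and {} more.'.format(len(items) - limit)]
    return items


# Example: 25 conflicting files collapse to 20 entries plus a summary line.
unresolved_files = ['file_{}.txt'.format(i) for i in range(25)]
unresolved_metadata = '\n* conflict: ' + '\n * conflict: '.join(
    truncate_with_summary(unresolved_files, 20))
print(unresolved_metadata)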
@@ -1,996 +1,1012 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
HG repository module
"""
import os
import logging
import binascii
import urllib

from zope.cachedescriptors.property import Lazy as LazyProperty

from rhodecode.lib.compat import OrderedDict
from rhodecode.lib.datelib import (
    date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
from rhodecode.lib.utils import safe_unicode, safe_str
from rhodecode.lib.utils2 import CachedProperty
from rhodecode.lib.vcs import connection, exceptions
from rhodecode.lib.vcs.backends.base import (
    BaseRepository, CollectionGenerator, Config, MergeResponse,
    MergeFailureReason, Reference, BasePathPermissionChecker)
from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
from rhodecode.lib.vcs.exceptions import (
    EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
    TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
from rhodecode.lib.vcs.compat import configparser

hexlify = binascii.hexlify
nullid = "\0" * 20

log = logging.getLogger(__name__)


class MercurialRepository(BaseRepository):
55 """
55 """
56 Mercurial repository backend
56 Mercurial repository backend
57 """
57 """
58 DEFAULT_BRANCH_NAME = 'default'
58 DEFAULT_BRANCH_NAME = 'default'
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62 """
62 """
63 Raises RepositoryError if repository could not be find at the given
63 Raises RepositoryError if repository could not be find at the given
64 ``repo_path``.
64 ``repo_path``.
65
65
66 :param repo_path: local path of the repository
66 :param repo_path: local path of the repository
67 :param config: config object containing the repo configuration
67 :param config: config object containing the repo configuration
68 :param create=False: if set to True, would try to create repository if
68 :param create=False: if set to True, would try to create repository if
69 it does not exist rather than raising exception
69 it does not exist rather than raising exception
70 :param src_url=None: would try to clone repository from given location
70 :param src_url=None: would try to clone repository from given location
71 :param do_workspace_checkout=False: sets update of working copy after
71 :param do_workspace_checkout=False: sets update of working copy after
72 making a clone
72 making a clone
73 :param bare: not used, compatible with other VCS
73 :param bare: not used, compatible with other VCS
74 """
74 """
75
75
76 self.path = safe_str(os.path.abspath(repo_path))
76 self.path = safe_str(os.path.abspath(repo_path))
77 # mercurial since 4.4.X requires certain configuration to be present
77 # mercurial since 4.4.X requires certain configuration to be present
78 # because sometimes we init the repos with config we need to meet
78 # because sometimes we init the repos with config we need to meet
79 # special requirements
79 # special requirements
80 self.config = config if config else self.get_default_config(
80 self.config = config if config else self.get_default_config(
81 default=[('extensions', 'largefiles', '1')])
81 default=[('extensions', 'largefiles', '1')])
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83
83
84 self._init_repo(create, src_url, do_workspace_checkout)
84 self._init_repo(create, src_url, do_workspace_checkout)
85
85
86 # caches
86 # caches
87 self._commit_ids = {}
87 self._commit_ids = {}
88
88
89 @LazyProperty
89 @LazyProperty
90 def _remote(self):
90 def _remote(self):
91 repo_id = self.path
91 repo_id = self.path
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93
93
94 @CachedProperty
94 @CachedProperty
95 def commit_ids(self):
95 def commit_ids(self):
96 """
96 """
97 Returns list of commit ids, in ascending order. Being lazy
97 Returns list of commit ids, in ascending order. Being lazy
98 attribute allows external tools to inject shas from cache.
98 attribute allows external tools to inject shas from cache.
99 """
99 """
100 commit_ids = self._get_all_commit_ids()
100 commit_ids = self._get_all_commit_ids()
101 self._rebuild_cache(commit_ids)
101 self._rebuild_cache(commit_ids)
102 return commit_ids
102 return commit_ids
103
103
104 def _rebuild_cache(self, commit_ids):
104 def _rebuild_cache(self, commit_ids):
105 self._commit_ids = dict((commit_id, index)
105 self._commit_ids = dict((commit_id, index)
106 for index, commit_id in enumerate(commit_ids))
106 for index, commit_id in enumerate(commit_ids))
107
107
108 @CachedProperty
108 @CachedProperty
109 def branches(self):
109 def branches(self):
110 return self._get_branches()
110 return self._get_branches()
111
111
112 @CachedProperty
112 @CachedProperty
113 def branches_closed(self):
113 def branches_closed(self):
114 return self._get_branches(active=False, closed=True)
114 return self._get_branches(active=False, closed=True)
115
115
116 @CachedProperty
116 @CachedProperty
117 def branches_all(self):
117 def branches_all(self):
118 all_branches = {}
118 all_branches = {}
119 all_branches.update(self.branches)
119 all_branches.update(self.branches)
120 all_branches.update(self.branches_closed)
120 all_branches.update(self.branches_closed)
121 return all_branches
121 return all_branches
122
122
123 def _get_branches(self, active=True, closed=False):
123 def _get_branches(self, active=True, closed=False):
124 """
124 """
125 Gets branches for this repository
125 Gets branches for this repository
126 Returns only not closed active branches by default
126 Returns only not closed active branches by default
127
127
128 :param active: return also active branches
128 :param active: return also active branches
129 :param closed: return also closed branches
129 :param closed: return also closed branches
130
130
131 """
131 """
132 if self.is_empty():
132 if self.is_empty():
133 return {}
133 return {}
134
134
135 def get_name(ctx):
135 def get_name(ctx):
136 return ctx[0]
136 return ctx[0]
137
137
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
138 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
139 self._remote.branches(active, closed).items()]
139 self._remote.branches(active, closed).items()]
140
140
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142
142
143 @CachedProperty
143 @CachedProperty
144 def tags(self):
144 def tags(self):
145 """
145 """
146 Gets tags for this repository
146 Gets tags for this repository
147 """
147 """
148 return self._get_tags()
148 return self._get_tags()
149
149
150 def _get_tags(self):
150 def _get_tags(self):
151 if self.is_empty():
151 if self.is_empty():
152 return {}
152 return {}
153
153
154 def get_name(ctx):
154 def get_name(ctx):
155 return ctx[0]
155 return ctx[0]
156
156
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
157 _tags = [(safe_unicode(n), hexlify(h),) for n, h in
158 self._remote.tags().items()]
158 self._remote.tags().items()]
159
159
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161
161
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 """
163 """
164 Creates and returns a tag for the given ``commit_id``.
164 Creates and returns a tag for the given ``commit_id``.
165
165
166 :param name: name for new tag
166 :param name: name for new tag
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 :param commit_id: commit id for which new tag would be created
168 :param commit_id: commit id for which new tag would be created
169 :param message: message of the tag's commit
169 :param message: message of the tag's commit
170 :param date: date of tag's commit
170 :param date: date of tag's commit
171
171
172 :raises TagAlreadyExistError: if tag with same name already exists
172 :raises TagAlreadyExistError: if tag with same name already exists
173 """
173 """
174 if name in self.tags:
174 if name in self.tags:
175 raise TagAlreadyExistError("Tag %s already exists" % name)
175 raise TagAlreadyExistError("Tag %s already exists" % name)
176
176
177 commit = self.get_commit(commit_id=commit_id)
177 commit = self.get_commit(commit_id=commit_id)
178 local = kwargs.setdefault('local', False)
178 local = kwargs.setdefault('local', False)
179
179
180 if message is None:
180 if message is None:
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
181 message = "Added tag %s for commit %s" % (name, commit.short_id)
182
182
183 date, tz = date_to_timestamp_plus_offset(date)
183 date, tz = date_to_timestamp_plus_offset(date)
184
184
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 self._remote.invalidate_vcs_cache()
186 self._remote.invalidate_vcs_cache()
187
187
188 # Reinitialize tags
188 # Reinitialize tags
189 self._invalidate_prop_cache('tags')
189 self._invalidate_prop_cache('tags')
190 tag_id = self.tags[name]
190 tag_id = self.tags[name]
191
191
192 return self.get_commit(commit_id=tag_id)
192 return self.get_commit(commit_id=tag_id)
193
193
194 def remove_tag(self, name, user, message=None, date=None):
194 def remove_tag(self, name, user, message=None, date=None):
195 """
195 """
196 Removes tag with the given `name`.
196 Removes tag with the given `name`.
197
197
198 :param name: name of the tag to be removed
198 :param name: name of the tag to be removed
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 :param message: message of the tag's removal commit
200 :param message: message of the tag's removal commit
201 :param date: date of tag's removal commit
201 :param date: date of tag's removal commit
202
202
203 :raises TagDoesNotExistError: if tag with given name does not exists
203 :raises TagDoesNotExistError: if tag with given name does not exists
204 """
204 """
205 if name not in self.tags:
205 if name not in self.tags:
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207
207
208 if message is None:
208 if message is None:
209 message = "Removed tag %s" % name
209 message = "Removed tag %s" % name
210 local = False
210 local = False
211
211
212 date, tz = date_to_timestamp_plus_offset(date)
212 date, tz = date_to_timestamp_plus_offset(date)
213
213
214 self._remote.tag(name, nullid, message, local, user, date, tz)
214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 self._remote.invalidate_vcs_cache()
215 self._remote.invalidate_vcs_cache()
216 self._invalidate_prop_cache('tags')
216 self._invalidate_prop_cache('tags')
217
217
218 @LazyProperty
218 @LazyProperty
219 def bookmarks(self):
219 def bookmarks(self):
220 """
220 """
221 Gets bookmarks for this repository
221 Gets bookmarks for this repository
222 """
222 """
223 return self._get_bookmarks()
223 return self._get_bookmarks()
224
224
225 def _get_bookmarks(self):
225 def _get_bookmarks(self):
226 if self.is_empty():
226 if self.is_empty():
227 return {}
227 return {}
228
228
229 def get_name(ctx):
229 def get_name(ctx):
230 return ctx[0]
230 return ctx[0]
231
231
232 _bookmarks = [
232 _bookmarks = [
233 (safe_unicode(n), hexlify(h)) for n, h in
233 (safe_unicode(n), hexlify(h)) for n, h in
234 self._remote.bookmarks().items()]
234 self._remote.bookmarks().items()]
235
235
236 return OrderedDict(sorted(_bookmarks, key=get_name))
236 return OrderedDict(sorted(_bookmarks, key=get_name))
237
237
238 def _get_all_commit_ids(self):
238 def _get_all_commit_ids(self):
239 return self._remote.get_all_commit_ids('visible')
239 return self._remote.get_all_commit_ids('visible')
240
240
241 def get_diff(
241 def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
            ``self.EMPTY_COMMIT`` - in this case, patch showing all
            the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
            changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
            shown. Defaults to ``3``.
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            file_filter = [self.path, path]
        else:
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_git=True, opt_ignorews=ignore_whitespace,
            context=context)
        return MercurialDiff(diff)

    def strip(self, commit_id, branch=None):
        self._remote.strip(commit_id, update=False, backup="none")

        self._remote.invalidate_vcs_cache()
        # clear cache
        self._invalidate_prop_cache('commit_ids')

        return len(self.commit_ids)

    def verify(self):
        verify = self._remote.verify()

        self._remote.invalidate_vcs_cache()
        return verify

    def hg_update_cache(self):
        update_cache = self._remote.hg_update_cache()

        self._remote.invalidate_vcs_cache()
        return update_cache

    def hg_rebuild_fn_cache(self):
        update_cache = self._remote.hg_rebuild_fn_cache()

        self._remote.invalidate_vcs_cache()
        return update_cache

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
                  self, commit_id1, repo2, commit_id2)

        if commit_id1 == commit_id2:
            return commit_id1

        ancestors = self._remote.revs_from_revspec(
            "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
            other_path=repo2.path)

        ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None

        log.debug('Found common ancestor with sha: %s', ancestor_id)
        return ancestor_id

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        if commit_id1 == commit_id2:
            commits = []
        else:
            if merge:
                indexes = self._remote.revs_from_revspec(
                    "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
                    commit_id2, commit_id1, commit_id1, other_path=repo2.path)
            else:
                indexes = self._remote.revs_from_revspec(
                    "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
                    commit_id1, other_path=repo2.path)

            commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
                       for idx in indexes]

        return commits

    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Sometimes it may happen that mercurial will issue basic
        auth request that can cause whole API to hang when used from python
        or other external calls.

        On failures it'll raise urllib2.HTTPError, exception is also thrown
        when the return code is non 200
        """
        # check first if it's not a local url
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # Request the _remote to verify the url
        return connection.Hg.check_url(url, config.serialize())

    @staticmethod
    def is_valid_repository(path):
        return os.path.isdir(os.path.join(path, '.hg'))

    def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
        """
        Function will check for mercurial repository in given path. If there
        is no repository in that path it will raise an exception unless
        `create` parameter is set to True - in that case repository would
        be created.

        If `src_url` is given, would try to clone repository from the
        location at given clone_point. Additionally it'll make update to
        working copy accordingly to `do_workspace_checkout` flag.
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if src_url:
            url = str(self._get_url(src_url))
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, do_workspace_checkout)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            os.makedirs(self.path, mode=0o755)
            self._remote.localrepository(create)

    @LazyProperty
    def in_memory_commit(self):
        return MercurialInMemoryCommit(self)

    @LazyProperty
    def description(self):
        description = self._remote.get_config_value(
            'web', 'description', untrusted=True)
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)

    @LazyProperty
    def contact(self):
        contact = (
            self._remote.get_config_value("web", "contact") or
            self._remote.get_config_value("ui", "username"))
        return safe_unicode(contact or self.DEFAULT_CONTACT)

    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)

    def _get_fs_mtime(self):
        # fallback to filesystem
        cl_path = os.path.join(self.path, '.hg', "00changelog.i")
        st_path = os.path.join(self.path, '.hg', "store")
        if os.path.exists(cl_path):
            return os.stat(cl_path).st_mtime
        else:
            return os.stat(st_path).st_mtime

    def _get_url(self, url):
        """
        Returns normalized url. If schema is not given, would fall
        to filesystem
        (``file:///``) schema.
        """
        url = url.encode('utf8')
        if url != 'default' and '://' not in url:
            url = "file:" + urllib.pathname2url(url)
        return url

    def get_hook_location(self):
        """
        returns absolute path to location where hooks are stored
        """
        return os.path.join(self.path, '.hg', '.hgrc')

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    commit_idx = self.commit_ids.index(_commit_id)

                return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                commit_id = commit_idx
        else:
            commit_id = "tip"

        if isinstance(commit_id, unicode):
            commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            msg = "Commit {} does not exist for `{}`".format(
                *map(safe_str, [commit_id, self.name]))
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)

    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
            ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
            ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
            branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
            ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        branch_ancestors = False
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            end_pos += 1

        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)

    def pull(self, url, commit_ids=None):
566 """
566 """
567 Pull changes from external location.
567 Pull changes from external location.
568
568
569 :param commit_ids: Optional. Can be set to a list of commit ids
569 :param commit_ids: Optional. Can be set to a list of commit ids
570 which shall be pulled from the other repository.
570 which shall be pulled from the other repository.
571 """
571 """
572 url = self._get_url(url)
572 url = self._get_url(url)
573 self._remote.pull(url, commit_ids=commit_ids)
573 self._remote.pull(url, commit_ids=commit_ids)
574 self._remote.invalidate_vcs_cache()
574 self._remote.invalidate_vcs_cache()
575
575
576 def fetch(self, url, commit_ids=None):
576 def fetch(self, url, commit_ids=None):
577 """
577 """
578 Backward compatibility with GIT fetch==pull
578 Backward compatibility with GIT fetch==pull
579 """
579 """
580 return self.pull(url, commit_ids=commit_ids)
580 return self.pull(url, commit_ids=commit_ids)
581
581
582 def push(self, url):
582 def push(self, url):
583 url = self._get_url(url)
583 url = self._get_url(url)
584 self._remote.sync_push(url)
584 self._remote.sync_push(url)
585
585
586 def _local_clone(self, clone_path):
586 def _local_clone(self, clone_path):
587 """
587 """
588 Create a local clone of the current repo.
588 Create a local clone of the current repo.
589 """
589 """
590 self._remote.clone(self.path, clone_path, update_after_clone=True,
590 self._remote.clone(self.path, clone_path, update_after_clone=True,
591 hooks=False)
591 hooks=False)
592
592
593 def _update(self, revision, clean=False):
593 def _update(self, revision, clean=False):
594 """
594 """
595 Update the working copy to the specified revision.
595 Update the working copy to the specified revision.
596 """
596 """
597 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
597 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
598 self._remote.update(revision, clean=clean)
598 self._remote.update(revision, clean=clean)
599
599
600 def _identify(self):
600 def _identify(self):
601 """
601 """
602 Return the current state of the working directory.
602 Return the current state of the working directory.
603 """
603 """
604 return self._remote.identify().strip().rstrip('+')
604 return self._remote.identify().strip().rstrip('+')
605
605
606 def _heads(self, branch=None):
606 def _heads(self, branch=None):
607 """
607 """
608 Return the commit ids of the repository heads.
608 Return the commit ids of the repository heads.
609 """
609 """
610 return self._remote.heads(branch=branch).strip().split(' ')
610 return self._remote.heads(branch=branch).strip().split(' ')
611
611
612 def _ancestor(self, revision1, revision2):
612 def _ancestor(self, revision1, revision2):
613 """
613 """
614 Return the common ancestor of the two revisions.
614 Return the common ancestor of the two revisions.
615 """
615 """
616 return self._remote.ancestor(revision1, revision2)
616 return self._remote.ancestor(revision1, revision2)
617
617
618 def _local_push(
618 def _local_push(
619 self, revision, repository_path, push_branches=False,
619 self, revision, repository_path, push_branches=False,
620 enable_hooks=False):
620 enable_hooks=False):
621 """
621 """
622 Push the given revision to the specified repository.
622 Push the given revision to the specified repository.
623
623
624 :param push_branches: allow to create branches in the target repo.
624 :param push_branches: allow to create branches in the target repo.
625 """
625 """
626 self._remote.push(
626 self._remote.push(
627 [revision], repository_path, hooks=enable_hooks,
627 [revision], repository_path, hooks=enable_hooks,
628 push_branches=push_branches)
628 push_branches=push_branches)
629
629
630 def _local_merge(self, target_ref, merge_message, user_name, user_email,
630 def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.
        """
        source_ref_commit_id = source_ref.commit_id
        target_ref_commit_id = target_ref.commit_id

        # update our workdir to target ref, for proper merge
        self._update(target_ref_commit_id, clean=True)

        ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if close_commit_id:
            # NOTE(marcink): if we get the close commit, this is our new source
            # which will include the close commit itself.
            source_ref_commit_id = close_commit_id

        if ancestor == source_ref_commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref_commit_id, False

        elif ancestor == target_ref_commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref_commit_id, True

        unresolved = None
        if use_rebase:
            try:
                bookmark_name = 'rcbook%s%s' % (source_ref_commit_id, target_ref_commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref_commit_id, dest=target_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError as e:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')
                if 'unresolved conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
        else:
            try:
                self._remote.merge(source_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError as e:
                # The merge-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while merging shadow repo during merge.')
                if 'unresolved merge conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise

    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_revision

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.
        """
        self._update(source_ref.commit_id)
        message = close_message or "Closing branch: `{}`".format(source_ref.name)
        try:
            self._remote.commit(
                message=safe_str(message),
                username=safe_str('%s <%s>' % (user_name, user_email)),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise

    def _is_the_same_branch(self, target_ref, source_ref):
        return (
            self._get_branch_name(target_ref) ==
            self._get_branch_name(source_ref))

    def _get_branch_name(self, ref):
        if ref.type == 'branch':
            return ref.name
        return self._remote.ctx_branch(ref.commit_id)

    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(shadow_repository_path)
            log.debug(
                'Prepared shadow repository in %s', shadow_repository_path)

        return shadow_repository_path

    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
-                heads = '\n,'.join(self._heads(target_ref.name))
+                heads_all = self._heads(target_ref.name)
+                max_heads = 10
+                if len(heads_all) > max_heads:
+                    heads = '\n,'.join(
+                        heads_all[:max_heads] +
+                        ['and {} more.'.format(len(heads_all)-max_heads)])
+                else:
+                    heads = '\n,'.join(heads_all)
769 metadata = {
776 metadata = {
770 'target_ref': target_ref,
777 'target_ref': target_ref,
771 'source_ref': source_ref,
778 'source_ref': source_ref,
772 'heads': heads
779 'heads': heads
773 }
780 }
774 return MergeResponse(
781 return MergeResponse(
775 False, False, None,
782 False, False, None,
776 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
783 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
777 metadata=metadata)
784 metadata=metadata)
778 except CommitDoesNotExistError:
785 except CommitDoesNotExistError:
779 log.exception('Failure when looking up branch heads on hg target')
786 log.exception('Failure when looking up branch heads on hg target')
780 return MergeResponse(
787 return MergeResponse(
781 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
788 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
782 metadata={'target_ref': target_ref})
789 metadata={'target_ref': target_ref})
783
790
784 shadow_repository_path = self._maybe_prepare_merge_workspace(
791 shadow_repository_path = self._maybe_prepare_merge_workspace(
785 repo_id, workspace_id, target_ref, source_ref)
792 repo_id, workspace_id, target_ref, source_ref)
786 shadow_repo = self.get_shadow_instance(shadow_repository_path)
793 shadow_repo = self.get_shadow_instance(shadow_repository_path)
787
794
788 log.debug('Pulling in target reference %s', target_ref)
795 log.debug('Pulling in target reference %s', target_ref)
789 self._validate_pull_reference(target_ref)
796 self._validate_pull_reference(target_ref)
790 shadow_repo._local_pull(self.path, target_ref)
797 shadow_repo._local_pull(self.path, target_ref)
791
798
792 try:
799 try:
793 log.debug('Pulling in source reference %s', source_ref)
800 log.debug('Pulling in source reference %s', source_ref)
794 source_repo._validate_pull_reference(source_ref)
801 source_repo._validate_pull_reference(source_ref)
795 shadow_repo._local_pull(source_repo.path, source_ref)
802 shadow_repo._local_pull(source_repo.path, source_ref)
796 except CommitDoesNotExistError:
803 except CommitDoesNotExistError:
797 log.exception('Failure when doing local pull on hg shadow repo')
804 log.exception('Failure when doing local pull on hg shadow repo')
798 return MergeResponse(
805 return MergeResponse(
799 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
806 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
800 metadata={'source_ref': source_ref})
807 metadata={'source_ref': source_ref})
801
808
802 merge_ref = None
809 merge_ref = None
803 merge_commit_id = None
810 merge_commit_id = None
804 close_commit_id = None
811 close_commit_id = None
805 merge_failure_reason = MergeFailureReason.NONE
812 merge_failure_reason = MergeFailureReason.NONE
806 metadata = {}
813 metadata = {}
807
814
808 # enforce that close branch should be used only in case we source from
815 # enforce that close branch should be used only in case we source from
809 # an actual Branch
816 # an actual Branch
810 close_branch = close_branch and source_ref.type == 'branch'
817 close_branch = close_branch and source_ref.type == 'branch'
811
818
812 # don't allow to close branch if source and target are the same
819 # don't allow to close branch if source and target are the same
813 close_branch = close_branch and source_ref.name != target_ref.name
820 close_branch = close_branch and source_ref.name != target_ref.name
814
821
815 needs_push_on_close = False
822 needs_push_on_close = False
816 if close_branch and not use_rebase and not dry_run:
823 if close_branch and not use_rebase and not dry_run:
817 try:
824 try:
818 close_commit_id, needs_push_on_close = shadow_repo._local_close(
825 close_commit_id, needs_push_on_close = shadow_repo._local_close(
819 target_ref, merger_name, merger_email, source_ref)
826 target_ref, merger_name, merger_email, source_ref)
820 merge_possible = True
827 merge_possible = True
821 except RepositoryError:
828 except RepositoryError:
822 log.exception('Failure when doing close branch on '
829 log.exception('Failure when doing close branch on '
823 'shadow repo: %s', shadow_repo)
830 'shadow repo: %s', shadow_repo)
824 merge_possible = False
831 merge_possible = False
825 merge_failure_reason = MergeFailureReason.MERGE_FAILED
832 merge_failure_reason = MergeFailureReason.MERGE_FAILED
826 else:
833 else:
827 merge_possible = True
834 merge_possible = True
828
835
829 needs_push = False
836 needs_push = False
830 if merge_possible:
837 if merge_possible:
831
838
832 try:
839 try:
833 merge_commit_id, needs_push = shadow_repo._local_merge(
840 merge_commit_id, needs_push = shadow_repo._local_merge(
834 target_ref, merge_message, merger_name, merger_email,
841 target_ref, merge_message, merger_name, merger_email,
835 source_ref, use_rebase=use_rebase,
842 source_ref, use_rebase=use_rebase,
836 close_commit_id=close_commit_id, dry_run=dry_run)
843 close_commit_id=close_commit_id, dry_run=dry_run)
837 merge_possible = True
844 merge_possible = True
838
845
839 # read the state of the close action, if it
846 # read the state of the close action, if it
840 # maybe required a push
847 # maybe required a push
841 needs_push = needs_push or needs_push_on_close
848 needs_push = needs_push or needs_push_on_close
842
849
843 # Set a bookmark pointing to the merge commit. This bookmark
850 # Set a bookmark pointing to the merge commit. This bookmark
844 # may be used to easily identify the last successful merge
851 # may be used to easily identify the last successful merge
845 # commit in the shadow repository.
852 # commit in the shadow repository.
846 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
853 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
847 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
854 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError as e:
                log.exception('Failure when doing local merge on hg shadow repo')
                if isinstance(e, UnresolvedFilesInRepo):
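                    # cap how many unresolved files get recorded, so the
                    # stored merge metadata stays small even for large merges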
                    all_conflicts = list(e.args[0])
                    max_conflicts = 20
                    if len(all_conflicts) > max_conflicts:
                        conflicts = all_conflicts[:max_conflicts] \
                            + ['and {} more.'.format(len(all_conflicts) - max_conflicts)]
                    else:
                        conflicts = all_conflicts
                    metadata['unresolved_files'] = \
                        '\n* conflict: ' + \
                        ('\n * conflict: '.join(conflicts))
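                    # e.g. two conflicting files (hypothetical names) end up as:
                    #   "\n* conflict: file_a\n * conflict: file_b"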

                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmark is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action: we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # we may also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)

    def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
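        # build a repository object for the shadow copy; hooks are removed
        # from the copied config unless enable_hooks is requested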
        config = self.config.copy()
        if not enable_hooks:
            config.clear_section('hooks')
        return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})

    def _validate_pull_reference(self, reference):
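        # the reference must be a known bookmark, branch or resolvable commit
        # id, otherwise pulling it into the shadow repository cannot work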
        if not (reference.name in self.bookmarks or
                reference.name in self.branches or
                self.get_commit(reference.commit_id)):
            raise CommitDoesNotExistError(
                'Unknown branch, bookmark or commit id')

    def _local_pull(self, repository_path, reference):
        """
        Fetch a branch, bookmark or commit from a local repository.
        """
        repository_path = os.path.abspath(repository_path)
        if repository_path == self.path:
            raise ValueError('Cannot pull from the same repository')

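        # map the reference type onto the matching pull option; anything that
        # is neither a bookmark nor a branch is pulled as a plain revision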
        reference_type_to_option_name = {
            'book': 'bookmark',
            'branch': 'branch',
        }
        option_name = reference_type_to_option_name.get(
            reference.type, 'revision')

        if option_name == 'revision':
            ref = reference.commit_id
        else:
            ref = reference.name

        options = {option_name: [ref]}
        self._remote.pull_cmd(repository_path, hooks=False, **options)
        self._remote.invalidate_vcs_cache()

    def bookmark(self, bookmark, revision=None):
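        # normalize the bookmark name to a plain string, set it on the given
        # revision via the remote, and drop the cached vcs state afterwards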
        if isinstance(bookmark, unicode):
            bookmark = safe_str(bookmark)
        self._remote.bookmark(bookmark, revision=revision)
        self._remote.invalidate_vcs_cache()

    def get_path_permissions(self, username):
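        # per-path permissions are read from an optional .hg/hgacl file inside
        # the repository; if that file is absent no path checker is returned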
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
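            # return the first matching pattern list for this user, falling
            # back to the 'default' entries of either acl section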
            svalue = None
            for section, option in [
                    ('narrowacl', username + suffix),
                    ('narrowacl', 'default' + suffix),
                    ('narrowhgacl', username + suffix),
                    ('narrowhgacl', 'default' + suffix)
            ]:
                try:
                    svalue = hgacl.get(section, option)
                    break  # stop at the first value we find
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
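                # a literal path (no wildcards) is expanded to also match
                # everything nested beneath it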
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None


class MercurialIndexBasedCollectionGenerator(CollectionGenerator):

    def _commit_factory(self, commit_id):
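        # commits in this collection are looked up by local revision index
        # (commit_idx) rather than by raw commit id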
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)