fix(mercurial): actually use assert_path instead of always initializing repo object....
super-admin
r5581:68a5b57b default
@@ -1,1030 +1,1036 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 HG repository module
20 HG repository module
21 """
21 """
22 import os
22 import os
23 import logging
23 import logging
24 import binascii
24 import binascii
25 import configparser
25 import configparser
26 import urllib.request
26 import urllib.request
27 import urllib.parse
27 import urllib.parse
28 import urllib.error
28 import urllib.error
29
29
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from collections import OrderedDict
32 from collections import OrderedDict
33 from rhodecode.lib.datelib import (
33 from rhodecode.lib.datelib import (
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 from rhodecode.lib.str_utils import safe_str
35 from rhodecode.lib.str_utils import safe_str
36 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.utils2 import CachedProperty
37 from rhodecode.lib.vcs import connection, exceptions
37 from rhodecode.lib.vcs import connection, exceptions
38 from rhodecode.lib.vcs.conf import settings as vcs_settings
38 from rhodecode.lib.vcs.conf import settings as vcs_settings
39 from rhodecode.lib.vcs.backends.base import (
39 from rhodecode.lib.vcs.backends.base import (
40 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 BaseRepository, CollectionGenerator, Config, MergeResponse,
41 MergeFailureReason, Reference, BasePathPermissionChecker)
41 MergeFailureReason, Reference, BasePathPermissionChecker)
42 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
43 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
44 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
45 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
46 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
47 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
47 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
48
48
49 hexlify = binascii.hexlify
49 hexlify = binascii.hexlify
50 nullid = "\0" * 20
50 nullid = "\0" * 20
51
51
52 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
53
53
54
54
55 class MercurialRepository(BaseRepository):
55 class MercurialRepository(BaseRepository):
56 """
56 """
57 Mercurial repository backend
57 Mercurial repository backend
58 """
58 """
59 DEFAULT_BRANCH_NAME = 'default'
59 DEFAULT_BRANCH_NAME = 'default'
60
60
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 do_workspace_checkout=False, with_wire=None, bare=False):
62 do_workspace_checkout=False, with_wire=None, bare=False):
63 """
63 """
64 Raises RepositoryError if the repository cannot be found at the given
64 Raises RepositoryError if the repository cannot be found at the given
65 ``repo_path``.
65 ``repo_path``.
66
66
67 :param repo_path: local path of the repository
67 :param repo_path: local path of the repository
68 :param config: config object containing the repo configuration
68 :param config: config object containing the repo configuration
69 :param create=False: if set to True, will try to create the repository if
69 :param create=False: if set to True, will try to create the repository if
70 it does not exist, rather than raising an exception
70 it does not exist, rather than raising an exception
71 :param src_url=None: will try to clone the repository from the given location
71 :param src_url=None: will try to clone the repository from the given location
72 :param do_workspace_checkout=False: whether to update the working copy after
72 :param do_workspace_checkout=False: whether to update the working copy after
73 making a clone
73 making a clone
74 :param bare: not used, compatible with other VCS
74 :param bare: not used, compatible with other VCS
75 """
75 """
76
76
77 self.path = safe_str(os.path.abspath(repo_path))
77 self.path = safe_str(os.path.abspath(repo_path))
78 # mercurial since 4.4.X requires certain configuration to be present
78 # mercurial since 4.4.X requires certain configuration to be present
79 # because we sometimes init repos with a config, we need to meet these
79 # because we sometimes init repos with a config, we need to meet these
80 # special requirements
80 # special requirements
81 self.config = config if config else self.get_default_config(
81 self.config = config if config else self.get_default_config(
82 default=[('extensions', 'largefiles', '')])
82 default=[('extensions', 'largefiles', '')])
83
83
84 # NOTE(marcink): since python3, hgsubversion is deprecated.
84 # NOTE(marcink): since python3, hgsubversion is deprecated.
85 # Old installations might still have this extension enabled;
85 # Old installations might still have this extension enabled;
86 # we explicitly remove it here to make sure it won't propagate further
86 # we explicitly remove it here to make sure it won't propagate further
87 if config and config.get('extensions', 'hgsubversion') is not None:
87 if config and config.get('extensions', 'hgsubversion') is not None:
88 config.drop_option('extensions', 'hgsubversion')
88 config.drop_option('extensions', 'hgsubversion')
89
89
90 self.with_wire = with_wire or {"cache": False} # default should not use cache
90 self.with_wire = with_wire or {"cache": False} # default should not use cache
91
91
92 self._init_repo(create, src_url, do_workspace_checkout)
92 self._init_repo(create, src_url, do_workspace_checkout)
93
93
94 # caches
94 # caches
95 self._commit_ids = {}
95 self._commit_ids = {}
96
96
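As a quick orientation for reviewers, here is a minimal usage sketch of the constructor above; the repository paths and the clone URL are hypothetical, and the import uses this module's own path.

    from rhodecode.lib.vcs.backends.hg.repository import MercurialRepository

    # open an existing repository (per the docstring, a missing repo raises RepositoryError)
    repo = MercurialRepository('/srv/repos/my-hg-repo')

    # create a new repository by cloning from a remote location
    cloned = MercurialRepository(
        '/srv/repos/my-clone',
        create=True,
        src_url='https://code.example.com/hg/upstream',
        do_workspace_checkout=False)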
97 @LazyProperty
97 @LazyProperty
98 def _remote(self):
98 def _remote(self):
99 repo_id = self.path
99 repo_id = self.path
100 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
100 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
101
101
102 @CachedProperty
102 @CachedProperty
103 def commit_ids(self):
103 def commit_ids(self):
104 """
104 """
105 Returns a list of commit ids, in ascending order. Being a lazy
105 Returns a list of commit ids, in ascending order. Being a lazy
106 attribute allows external tools to inject SHAs from a cache.
106 attribute allows external tools to inject SHAs from a cache.
107 """
107 """
108 commit_ids = self._get_all_commit_ids()
108 commit_ids = self._get_all_commit_ids()
109 self._rebuild_cache(commit_ids)
109 self._rebuild_cache(commit_ids)
110 return commit_ids
110 return commit_ids
111
111
112 def _rebuild_cache(self, commit_ids):
112 def _rebuild_cache(self, commit_ids):
113 self._commit_ids = {commit_id: index
113 self._commit_ids = {commit_id: index
114 for index, commit_id in enumerate(commit_ids)}
114 for index, commit_id in enumerate(commit_ids)}
115
115
116 @CachedProperty
116 @CachedProperty
117 def branches(self):
117 def branches(self):
118 return self._get_branches()
118 return self._get_branches()
119
119
120 @CachedProperty
120 @CachedProperty
121 def branches_closed(self):
121 def branches_closed(self):
122 return self._get_branches(active=False, closed=True)
122 return self._get_branches(active=False, closed=True)
123
123
124 @CachedProperty
124 @CachedProperty
125 def branches_all(self):
125 def branches_all(self):
126 all_branches = {}
126 all_branches = {}
127 all_branches.update(self.branches)
127 all_branches.update(self.branches)
128 all_branches.update(self.branches_closed)
128 all_branches.update(self.branches_closed)
129 return all_branches
129 return all_branches
130
130
131 def _get_branches(self, active=True, closed=False):
131 def _get_branches(self, active=True, closed=False):
132 """
132 """
133 Gets branches for this repository
133 Gets branches for this repository
134 Returns only open (not closed), active branches by default
134 Returns only open (not closed), active branches by default
135
135
136 :param active: return also active branches
136 :param active: return also active branches
137 :param closed: return also closed branches
137 :param closed: return also closed branches
138
138
139 """
139 """
140 if self.is_empty():
140 if self.is_empty():
141 return {}
141 return {}
142
142
143 def get_name(ctx):
143 def get_name(ctx):
144 return ctx[0]
144 return ctx[0]
145
145
146 _branches = [(n, h,) for n, h in
146 _branches = [(n, h,) for n, h in
147 self._remote.branches(active, closed).items()]
147 self._remote.branches(active, closed).items()]
148
148
149 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
149 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
150
150
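Illustrative only: the cached branch properties above expose plain name-to-commit-id mappings; the repository path is hypothetical.

    repo = MercurialRepository('/srv/repos/my-hg-repo')

    open_branches = repo.branches            # active, not closed
    closed_branches = repo.branches_closed   # closed branches only
    everything = repo.branches_all           # union of both

    tip_of_default = open_branches.get(MercurialRepository.DEFAULT_BRANCH_NAME)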
151 @CachedProperty
151 @CachedProperty
152 def tags(self):
152 def tags(self):
153 """
153 """
154 Gets tags for this repository
154 Gets tags for this repository
155 """
155 """
156 return self._get_tags()
156 return self._get_tags()
157
157
158 def _get_tags(self):
158 def _get_tags(self):
159 if self.is_empty():
159 if self.is_empty():
160 return {}
160 return {}
161
161
162 def get_name(ctx):
162 def get_name(ctx):
163 return ctx[0]
163 return ctx[0]
164
164
165 _tags = [(n, h,) for n, h in
165 _tags = [(n, h,) for n, h in
166 self._remote.tags().items()]
166 self._remote.tags().items()]
167
167
168 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
168 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
169
169
170 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
170 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
171 """
171 """
172 Creates and returns a tag for the given ``commit_id``.
172 Creates and returns a tag for the given ``commit_id``.
173
173
174 :param name: name for new tag
174 :param name: name for new tag
175 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
175 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
176 :param commit_id: commit id for which new tag would be created
176 :param commit_id: commit id for which new tag would be created
177 :param message: message of the tag's commit
177 :param message: message of the tag's commit
178 :param date: date of tag's commit
178 :param date: date of tag's commit
179
179
180 :raises TagAlreadyExistError: if a tag with the same name already exists
180 :raises TagAlreadyExistError: if a tag with the same name already exists
181 """
181 """
182 if name in self.tags:
182 if name in self.tags:
183 raise TagAlreadyExistError("Tag %s already exists" % name)
183 raise TagAlreadyExistError("Tag %s already exists" % name)
184
184
185 commit = self.get_commit(commit_id=commit_id)
185 commit = self.get_commit(commit_id=commit_id)
186 local = kwargs.setdefault('local', False)
186 local = kwargs.setdefault('local', False)
187
187
188 if message is None:
188 if message is None:
189 message = f"Added tag {name} for commit {commit.short_id}"
189 message = f"Added tag {name} for commit {commit.short_id}"
190
190
191 date, tz = date_to_timestamp_plus_offset(date)
191 date, tz = date_to_timestamp_plus_offset(date)
192
192
193 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
193 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
194 self._remote.invalidate_vcs_cache()
194 self._remote.invalidate_vcs_cache()
195
195
196 # Reinitialize tags
196 # Reinitialize tags
197 self._invalidate_prop_cache('tags')
197 self._invalidate_prop_cache('tags')
198 tag_id = self.tags[name]
198 tag_id = self.tags[name]
199
199
200 return self.get_commit(commit_id=tag_id)
200 return self.get_commit(commit_id=tag_id)
201
201
202 def remove_tag(self, name, user, message=None, date=None):
202 def remove_tag(self, name, user, message=None, date=None):
203 """
203 """
204 Removes tag with the given `name`.
204 Removes tag with the given `name`.
205
205
206 :param name: name of the tag to be removed
206 :param name: name of the tag to be removed
207 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
207 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
208 :param message: message of the tag's removal commit
208 :param message: message of the tag's removal commit
209 :param date: date of tag's removal commit
209 :param date: date of tag's removal commit
210
210
211 :raises TagDoesNotExistError: if a tag with the given name does not exist
211 :raises TagDoesNotExistError: if a tag with the given name does not exist
212 """
212 """
213 if name not in self.tags:
213 if name not in self.tags:
214 raise TagDoesNotExistError("Tag %s does not exist" % name)
214 raise TagDoesNotExistError("Tag %s does not exist" % name)
215
215
216 if message is None:
216 if message is None:
217 message = "Removed tag %s" % name
217 message = "Removed tag %s" % name
218 local = False
218 local = False
219
219
220 date, tz = date_to_timestamp_plus_offset(date)
220 date, tz = date_to_timestamp_plus_offset(date)
221
221
222 self._remote.tag(name, nullid, message, local, user, date, tz)
222 self._remote.tag(name, nullid, message, local, user, date, tz)
223 self._remote.invalidate_vcs_cache()
223 self._remote.invalidate_vcs_cache()
224 self._invalidate_prop_cache('tags')
224 self._invalidate_prop_cache('tags')
225
225
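A hedged usage sketch of the two tag methods above; the tag name, user string and messages are made up.

    author = 'Joe Doe <joe.doe@example.com>'

    # tag the current tip (commit_ids is ascending, so [-1] is the newest commit)
    tag_commit = repo.tag('v1.0.0', author,
                          commit_id=repo.commit_ids[-1],
                          message='Added tag v1.0.0')

    # and remove it again; raises TagDoesNotExistError for unknown names
    repo.remove_tag('v1.0.0', author, message='Removed tag v1.0.0')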
226 @LazyProperty
226 @LazyProperty
227 def bookmarks(self):
227 def bookmarks(self):
228 """
228 """
229 Gets bookmarks for this repository
229 Gets bookmarks for this repository
230 """
230 """
231 return self._get_bookmarks()
231 return self._get_bookmarks()
232
232
233 def _get_bookmarks(self):
233 def _get_bookmarks(self):
234 if self.is_empty():
234 if self.is_empty():
235 return {}
235 return {}
236
236
237 def get_name(ctx):
237 def get_name(ctx):
238 return ctx[0]
238 return ctx[0]
239
239
240 _bookmarks = [
240 _bookmarks = [
241 (n, h) for n, h in
241 (n, h) for n, h in
242 self._remote.bookmarks().items()]
242 self._remote.bookmarks().items()]
243
243
244 return OrderedDict(sorted(_bookmarks, key=get_name))
244 return OrderedDict(sorted(_bookmarks, key=get_name))
245
245
246 def _get_all_commit_ids(self):
246 def _get_all_commit_ids(self):
247 return self._remote.get_all_commit_ids('visible')
247 return self._remote.get_all_commit_ids('visible')
248
248
249 def get_diff(
249 def get_diff(
250 self, commit1, commit2, path='', ignore_whitespace=False,
250 self, commit1, commit2, path='', ignore_whitespace=False,
251 context=3, path1=None):
251 context=3, path1=None):
252 """
252 """
253 Returns (git like) *diff*, as plain text. Shows changes introduced by
253 Returns (git like) *diff*, as plain text. Shows changes introduced by
254 `commit2` since `commit1`.
254 `commit2` since `commit1`.
255
255
256 :param commit1: Entry point from which diff is shown. Can be
256 :param commit1: Entry point from which diff is shown. Can be
257 ``self.EMPTY_COMMIT`` - in this case, the patch shows all
257 ``self.EMPTY_COMMIT`` - in this case, the patch shows all
258 changes from the empty state of the repository up to `commit2`
258 changes from the empty state of the repository up to `commit2`
259 :param commit2: The commit up to which changes are shown.
259 :param commit2: The commit up to which changes are shown.
260 :param ignore_whitespace: If set to ``True``, would not show whitespace
260 :param ignore_whitespace: If set to ``True``, would not show whitespace
261 changes. Defaults to ``False``.
261 changes. Defaults to ``False``.
262 :param context: How many lines before/after changed lines should be
262 :param context: How many lines before/after changed lines should be
263 shown. Defaults to ``3``.
263 shown. Defaults to ``3``.
264 """
264 """
265 self._validate_diff_commits(commit1, commit2)
265 self._validate_diff_commits(commit1, commit2)
266 if path1 is not None and path1 != path:
266 if path1 is not None and path1 != path:
267 raise ValueError("Diff of two different paths not supported.")
267 raise ValueError("Diff of two different paths not supported.")
268
268
269 if path:
269 if path:
270 file_filter = [self.path, path]
270 file_filter = [self.path, path]
271 else:
271 else:
272 file_filter = None
272 file_filter = None
273
273
274 diff = self._remote.diff(
274 diff = self._remote.diff(
275 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
275 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
276 opt_git=True, opt_ignorews=ignore_whitespace,
276 opt_git=True, opt_ignorews=ignore_whitespace,
277 context=context)
277 context=context)
278 return MercurialDiff(diff)
278 return MercurialDiff(diff)
279
279
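Sketch of how get_diff is typically driven with commit objects rather than raw ids; the file path is hypothetical.

    old = repo.get_commit(commit_idx=0)   # first commit
    new = repo.get_commit()               # defaults to "tip"

    # full-repo diff, or narrowed down to a single path
    full_diff = repo.get_diff(old, new)
    file_diff = repo.get_diff(old, new, path='docs/README.rst', context=5)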
280 def strip(self, commit_id, branch=None):
280 def strip(self, commit_id, branch=None):
281 self._remote.strip(commit_id, update=False, backup=False)
281 self._remote.strip(commit_id, update=False, backup=False)
282
282
283 self._remote.invalidate_vcs_cache()
283 self._remote.invalidate_vcs_cache()
284 # clear cache
284 # clear cache
285 self._invalidate_prop_cache('commit_ids')
285 self._invalidate_prop_cache('commit_ids')
286
286
287 return len(self.commit_ids)
287 return len(self.commit_ids)
288
288
289 def verify(self):
289 def verify(self):
290 verify = self._remote.verify()
290 verify = self._remote.verify()
291
291
292 self._remote.invalidate_vcs_cache()
292 self._remote.invalidate_vcs_cache()
293 return verify
293 return verify
294
294
295 def hg_update_cache(self):
295 def hg_update_cache(self):
296 update_cache = self._remote.hg_update_cache()
296 update_cache = self._remote.hg_update_cache()
297
297
298 self._remote.invalidate_vcs_cache()
298 self._remote.invalidate_vcs_cache()
299 return update_cache
299 return update_cache
300
300
301 def hg_rebuild_fn_cache(self):
301 def hg_rebuild_fn_cache(self):
302 update_cache = self._remote.hg_rebuild_fn_cache()
302 update_cache = self._remote.hg_rebuild_fn_cache()
303
303
304 self._remote.invalidate_vcs_cache()
304 self._remote.invalidate_vcs_cache()
305 return update_cache
305 return update_cache
306
306
307 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
307 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
308 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
308 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
309 self, commit_id1, repo2, commit_id2)
309 self, commit_id1, repo2, commit_id2)
310
310
311 if commit_id1 == commit_id2:
311 if commit_id1 == commit_id2:
312 return commit_id1
312 return commit_id1
313
313
314 ancestors = self._remote.revs_from_revspec(
314 ancestors = self._remote.revs_from_revspec(
315 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
315 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
316 other_path=repo2.path)
316 other_path=repo2.path)
317
317
318 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
318 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
319
319
320 log.debug('Found common ancestor with sha: %s', ancestor_id)
320 log.debug('Found common ancestor with sha: %s', ancestor_id)
321 return ancestor_id
321 return ancestor_id
322
322
323 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
323 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
324 if commit_id1 == commit_id2:
324 if commit_id1 == commit_id2:
325 commits = []
325 commits = []
326 else:
326 else:
327 if merge:
327 if merge:
328 indexes = self._remote.revs_from_revspec(
328 indexes = self._remote.revs_from_revspec(
329 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
329 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
330 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
330 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
331 else:
331 else:
332 indexes = self._remote.revs_from_revspec(
332 indexes = self._remote.revs_from_revspec(
333 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
333 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
334 commit_id1, other_path=repo2.path)
334 commit_id1, other_path=repo2.path)
335
335
336 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
336 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
337 for idx in indexes]
337 for idx in indexes]
338
338
339 return commits
339 return commits
340
340
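Hedged sketch: both helpers above take raw commit ids plus the other repository instance, which is how comparison code typically calls them; the id variables are placeholders.

    # commit_id1 / commit_id2 are raw commit ids from the two repos being compared
    ancestor_id = repo.get_common_ancestor(commit_id1, commit_id2, other_repo)

    # commits reachable from commit_id2 but not from commit_id1 (merge semantics)
    new_commits = repo.compare(commit_id1, commit_id2, other_repo, merge=True)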
341 @staticmethod
341 @staticmethod
342 def check_url(url, config):
342 def check_url(url, config):
343 """
343 """
344 Function will check the given url and try to verify that it's a valid
344 Function will check the given url and try to verify that it's a valid
345 link. Sometimes it may happen that mercurial issues a basic
345 link. Sometimes it may happen that mercurial issues a basic
346 auth request, which can cause the whole API to hang when used from python
346 auth request, which can cause the whole API to hang when used from python
347 or other external calls.
347 or other external calls.
348
348
349 On failures it'll raise urllib.error.HTTPError; an exception is also raised
349 On failures it'll raise urllib.error.HTTPError; an exception is also raised
350 when the return code is not 200
350 when the return code is not 200
351 """
351 """
352 # check first if it's not a local url
352 # check first if it's not a local url
353 if os.path.isdir(url) or url.startswith('file:'):
353 if os.path.isdir(url) or url.startswith('file:'):
354 return True
354 return True
355
355
356 # Request the _remote to verify the url
356 # Request the _remote to verify the url
357 return connection.Hg.check_url(url, config.serialize())
357 return connection.Hg.check_url(url, config.serialize())
358
358
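For illustration, the two static helpers above can be used stand-alone before instantiating a repository object; the path and URL are hypothetical.

    if MercurialRepository.is_valid_repository('/srv/repos/my-hg-repo'):
        repo = MercurialRepository('/srv/repos/my-hg-repo')

    # verifies that a remote clone source answers correctly (may raise HTTPError)
    MercurialRepository.check_url('https://code.example.com/hg/upstream', repo.config)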
359 @staticmethod
359 @staticmethod
360 def is_valid_repository(path):
360 def is_valid_repository(path):
361 return os.path.isdir(os.path.join(path, '.hg'))
361 return os.path.isdir(os.path.join(path, '.hg'))
362
362
363 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
363 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
364 """
364 """
365 Function will check for a mercurial repository in the given path. If there
365 Function will check for a mercurial repository in the given path. If there
366 is no repository in that path it will raise an exception, unless the
366 is no repository in that path it will raise an exception, unless the
367 `create` parameter is set to True - in that case the repository will
367 `create` parameter is set to True - in that case the repository will
368 be created.
368 be created.
369
369
370 If `src_url` is given, would try to clone repository from the
370 If `src_url` is given, would try to clone repository from the
371 location at given clone_point. Additionally it'll make update to
371 location at given clone_point. Additionally, it'll make update to
372 working copy accordingly to `do_workspace_checkout` flag.
372 working copy accordingly to `do_workspace_checkout` flag.
373 """
373 """
374 if create and os.path.exists(self.path):
374 if create and os.path.exists(self.path):
375 raise RepositoryError(
375 raise RepositoryError(
376 f"Cannot create repository at {self.path}, location already exist")
376 f"Cannot create repository at {self.path}, location already exist")
377
377
378 if create:
378 if src_url:
379 if src_url:
379 url = str(self._get_url(src_url))
380 url = str(self._get_url(src_url))
380 MercurialRepository.check_url(url, self.config)
381 MercurialRepository.check_url(url, self.config)
381
382
382 self._remote.clone(url, self.path, do_workspace_checkout)
383 self._remote.clone(url, self.path, do_workspace_checkout)
383
384
384 # Don't try to create if we've already cloned repo
385 # Don't try to create if we've already cloned repo
385 create = False
386 create = False
386
387 self._remote.localrepository(create)
387 if create:
388 else:
388 os.makedirs(self.path, mode=0o755)
389 os.makedirs(self.path, mode=0o755)
390 create = True
391 self._remote.localrepository(create)
389
392
390 self._remote.localrepository(create)
393 else:
394 if not self._remote.assert_correct_path():
395 raise RepositoryError(
396 f'Path "{self.path}" does not contain a Mercurial repository')
391
397
392 @LazyProperty
398 @LazyProperty
393 def in_memory_commit(self):
399 def in_memory_commit(self):
394 return MercurialInMemoryCommit(self)
400 return MercurialInMemoryCommit(self)
395
401
396 @LazyProperty
402 @LazyProperty
397 def description(self):
403 def description(self):
398 description = self._remote.get_config_value(
404 description = self._remote.get_config_value(
399 'web', 'description', untrusted=True)
405 'web', 'description', untrusted=True)
400 return safe_str(description or self.DEFAULT_DESCRIPTION)
406 return safe_str(description or self.DEFAULT_DESCRIPTION)
401
407
402 @LazyProperty
408 @LazyProperty
403 def contact(self):
409 def contact(self):
404 contact = (
410 contact = (
405 self._remote.get_config_value("web", "contact") or
411 self._remote.get_config_value("web", "contact") or
406 self._remote.get_config_value("ui", "username"))
412 self._remote.get_config_value("ui", "username"))
407 return safe_str(contact or self.DEFAULT_CONTACT)
413 return safe_str(contact or self.DEFAULT_CONTACT)
408
414
409 @LazyProperty
415 @LazyProperty
410 def last_change(self):
416 def last_change(self):
411 """
417 """
412 Returns the last change made on this repository as a
418 Returns the last change made on this repository as a
413 `datetime.datetime` object.
419 `datetime.datetime` object.
414 """
420 """
415 try:
421 try:
416 return self.get_commit().date
422 return self.get_commit().date
417 except RepositoryError:
423 except RepositoryError:
418 tzoffset = makedate()[1]
424 tzoffset = makedate()[1]
419 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
425 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
420
426
421 def _get_fs_mtime(self):
427 def _get_fs_mtime(self):
422 # fallback to filesystem
428 # fallback to filesystem
423 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
429 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
424 st_path = os.path.join(self.path, '.hg', "store")
430 st_path = os.path.join(self.path, '.hg', "store")
425 if os.path.exists(cl_path):
431 if os.path.exists(cl_path):
426 return os.stat(cl_path).st_mtime
432 return os.stat(cl_path).st_mtime
427 else:
433 else:
428 return os.stat(st_path).st_mtime
434 return os.stat(st_path).st_mtime
429
435
430 def _get_url(self, url):
436 def _get_url(self, url):
431 """
437 """
432 Returns a normalized URL. If no scheme is given, it falls back
438 Returns a normalized URL. If no scheme is given, it falls back
433 to the filesystem
439 to the filesystem
434 (``file:///``) scheme.
440 (``file:///``) scheme.
435 """
441 """
436 if url != 'default' and '://' not in url:
442 if url != 'default' and '://' not in url:
437 url = "file:" + urllib.request.pathname2url(url)
443 url = "file:" + urllib.request.pathname2url(url)
438 return url
444 return url
439
445
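A behaviour sketch of the private helper above (not part of the change): local paths get a file: scheme, everything else is passed through; exact output of pathname2url can vary by platform.

    repo._get_url('/srv/repos/local-repo')        # -> 'file:/srv/repos/local-repo'
    repo._get_url('https://example.com/hg/repo')  # unchanged, already has a scheme
    repo._get_url('default')                      # unchanged, resolved by Mercurial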
440 def get_hook_location(self):
446 def get_hook_location(self):
441 """
447 """
442 returns the absolute path to the location where hooks are stored
448 returns the absolute path to the location where hooks are stored
443 """
449 """
444 return os.path.join(self.path, '.hg', '.hgrc')
450 return os.path.join(self.path, '.hg', '.hgrc')
445
451
446 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
452 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
447 translate_tag=None, maybe_unreachable=False, reference_obj=None):
453 translate_tag=None, maybe_unreachable=False, reference_obj=None):
448 """
454 """
449 Returns ``MercurialCommit`` object representing repository's
455 Returns ``MercurialCommit`` object representing repository's
450 commit at the given `commit_id` or `commit_idx`.
456 commit at the given `commit_id` or `commit_idx`.
451 """
457 """
452 if self.is_empty():
458 if self.is_empty():
453 raise EmptyRepositoryError("There are no commits yet")
459 raise EmptyRepositoryError("There are no commits yet")
454
460
455 if commit_id is not None:
461 if commit_id is not None:
456 self._validate_commit_id(commit_id)
462 self._validate_commit_id(commit_id)
457 try:
463 try:
458 # we have cached idx, use it without contacting the remote
464 # we have cached idx, use it without contacting the remote
459 idx = self._commit_ids[commit_id]
465 idx = self._commit_ids[commit_id]
460 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
466 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
461 except KeyError:
467 except KeyError:
462 pass
468 pass
463
469
464 elif commit_idx is not None:
470 elif commit_idx is not None:
465 self._validate_commit_idx(commit_idx)
471 self._validate_commit_idx(commit_idx)
466 try:
472 try:
467 _commit_id = self.commit_ids[commit_idx]
473 _commit_id = self.commit_ids[commit_idx]
468 if commit_idx < 0:
474 if commit_idx < 0:
469 commit_idx = self.commit_ids.index(_commit_id)
475 commit_idx = self.commit_ids.index(_commit_id)
470
476
471 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
477 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
472 except IndexError:
478 except IndexError:
473 commit_id = commit_idx
479 commit_id = commit_idx
474 else:
480 else:
475 commit_id = "tip"
481 commit_id = "tip"
476
482
477 # case here is no cached version, do an actual lookup instead
483 # case here is no cached version, do an actual lookup instead
478 try:
484 try:
479 raw_id, idx = self._remote.lookup(commit_id, both=True)
485 raw_id, idx = self._remote.lookup(commit_id, both=True)
480 except CommitDoesNotExistError:
486 except CommitDoesNotExistError:
481 msg = "Commit {} does not exist for `{}`".format(
487 msg = "Commit {} does not exist for `{}`".format(
482 *map(safe_str, [commit_id, self.name]))
488 *map(safe_str, [commit_id, self.name]))
483 raise CommitDoesNotExistError(msg)
489 raise CommitDoesNotExistError(msg)
484
490
485 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
491 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
486
492
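Minimal sketch of the three lookup modes supported by get_commit above:

    tip = repo.get_commit()                        # no arguments -> "tip"
    first = repo.get_commit(commit_idx=0)          # by index into commit_ids
    again = repo.get_commit(commit_id=tip.raw_id)  # by raw commit id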
487 def get_commits(
493 def get_commits(
488 self, start_id=None, end_id=None, start_date=None, end_date=None,
494 self, start_id=None, end_id=None, start_date=None, end_date=None,
489 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
495 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
490 """
496 """
491 Returns generator of ``MercurialCommit`` objects from start to end
497 Returns generator of ``MercurialCommit`` objects from start to end
492 (both are inclusive)
498 (both are inclusive)
493
499
494 :param start_id: None, str(commit_id)
500 :param start_id: None, str(commit_id)
495 :param end_id: None, str(commit_id)
501 :param end_id: None, str(commit_id)
496 :param start_date: if specified, commits with a commit date earlier than
502 :param start_date: if specified, commits with a commit date earlier than
497 ``start_date`` will be filtered out of the returned set
503 ``start_date`` will be filtered out of the returned set
498 :param end_date: if specified, commits with a commit date later than
504 :param end_date: if specified, commits with a commit date later than
499 ``end_date`` will be filtered out of the returned set
505 ``end_date`` will be filtered out of the returned set
500 :param branch_name: if specified, commits not reachable from the given
506 :param branch_name: if specified, commits not reachable from the given
501 branch will be filtered out of the returned set
507 branch will be filtered out of the returned set
502 :param show_hidden: Show hidden commits such as obsolete or hidden from
508 :param show_hidden: Show hidden commits such as obsolete or hidden from
503 Mercurial evolve
509 Mercurial evolve
504 :raise BranchDoesNotExistError: If given ``branch_name`` does not
510 :raise BranchDoesNotExistError: If given ``branch_name`` does not
505 exist.
511 exist.
506 :raise CommitDoesNotExistError: If commit for given ``start`` or
512 :raise CommitDoesNotExistError: If commit for given ``start`` or
507 ``end`` could not be found.
513 ``end`` could not be found.
508 """
514 """
509 # actually we should check now if it's not an empty repo
515 # actually we should check now if it's not an empty repo
510 if self.is_empty():
516 if self.is_empty():
511 raise EmptyRepositoryError("There are no commits yet")
517 raise EmptyRepositoryError("There are no commits yet")
512 self._validate_branch_name(branch_name)
518 self._validate_branch_name(branch_name)
513
519
514 branch_ancestors = False
520 branch_ancestors = False
515 if start_id is not None:
521 if start_id is not None:
516 self._validate_commit_id(start_id)
522 self._validate_commit_id(start_id)
517 c_start = self.get_commit(commit_id=start_id)
523 c_start = self.get_commit(commit_id=start_id)
518 start_pos = self._commit_ids[c_start.raw_id]
524 start_pos = self._commit_ids[c_start.raw_id]
519 else:
525 else:
520 start_pos = None
526 start_pos = None
521
527
522 if end_id is not None:
528 if end_id is not None:
523 self._validate_commit_id(end_id)
529 self._validate_commit_id(end_id)
524 c_end = self.get_commit(commit_id=end_id)
530 c_end = self.get_commit(commit_id=end_id)
525 end_pos = max(0, self._commit_ids[c_end.raw_id])
531 end_pos = max(0, self._commit_ids[c_end.raw_id])
526 else:
532 else:
527 end_pos = None
533 end_pos = None
528
534
529 if None not in [start_id, end_id] and start_pos > end_pos:
535 if None not in [start_id, end_id] and start_pos > end_pos:
530 raise RepositoryError(
536 raise RepositoryError(
531 "Start commit '%s' cannot be after end commit '%s'" %
537 "Start commit '%s' cannot be after end commit '%s'" %
532 (start_id, end_id))
538 (start_id, end_id))
533
539
534 if end_pos is not None:
540 if end_pos is not None:
535 end_pos += 1
541 end_pos += 1
536
542
537 commit_filter = []
543 commit_filter = []
538
544
539 if branch_name and not branch_ancestors:
545 if branch_name and not branch_ancestors:
540 commit_filter.append(f'branch("{branch_name}")')
546 commit_filter.append(f'branch("{branch_name}")')
541 elif branch_name and branch_ancestors:
547 elif branch_name and branch_ancestors:
542 commit_filter.append(f'ancestors(branch("{branch_name}"))')
548 commit_filter.append(f'ancestors(branch("{branch_name}"))')
543
549
544 if start_date and not end_date:
550 if start_date and not end_date:
545 commit_filter.append(f'date(">{start_date}")')
551 commit_filter.append(f'date(">{start_date}")')
546 if end_date and not start_date:
552 if end_date and not start_date:
547 commit_filter.append(f'date("<{end_date}")')
553 commit_filter.append(f'date("<{end_date}")')
548 if start_date and end_date:
554 if start_date and end_date:
549 commit_filter.append(
555 commit_filter.append(
550 f'date(">{start_date}") and date("<{end_date}")')
556 f'date(">{start_date}") and date("<{end_date}")')
551
557
552 if not show_hidden:
558 if not show_hidden:
553 commit_filter.append('not obsolete()')
559 commit_filter.append('not obsolete()')
554 commit_filter.append('not hidden()')
560 commit_filter.append('not hidden()')
555
561
556 # TODO: johbo: Figure out a simpler way for this solution
562 # TODO: johbo: Figure out a simpler way for this solution
557 collection_generator = CollectionGenerator
563 collection_generator = CollectionGenerator
558 if commit_filter:
564 if commit_filter:
559 commit_filter = ' and '.join(map(safe_str, commit_filter))
565 commit_filter = ' and '.join(map(safe_str, commit_filter))
560 revisions = self._remote.rev_range([commit_filter])
566 revisions = self._remote.rev_range([commit_filter])
561 collection_generator = MercurialIndexBasedCollectionGenerator
567 collection_generator = MercurialIndexBasedCollectionGenerator
562 else:
568 else:
563 revisions = self.commit_ids
569 revisions = self.commit_ids
564
570
565 if start_pos or end_pos:
571 if start_pos or end_pos:
566 revisions = revisions[start_pos:end_pos]
572 revisions = revisions[start_pos:end_pos]
567
573
568 return collection_generator(self, revisions, pre_load=pre_load)
574 return collection_generator(self, revisions, pre_load=pre_load)
569
575
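Illustrative call of the generator above; the branch name and the date strings are assumptions (any value accepted by Mercurial's date() revset should work).

    commits = repo.get_commits(
        branch_name='default',
        start_date='2023-01-01',
        end_date='2023-12-31',
        show_hidden=False)

    for commit in commits:
        print(commit.raw_id)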
570 def pull(self, url, commit_ids=None):
576 def pull(self, url, commit_ids=None):
571 """
577 """
572 Pull changes from external location.
578 Pull changes from external location.
573
579
574 :param commit_ids: Optional. Can be set to a list of commit ids
580 :param commit_ids: Optional. Can be set to a list of commit ids
575 which shall be pulled from the other repository.
581 which shall be pulled from the other repository.
576 """
582 """
577 url = self._get_url(url)
583 url = self._get_url(url)
578 self._remote.pull(url, commit_ids=commit_ids)
584 self._remote.pull(url, commit_ids=commit_ids)
579 self._remote.invalidate_vcs_cache()
585 self._remote.invalidate_vcs_cache()
580
586
581 def fetch(self, url, commit_ids=None, **kwargs):
587 def fetch(self, url, commit_ids=None, **kwargs):
582 """
588 """
583 Backward compatibility with GIT fetch==pull
589 Backward compatibility with GIT fetch==pull
584 """
590 """
585 return self.pull(url, commit_ids=commit_ids)
591 return self.pull(url, commit_ids=commit_ids)
586
592
587 def push(self, url, **kwargs):
593 def push(self, url, **kwargs):
588 url = self._get_url(url)
594 url = self._get_url(url)
589 self._remote.sync_push(url)
595 self._remote.sync_push(url)
590
596
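Quick sketch of the synchronisation methods above; the remote URL and commit id are hypothetical, and fetch() is just a Git-compatible alias for pull().

    remote = 'https://code.example.com/hg/upstream'

    repo.pull(remote)                         # pull everything
    repo.pull(remote, commit_ids=[some_id])   # or only selected commits
    repo.push(remote)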
591 def _local_clone(self, clone_path):
597 def _local_clone(self, clone_path):
592 """
598 """
593 Create a local clone of the current repo.
599 Create a local clone of the current repo.
594 """
600 """
595 self._remote.clone(self.path, clone_path, update_after_clone=True,
601 self._remote.clone(self.path, clone_path, update_after_clone=True,
596 hooks=False)
602 hooks=False)
597
603
598 def _update(self, revision, clean=False):
604 def _update(self, revision, clean=False):
599 """
605 """
600 Update the working copy to the specified revision.
606 Update the working copy to the specified revision.
601 """
607 """
602 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
608 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
603 self._remote.update(revision, clean=clean)
609 self._remote.update(revision, clean=clean)
604
610
605 def _identify(self):
611 def _identify(self):
606 """
612 """
607 Return the current state of the working directory.
613 Return the current state of the working directory.
608 """
614 """
609 return self._remote.identify().strip().rstrip('+')
615 return self._remote.identify().strip().rstrip('+')
610
616
611 def _heads(self, branch=None):
617 def _heads(self, branch=None):
612 """
618 """
613 Return the commit ids of the repository heads.
619 Return the commit ids of the repository heads.
614 """
620 """
615 return self._remote.heads(branch=branch).strip().split(' ')
621 return self._remote.heads(branch=branch).strip().split(' ')
616
622
617 def _ancestor(self, revision1, revision2):
623 def _ancestor(self, revision1, revision2):
618 """
624 """
619 Return the common ancestor of the two revisions.
625 Return the common ancestor of the two revisions.
620 """
626 """
621 return self._remote.ancestor(revision1, revision2)
627 return self._remote.ancestor(revision1, revision2)
622
628
623 def _local_push(
629 def _local_push(
624 self, revision, repository_path, push_branches=False,
630 self, revision, repository_path, push_branches=False,
625 enable_hooks=False):
631 enable_hooks=False):
626 """
632 """
627 Push the given revision to the specified repository.
633 Push the given revision to the specified repository.
628
634
629 :param push_branches: allow creating branches in the target repo.
635 :param push_branches: allow creating branches in the target repo.
630 """
636 """
631 self._remote.push(
637 self._remote.push(
632 [revision], repository_path, hooks=enable_hooks,
638 [revision], repository_path, hooks=enable_hooks,
633 push_branches=push_branches)
639 push_branches=push_branches)
634
640
635 def _local_merge(self, target_ref, merge_message, user_name, user_email,
641 def _local_merge(self, target_ref, merge_message, user_name, user_email,
636 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
642 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
637 """
643 """
638 Merge the given source_revision into the checked out revision.
644 Merge the given source_revision into the checked out revision.
639
645
640 Returns the commit id of the merge and a boolean indicating if the
646 Returns the commit id of the merge and a boolean indicating if the
641 commit needs to be pushed.
647 commit needs to be pushed.
642 """
648 """
643
649
644 source_ref_commit_id = source_ref.commit_id
650 source_ref_commit_id = source_ref.commit_id
645 target_ref_commit_id = target_ref.commit_id
651 target_ref_commit_id = target_ref.commit_id
646
652
647 # update our workdir to target ref, for proper merge
653 # update our workdir to target ref, for proper merge
648 self._update(target_ref_commit_id, clean=True)
654 self._update(target_ref_commit_id, clean=True)
649
655
650 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
656 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
651 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
657 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
652
658
653 if close_commit_id:
659 if close_commit_id:
654 # NOTE(marcink): if we get the close commit, this is our new source
660 # NOTE(marcink): if we get the close commit, this is our new source
655 # which will include the close commit itself.
661 # which will include the close commit itself.
656 source_ref_commit_id = close_commit_id
662 source_ref_commit_id = close_commit_id
657
663
658 if ancestor == source_ref_commit_id:
664 if ancestor == source_ref_commit_id:
659 # Nothing to do, the changes were already integrated
665 # Nothing to do, the changes were already integrated
660 return target_ref_commit_id, False
666 return target_ref_commit_id, False
661
667
662 elif ancestor == target_ref_commit_id and is_the_same_branch:
668 elif ancestor == target_ref_commit_id and is_the_same_branch:
663 # In this case we should force a commit message
669 # In this case we should force a commit message
664 return source_ref_commit_id, True
670 return source_ref_commit_id, True
665
671
666 unresolved = None
672 unresolved = None
667 if use_rebase:
673 if use_rebase:
668 try:
674 try:
669 bookmark_name = f'rcbook{source_ref_commit_id}{target_ref_commit_id}'
675 bookmark_name = f'rcbook{source_ref_commit_id}{target_ref_commit_id}'
670 self.bookmark(bookmark_name, revision=source_ref.commit_id)
676 self.bookmark(bookmark_name, revision=source_ref.commit_id)
671 self._remote.rebase(
677 self._remote.rebase(
672 source=source_ref_commit_id, dest=target_ref_commit_id)
678 source=source_ref_commit_id, dest=target_ref_commit_id)
673 self._remote.invalidate_vcs_cache()
679 self._remote.invalidate_vcs_cache()
674 self._update(bookmark_name, clean=True)
680 self._update(bookmark_name, clean=True)
675 return self._identify(), True
681 return self._identify(), True
676 except RepositoryError as e:
682 except RepositoryError as e:
677 # The rebase-abort may raise another exception which 'hides'
683 # The rebase-abort may raise another exception which 'hides'
678 # the original one, therefore we log it here.
684 # the original one, therefore we log it here.
679 log.exception('Error while rebasing shadow repo during merge.')
685 log.exception('Error while rebasing shadow repo during merge.')
680 if 'unresolved conflicts' in safe_str(e):
686 if 'unresolved conflicts' in safe_str(e):
681 unresolved = self._remote.get_unresolved_files()
687 unresolved = self._remote.get_unresolved_files()
682 log.debug('unresolved files: %s', unresolved)
688 log.debug('unresolved files: %s', unresolved)
683
689
684 # Cleanup any rebase leftovers
690 # Cleanup any rebase leftovers
685 self._remote.invalidate_vcs_cache()
691 self._remote.invalidate_vcs_cache()
686 self._remote.rebase(abort=True)
692 self._remote.rebase(abort=True)
687 self._remote.invalidate_vcs_cache()
693 self._remote.invalidate_vcs_cache()
688 self._remote.update(clean=True)
694 self._remote.update(clean=True)
689 if unresolved:
695 if unresolved:
690 raise UnresolvedFilesInRepo(unresolved)
696 raise UnresolvedFilesInRepo(unresolved)
691 else:
697 else:
692 raise
698 raise
693 else:
699 else:
694 try:
700 try:
695 self._remote.merge(source_ref_commit_id)
701 self._remote.merge(source_ref_commit_id)
696 self._remote.invalidate_vcs_cache()
702 self._remote.invalidate_vcs_cache()
697 self._remote.commit(
703 self._remote.commit(
698 message=safe_str(merge_message),
704 message=safe_str(merge_message),
699 username=safe_str(f'{user_name} <{user_email}>'))
705 username=safe_str(f'{user_name} <{user_email}>'))
700 self._remote.invalidate_vcs_cache()
706 self._remote.invalidate_vcs_cache()
701 return self._identify(), True
707 return self._identify(), True
702 except RepositoryError as e:
708 except RepositoryError as e:
703 # The merge-abort may raise another exception which 'hides'
709 # The merge-abort may raise another exception which 'hides'
704 # the original one, therefore we log it here.
710 # the original one, therefore we log it here.
705 log.exception('Error while merging shadow repo during merge.')
711 log.exception('Error while merging shadow repo during merge.')
706 if 'unresolved merge conflicts' in safe_str(e):
712 if 'unresolved merge conflicts' in safe_str(e):
707 unresolved = self._remote.get_unresolved_files()
713 unresolved = self._remote.get_unresolved_files()
708 log.debug('unresolved files: %s', unresolved)
714 log.debug('unresolved files: %s', unresolved)
709
715
710 # Cleanup any merge leftovers
716 # Cleanup any merge leftovers
711 self._remote.update(clean=True)
717 self._remote.update(clean=True)
712 if unresolved:
718 if unresolved:
713 raise UnresolvedFilesInRepo(unresolved)
719 raise UnresolvedFilesInRepo(unresolved)
714 else:
720 else:
715 raise
721 raise
716
722
717 def _local_close(self, target_ref, user_name, user_email,
723 def _local_close(self, target_ref, user_name, user_email,
718 source_ref, close_message=''):
724 source_ref, close_message=''):
719 """
725 """
720 Close the branch of the given source_revision
726 Close the branch of the given source_revision
721
727
722 Returns the commit id of the close and a boolean indicating if the
728 Returns the commit id of the close and a boolean indicating if the
723 commit needs to be pushed.
729 commit needs to be pushed.
724 """
730 """
725 self._update(source_ref.commit_id)
731 self._update(source_ref.commit_id)
726 message = (close_message or vcs_settings.HG_CLOSE_BRANCH_MESSAGE_TMPL).format(
732 message = (close_message or vcs_settings.HG_CLOSE_BRANCH_MESSAGE_TMPL).format(
727 user_name=user_name,
733 user_name=user_name,
728 user_email=user_email,
734 user_email=user_email,
729 target_ref_name=target_ref.name,
735 target_ref_name=target_ref.name,
730 source_ref_name=source_ref.name
736 source_ref_name=source_ref.name
731 )
737 )
732 try:
738 try:
733 self._remote.commit(
739 self._remote.commit(
734 message=safe_str(message),
740 message=safe_str(message),
735 username=safe_str(f'{user_name} <{user_email}>'),
741 username=safe_str(f'{user_name} <{user_email}>'),
736 close_branch=True)
742 close_branch=True)
737 self._remote.invalidate_vcs_cache()
743 self._remote.invalidate_vcs_cache()
738 return self._identify(), True
744 return self._identify(), True
739 except RepositoryError:
745 except RepositoryError:
740 # Cleanup any commit leftovers
746 # Cleanup any commit leftovers
741 self._remote.update(clean=True)
747 self._remote.update(clean=True)
742 raise
748 raise
743
749
744 def _is_the_same_branch(self, target_ref, source_ref):
750 def _is_the_same_branch(self, target_ref, source_ref):
745 return (
751 return (
746 self._get_branch_name(target_ref) ==
752 self._get_branch_name(target_ref) ==
747 self._get_branch_name(source_ref))
753 self._get_branch_name(source_ref))
748
754
749 def _get_branch_name(self, ref):
755 def _get_branch_name(self, ref):
750 if ref.type == 'branch':
756 if ref.type == 'branch':
751 return ref.name
757 return ref.name
752 return self._remote.ctx_branch(ref.commit_id)
758 return self._remote.ctx_branch(ref.commit_id)
753
759
754 def _maybe_prepare_merge_workspace(
760 def _maybe_prepare_merge_workspace(
755 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
761 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
756 shadow_repository_path = self._get_shadow_repository_path(
762 shadow_repository_path = self._get_shadow_repository_path(
757 self.path, repo_id, workspace_id)
763 self.path, repo_id, workspace_id)
758 if not os.path.exists(shadow_repository_path):
764 if not os.path.exists(shadow_repository_path):
759 self._local_clone(shadow_repository_path)
765 self._local_clone(shadow_repository_path)
760 log.debug(
766 log.debug(
761 'Prepared shadow repository in %s', shadow_repository_path)
767 'Prepared shadow repository in %s', shadow_repository_path)
762
768
763 return shadow_repository_path
769 return shadow_repository_path
764
770
765 def _merge_repo(self, repo_id, workspace_id, target_ref,
771 def _merge_repo(self, repo_id, workspace_id, target_ref,
766 source_repo, source_ref, merge_message,
772 source_repo, source_ref, merge_message,
767 merger_name, merger_email, dry_run=False,
773 merger_name, merger_email, dry_run=False,
768 use_rebase=False, close_branch=False):
774 use_rebase=False, close_branch=False):
769
775
770 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
776 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
771 'rebase' if use_rebase else 'merge', dry_run)
777 'rebase' if use_rebase else 'merge', dry_run)
772
778
773 if target_ref.commit_id not in self._heads():
779 if target_ref.commit_id not in self._heads():
774 return MergeResponse(
780 return MergeResponse(
775 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
781 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
776 metadata={'target_ref': target_ref})
782 metadata={'target_ref': target_ref})
777
783
778 try:
784 try:
779 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
785 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
780 heads_all = self._heads(target_ref.name)
786 heads_all = self._heads(target_ref.name)
781 max_heads = 10
787 max_heads = 10
782 if len(heads_all) > max_heads:
788 if len(heads_all) > max_heads:
783 heads = '\n,'.join(
789 heads = '\n,'.join(
784 heads_all[:max_heads] +
790 heads_all[:max_heads] +
785 [f'and {len(heads_all)-max_heads} more.'])
791 [f'and {len(heads_all)-max_heads} more.'])
786 else:
792 else:
787 heads = '\n,'.join(heads_all)
793 heads = '\n,'.join(heads_all)
788 metadata = {
794 metadata = {
789 'target_ref': target_ref,
795 'target_ref': target_ref,
790 'source_ref': source_ref,
796 'source_ref': source_ref,
791 'heads': heads
797 'heads': heads
792 }
798 }
793 return MergeResponse(
799 return MergeResponse(
794 False, False, None,
800 False, False, None,
795 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
801 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
796 metadata=metadata)
802 metadata=metadata)
797 except CommitDoesNotExistError:
803 except CommitDoesNotExistError:
798 log.exception('Failure when looking up branch heads on hg target')
804 log.exception('Failure when looking up branch heads on hg target')
799 return MergeResponse(
805 return MergeResponse(
800 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
806 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
801 metadata={'target_ref': target_ref})
807 metadata={'target_ref': target_ref})
802
808
803 shadow_repository_path = self._maybe_prepare_merge_workspace(
809 shadow_repository_path = self._maybe_prepare_merge_workspace(
804 repo_id, workspace_id, target_ref, source_ref)
810 repo_id, workspace_id, target_ref, source_ref)
805 shadow_repo = self.get_shadow_instance(shadow_repository_path)
811 shadow_repo = self.get_shadow_instance(shadow_repository_path)
806
812
807 log.debug('Pulling in target reference %s', target_ref)
813 log.debug('Pulling in target reference %s', target_ref)
808 self._validate_pull_reference(target_ref)
814 self._validate_pull_reference(target_ref)
809 shadow_repo._local_pull(self.path, target_ref)
815 shadow_repo._local_pull(self.path, target_ref)
810
816
811 try:
817 try:
812 log.debug('Pulling in source reference %s', source_ref)
818 log.debug('Pulling in source reference %s', source_ref)
813 source_repo._validate_pull_reference(source_ref)
819 source_repo._validate_pull_reference(source_ref)
814 shadow_repo._local_pull(source_repo.path, source_ref)
820 shadow_repo._local_pull(source_repo.path, source_ref)
815 except CommitDoesNotExistError:
821 except CommitDoesNotExistError:
816 log.exception('Failure when doing local pull on hg shadow repo')
822 log.exception('Failure when doing local pull on hg shadow repo')
817 return MergeResponse(
823 return MergeResponse(
818 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
824 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
819 metadata={'source_ref': source_ref})
825 metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close_branch is only used when the source is an
        # actual branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow closing the branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:

            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase,
                    close_commit_id=close_commit_id, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, as it may have
                # required a push of its own
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError as e:
                log.exception('Failure when doing local merge on hg shadow repo')
                metadata['unresolved_files'] = 'no unresolved files found'

                if isinstance(e, UnresolvedFilesInRepo):
                    all_conflicts = list(e.args[0])
                    max_conflicts = 20
                    if len(all_conflicts) > max_conflicts:
                        conflicts = all_conflicts[:max_conflicts] \
                            + [f'and {len(all_conflicts)-max_conflicts} more.']
                    else:
                        conflicts = all_conflicts
                    metadata['unresolved_files'] = \
                        '\n* conflict: ' + \
                        ('\n * conflict: '.join(conflicts))

                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmark is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action: we push from the shadow
                    # repository into the origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous, as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)

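    # Note on the result of the merge flow above: MergeResponse is built
    # positionally as (merge possible, merge succeeded, merge reference,
    # failure reason), with extra detail such as unresolved files, missing
    # refs or the failed push target carried in the ``metadata`` dict.
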
    def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
        config = self.config.copy()
        if not enable_hooks:
            config.clear_section('hooks')
        return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
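
    # Note on shadow instances: unless ``enable_hooks=True`` is passed, the
    # 'hooks' section is cleared from the copied config, so the intermediate
    # pulls and merges performed in the shadow repository run without
    # RhodeCode hooks; only the final push back into the target repository
    # (see the merge flow above) goes through a hook-enabled instance.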

    def _validate_pull_reference(self, reference):
        if not (reference.name in self.bookmarks or
                reference.name in self.branches or
                self.get_commit(reference.commit_id)):
            raise CommitDoesNotExistError(
                'Unknown branch, bookmark or commit id')

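    # Illustrative only: the references handled by _validate_pull_reference()
    # and _local_pull() are ``Reference`` tuples of (type, name, commit_id),
    # e.g. Reference('branch', 'default', '<40-char commit id>') or
    # Reference('book', 'pr-merge', '<40-char commit id>').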
    def _local_pull(self, repository_path, reference):
        """
        Fetch a branch, bookmark or commit from a local repository.
        """
        repository_path = os.path.abspath(repository_path)
        if repository_path == self.path:
            raise ValueError('Cannot pull from the same repository')

        reference_type_to_option_name = {
            'book': 'bookmark',
            'branch': 'branch',
        }
        option_name = reference_type_to_option_name.get(
            reference.type, 'revision')

        if option_name == 'revision':
            ref = reference.commit_id
        else:
            ref = reference.name

        options = {option_name: [ref]}
        self._remote.pull_cmd(repository_path, hooks=False, **options)
        self._remote.invalidate_vcs_cache()

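    # A minimal sketch of how the mapping above translates into pull options
    # (option names follow the code in _local_pull; the path is hypothetical):
    #
    #   branch ref   -> self._remote.pull_cmd(path, hooks=False, branch=['default'])
    #   bookmark ref -> self._remote.pull_cmd(path, hooks=False, bookmark=['pr-merge'])
    #   other refs   -> self._remote.pull_cmd(path, hooks=False, revision=['<commit id>'])
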
    def bookmark(self, bookmark, revision=None):
        if isinstance(bookmark, str):
            bookmark = safe_str(bookmark)
        self._remote.bookmark(bookmark, revision=revision)
        self._remote.invalidate_vcs_cache()

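    # For example, the merge flow above calls
    # ``shadow_repo.bookmark('pr-merge', revision=merge_commit_id)`` to mark
    # the merge commit in the shadow repository.
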
    def get_path_permissions(self, username):
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
            svalue = None
            for section, option in [
                    ('narrowacl', username + suffix),
                    ('narrowacl', 'default' + suffix),
                    ('narrowhgacl', username + suffix),
                    ('narrowhgacl', 'default' + suffix)
                    ]:
                try:
                    svalue = hgacl.get(section, option)
                    break  # stop at the first value we find
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None
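
    # A minimal, illustrative ``.hg/hgacl`` layout that the parser above
    # would accept (section and option names come from read_patterns(); the
    # user name and paths are made up):
    #
    #   [narrowacl]
    #   someuser.includes = docs src/module
    #   someuser.excludes = secrets
    #   default.includes = README.rst
    #
    # Pattern values are whitespace-separated; a pattern without wildcards is
    # expanded with a trailing '/*' variant, and '/' is always the first entry.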


class MercurialIndexBasedCollectionGenerator(CollectionGenerator):

    def _commit_factory(self, commit_id):
        if isinstance(commit_id, int):
            return self.repo.get_commit(
                commit_idx=commit_id, pre_load=self.pre_load)
        else:
            return self.repo.get_commit(
                commit_id=commit_id, pre_load=self.pre_load)
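
    # Dispatch note for the factory above: integer inputs are resolved as
    # commit indexes (``commit_idx``), while strings are resolved as commit
    # ids/hashes (``commit_id``); both go through ``self.repo.get_commit``
    # with the generator's ``pre_load`` attributes.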