##// END OF EJS Templates
strip: fix backup command args
super-admin -
r5158:79ec5964 default
parent child Browse files
Show More
@@ -1,1013 +1,1013 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 HG repository module
20 HG repository module
21 """
21 """
22 import os
22 import os
23 import logging
23 import logging
24 import binascii
24 import binascii
25 import configparser
25 import configparser
26 import urllib.request
26 import urllib.request
27 import urllib.parse
27 import urllib.parse
28 import urllib.error
28 import urllib.error
29
29
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from collections import OrderedDict
32 from collections import OrderedDict
33 from rhodecode.lib.datelib import (
33 from rhodecode.lib.datelib import (
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 from rhodecode.lib.str_utils import safe_str
35 from rhodecode.lib.str_utils import safe_str
36 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.utils2 import CachedProperty
37 from rhodecode.lib.vcs import connection, exceptions
37 from rhodecode.lib.vcs import connection, exceptions
38 from rhodecode.lib.vcs.backends.base import (
38 from rhodecode.lib.vcs.backends.base import (
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 MergeFailureReason, Reference, BasePathPermissionChecker)
40 MergeFailureReason, Reference, BasePathPermissionChecker)
41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 from rhodecode.lib.vcs.exceptions import (
44 from rhodecode.lib.vcs.exceptions import (
45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
47
47
# convenience alias for turning binary hashes into hex strings
hexlify = binascii.hexlify
# mercurial's null revision id: twenty zero bytes
nullid = "\0" * 20

log = logging.getLogger(__name__)
53
53
54 class MercurialRepository(BaseRepository):
54 class MercurialRepository(BaseRepository):
55 """
55 """
56 Mercurial repository backend
56 Mercurial repository backend
57 """
57 """
58 DEFAULT_BRANCH_NAME = 'default'
58 DEFAULT_BRANCH_NAME = 'default'
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62 """
62 """
63 Raises RepositoryError if repository could not be find at the given
63 Raises RepositoryError if repository could not be find at the given
64 ``repo_path``.
64 ``repo_path``.
65
65
66 :param repo_path: local path of the repository
66 :param repo_path: local path of the repository
67 :param config: config object containing the repo configuration
67 :param config: config object containing the repo configuration
68 :param create=False: if set to True, would try to create repository if
68 :param create=False: if set to True, would try to create repository if
69 it does not exist rather than raising exception
69 it does not exist rather than raising exception
70 :param src_url=None: would try to clone repository from given location
70 :param src_url=None: would try to clone repository from given location
71 :param do_workspace_checkout=False: sets update of working copy after
71 :param do_workspace_checkout=False: sets update of working copy after
72 making a clone
72 making a clone
73 :param bare: not used, compatible with other VCS
73 :param bare: not used, compatible with other VCS
74 """
74 """
75
75
76 self.path = safe_str(os.path.abspath(repo_path))
76 self.path = safe_str(os.path.abspath(repo_path))
77 # mercurial since 4.4.X requires certain configuration to be present
77 # mercurial since 4.4.X requires certain configuration to be present
78 # because sometimes we init the repos with config we need to meet
78 # because sometimes we init the repos with config we need to meet
79 # special requirements
79 # special requirements
80 self.config = config if config else self.get_default_config(
80 self.config = config if config else self.get_default_config(
81 default=[('extensions', 'largefiles', '1')])
81 default=[('extensions', 'largefiles', '1')])
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83
83
84 self._init_repo(create, src_url, do_workspace_checkout)
84 self._init_repo(create, src_url, do_workspace_checkout)
85
85
86 # caches
86 # caches
87 self._commit_ids = {}
87 self._commit_ids = {}
88
88
89 @LazyProperty
89 @LazyProperty
90 def _remote(self):
90 def _remote(self):
91 repo_id = self.path
91 repo_id = self.path
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93
93
94 @CachedProperty
94 @CachedProperty
95 def commit_ids(self):
95 def commit_ids(self):
96 """
96 """
97 Returns list of commit ids, in ascending order. Being lazy
97 Returns list of commit ids, in ascending order. Being lazy
98 attribute allows external tools to inject shas from cache.
98 attribute allows external tools to inject shas from cache.
99 """
99 """
100 commit_ids = self._get_all_commit_ids()
100 commit_ids = self._get_all_commit_ids()
101 self._rebuild_cache(commit_ids)
101 self._rebuild_cache(commit_ids)
102 return commit_ids
102 return commit_ids
103
103
104 def _rebuild_cache(self, commit_ids):
104 def _rebuild_cache(self, commit_ids):
105 self._commit_ids = {commit_id: index
105 self._commit_ids = {commit_id: index
106 for index, commit_id in enumerate(commit_ids)}
106 for index, commit_id in enumerate(commit_ids)}
107
107
108 @CachedProperty
108 @CachedProperty
109 def branches(self):
109 def branches(self):
110 return self._get_branches()
110 return self._get_branches()
111
111
112 @CachedProperty
112 @CachedProperty
113 def branches_closed(self):
113 def branches_closed(self):
114 return self._get_branches(active=False, closed=True)
114 return self._get_branches(active=False, closed=True)
115
115
116 @CachedProperty
116 @CachedProperty
117 def branches_all(self):
117 def branches_all(self):
118 all_branches = {}
118 all_branches = {}
119 all_branches.update(self.branches)
119 all_branches.update(self.branches)
120 all_branches.update(self.branches_closed)
120 all_branches.update(self.branches_closed)
121 return all_branches
121 return all_branches
122
122
123 def _get_branches(self, active=True, closed=False):
123 def _get_branches(self, active=True, closed=False):
124 """
124 """
125 Gets branches for this repository
125 Gets branches for this repository
126 Returns only not closed active branches by default
126 Returns only not closed active branches by default
127
127
128 :param active: return also active branches
128 :param active: return also active branches
129 :param closed: return also closed branches
129 :param closed: return also closed branches
130
130
131 """
131 """
132 if self.is_empty():
132 if self.is_empty():
133 return {}
133 return {}
134
134
135 def get_name(ctx):
135 def get_name(ctx):
136 return ctx[0]
136 return ctx[0]
137
137
138 _branches = [(n, h,) for n, h in
138 _branches = [(n, h,) for n, h in
139 self._remote.branches(active, closed).items()]
139 self._remote.branches(active, closed).items()]
140
140
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142
142
143 @CachedProperty
143 @CachedProperty
144 def tags(self):
144 def tags(self):
145 """
145 """
146 Gets tags for this repository
146 Gets tags for this repository
147 """
147 """
148 return self._get_tags()
148 return self._get_tags()
149
149
150 def _get_tags(self):
150 def _get_tags(self):
151 if self.is_empty():
151 if self.is_empty():
152 return {}
152 return {}
153
153
154 def get_name(ctx):
154 def get_name(ctx):
155 return ctx[0]
155 return ctx[0]
156
156
157 _tags = [(n, h,) for n, h in
157 _tags = [(n, h,) for n, h in
158 self._remote.tags().items()]
158 self._remote.tags().items()]
159
159
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161
161
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 """
163 """
164 Creates and returns a tag for the given ``commit_id``.
164 Creates and returns a tag for the given ``commit_id``.
165
165
166 :param name: name for new tag
166 :param name: name for new tag
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 :param commit_id: commit id for which new tag would be created
168 :param commit_id: commit id for which new tag would be created
169 :param message: message of the tag's commit
169 :param message: message of the tag's commit
170 :param date: date of tag's commit
170 :param date: date of tag's commit
171
171
172 :raises TagAlreadyExistError: if tag with same name already exists
172 :raises TagAlreadyExistError: if tag with same name already exists
173 """
173 """
174 if name in self.tags:
174 if name in self.tags:
175 raise TagAlreadyExistError("Tag %s already exists" % name)
175 raise TagAlreadyExistError("Tag %s already exists" % name)
176
176
177 commit = self.get_commit(commit_id=commit_id)
177 commit = self.get_commit(commit_id=commit_id)
178 local = kwargs.setdefault('local', False)
178 local = kwargs.setdefault('local', False)
179
179
180 if message is None:
180 if message is None:
181 message = f"Added tag {name} for commit {commit.short_id}"
181 message = f"Added tag {name} for commit {commit.short_id}"
182
182
183 date, tz = date_to_timestamp_plus_offset(date)
183 date, tz = date_to_timestamp_plus_offset(date)
184
184
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 self._remote.invalidate_vcs_cache()
186 self._remote.invalidate_vcs_cache()
187
187
188 # Reinitialize tags
188 # Reinitialize tags
189 self._invalidate_prop_cache('tags')
189 self._invalidate_prop_cache('tags')
190 tag_id = self.tags[name]
190 tag_id = self.tags[name]
191
191
192 return self.get_commit(commit_id=tag_id)
192 return self.get_commit(commit_id=tag_id)
193
193
194 def remove_tag(self, name, user, message=None, date=None):
194 def remove_tag(self, name, user, message=None, date=None):
195 """
195 """
196 Removes tag with the given `name`.
196 Removes tag with the given `name`.
197
197
198 :param name: name of the tag to be removed
198 :param name: name of the tag to be removed
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 :param message: message of the tag's removal commit
200 :param message: message of the tag's removal commit
201 :param date: date of tag's removal commit
201 :param date: date of tag's removal commit
202
202
203 :raises TagDoesNotExistError: if tag with given name does not exists
203 :raises TagDoesNotExistError: if tag with given name does not exists
204 """
204 """
205 if name not in self.tags:
205 if name not in self.tags:
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207
207
208 if message is None:
208 if message is None:
209 message = "Removed tag %s" % name
209 message = "Removed tag %s" % name
210 local = False
210 local = False
211
211
212 date, tz = date_to_timestamp_plus_offset(date)
212 date, tz = date_to_timestamp_plus_offset(date)
213
213
214 self._remote.tag(name, nullid, message, local, user, date, tz)
214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 self._remote.invalidate_vcs_cache()
215 self._remote.invalidate_vcs_cache()
216 self._invalidate_prop_cache('tags')
216 self._invalidate_prop_cache('tags')
217
217
218 @LazyProperty
218 @LazyProperty
219 def bookmarks(self):
219 def bookmarks(self):
220 """
220 """
221 Gets bookmarks for this repository
221 Gets bookmarks for this repository
222 """
222 """
223 return self._get_bookmarks()
223 return self._get_bookmarks()
224
224
225 def _get_bookmarks(self):
225 def _get_bookmarks(self):
226 if self.is_empty():
226 if self.is_empty():
227 return {}
227 return {}
228
228
229 def get_name(ctx):
229 def get_name(ctx):
230 return ctx[0]
230 return ctx[0]
231
231
232 _bookmarks = [
232 _bookmarks = [
233 (n, h) for n, h in
233 (n, h) for n, h in
234 self._remote.bookmarks().items()]
234 self._remote.bookmarks().items()]
235
235
236 return OrderedDict(sorted(_bookmarks, key=get_name))
236 return OrderedDict(sorted(_bookmarks, key=get_name))
237
237
238 def _get_all_commit_ids(self):
238 def _get_all_commit_ids(self):
239 return self._remote.get_all_commit_ids('visible')
239 return self._remote.get_all_commit_ids('visible')
240
240
241 def get_diff(
241 def get_diff(
242 self, commit1, commit2, path='', ignore_whitespace=False,
242 self, commit1, commit2, path='', ignore_whitespace=False,
243 context=3, path1=None):
243 context=3, path1=None):
244 """
244 """
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 `commit2` since `commit1`.
246 `commit2` since `commit1`.
247
247
248 :param commit1: Entry point from which diff is shown. Can be
248 :param commit1: Entry point from which diff is shown. Can be
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 the changes since empty state of the repository until `commit2`
250 the changes since empty state of the repository until `commit2`
251 :param commit2: Until which commit changes should be shown.
251 :param commit2: Until which commit changes should be shown.
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 changes. Defaults to ``False``.
253 changes. Defaults to ``False``.
254 :param context: How many lines before/after changed lines should be
254 :param context: How many lines before/after changed lines should be
255 shown. Defaults to ``3``.
255 shown. Defaults to ``3``.
256 """
256 """
257 self._validate_diff_commits(commit1, commit2)
257 self._validate_diff_commits(commit1, commit2)
258 if path1 is not None and path1 != path:
258 if path1 is not None and path1 != path:
259 raise ValueError("Diff of two different paths not supported.")
259 raise ValueError("Diff of two different paths not supported.")
260
260
261 if path:
261 if path:
262 file_filter = [self.path, path]
262 file_filter = [self.path, path]
263 else:
263 else:
264 file_filter = None
264 file_filter = None
265
265
266 diff = self._remote.diff(
266 diff = self._remote.diff(
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 opt_git=True, opt_ignorews=ignore_whitespace,
268 opt_git=True, opt_ignorews=ignore_whitespace,
269 context=context)
269 context=context)
270 return MercurialDiff(diff)
270 return MercurialDiff(diff)
271
271
272 def strip(self, commit_id, branch=None):
272 def strip(self, commit_id, branch=None):
273 self._remote.strip(commit_id, update=False, backup="none")
273 self._remote.strip(commit_id, update=False, backup=False)
274
274
275 self._remote.invalidate_vcs_cache()
275 self._remote.invalidate_vcs_cache()
276 # clear cache
276 # clear cache
277 self._invalidate_prop_cache('commit_ids')
277 self._invalidate_prop_cache('commit_ids')
278
278
279 return len(self.commit_ids)
279 return len(self.commit_ids)
280
280
281 def verify(self):
281 def verify(self):
282 verify = self._remote.verify()
282 verify = self._remote.verify()
283
283
284 self._remote.invalidate_vcs_cache()
284 self._remote.invalidate_vcs_cache()
285 return verify
285 return verify
286
286
287 def hg_update_cache(self):
287 def hg_update_cache(self):
288 update_cache = self._remote.hg_update_cache()
288 update_cache = self._remote.hg_update_cache()
289
289
290 self._remote.invalidate_vcs_cache()
290 self._remote.invalidate_vcs_cache()
291 return update_cache
291 return update_cache
292
292
293 def hg_rebuild_fn_cache(self):
293 def hg_rebuild_fn_cache(self):
294 update_cache = self._remote.hg_rebuild_fn_cache()
294 update_cache = self._remote.hg_rebuild_fn_cache()
295
295
296 self._remote.invalidate_vcs_cache()
296 self._remote.invalidate_vcs_cache()
297 return update_cache
297 return update_cache
298
298
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
301 self, commit_id1, repo2, commit_id2)
301 self, commit_id1, repo2, commit_id2)
302
302
303 if commit_id1 == commit_id2:
303 if commit_id1 == commit_id2:
304 return commit_id1
304 return commit_id1
305
305
306 ancestors = self._remote.revs_from_revspec(
306 ancestors = self._remote.revs_from_revspec(
307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
308 other_path=repo2.path)
308 other_path=repo2.path)
309
309
310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
311
311
312 log.debug('Found common ancestor with sha: %s', ancestor_id)
312 log.debug('Found common ancestor with sha: %s', ancestor_id)
313 return ancestor_id
313 return ancestor_id
314
314
315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
316 if commit_id1 == commit_id2:
316 if commit_id1 == commit_id2:
317 commits = []
317 commits = []
318 else:
318 else:
319 if merge:
319 if merge:
320 indexes = self._remote.revs_from_revspec(
320 indexes = self._remote.revs_from_revspec(
321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
323 else:
323 else:
324 indexes = self._remote.revs_from_revspec(
324 indexes = self._remote.revs_from_revspec(
325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
326 commit_id1, other_path=repo2.path)
326 commit_id1, other_path=repo2.path)
327
327
328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
329 for idx in indexes]
329 for idx in indexes]
330
330
331 return commits
331 return commits
332
332
333 @staticmethod
333 @staticmethod
334 def check_url(url, config):
334 def check_url(url, config):
335 """
335 """
336 Function will check given url and try to verify if it's a valid
336 Function will check given url and try to verify if it's a valid
337 link. Sometimes it may happened that mercurial will issue basic
337 link. Sometimes it may happened that mercurial will issue basic
338 auth request that can cause whole API to hang when used from python
338 auth request that can cause whole API to hang when used from python
339 or other external calls.
339 or other external calls.
340
340
341 On failures it'll raise urllib2.HTTPError, exception is also thrown
341 On failures it'll raise urllib2.HTTPError, exception is also thrown
342 when the return code is non 200
342 when the return code is non 200
343 """
343 """
344 # check first if it's not an local url
344 # check first if it's not an local url
345 if os.path.isdir(url) or url.startswith('file:'):
345 if os.path.isdir(url) or url.startswith('file:'):
346 return True
346 return True
347
347
348 # Request the _remote to verify the url
348 # Request the _remote to verify the url
349 return connection.Hg.check_url(url, config.serialize())
349 return connection.Hg.check_url(url, config.serialize())
350
350
351 @staticmethod
351 @staticmethod
352 def is_valid_repository(path):
352 def is_valid_repository(path):
353 return os.path.isdir(os.path.join(path, '.hg'))
353 return os.path.isdir(os.path.join(path, '.hg'))
354
354
355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
356 """
356 """
357 Function will check for mercurial repository in given path. If there
357 Function will check for mercurial repository in given path. If there
358 is no repository in that path it will raise an exception unless
358 is no repository in that path it will raise an exception unless
359 `create` parameter is set to True - in that case repository would
359 `create` parameter is set to True - in that case repository would
360 be created.
360 be created.
361
361
362 If `src_url` is given, would try to clone repository from the
362 If `src_url` is given, would try to clone repository from the
363 location at given clone_point. Additionally it'll make update to
363 location at given clone_point. Additionally it'll make update to
364 working copy accordingly to `do_workspace_checkout` flag.
364 working copy accordingly to `do_workspace_checkout` flag.
365 """
365 """
366 if create and os.path.exists(self.path):
366 if create and os.path.exists(self.path):
367 raise RepositoryError(
367 raise RepositoryError(
368 f"Cannot create repository at {self.path}, location already exist")
368 f"Cannot create repository at {self.path}, location already exist")
369
369
370 if src_url:
370 if src_url:
371 url = str(self._get_url(src_url))
371 url = str(self._get_url(src_url))
372 MercurialRepository.check_url(url, self.config)
372 MercurialRepository.check_url(url, self.config)
373
373
374 self._remote.clone(url, self.path, do_workspace_checkout)
374 self._remote.clone(url, self.path, do_workspace_checkout)
375
375
376 # Don't try to create if we've already cloned repo
376 # Don't try to create if we've already cloned repo
377 create = False
377 create = False
378
378
379 if create:
379 if create:
380 os.makedirs(self.path, mode=0o755)
380 os.makedirs(self.path, mode=0o755)
381
381
382 self._remote.localrepository(create)
382 self._remote.localrepository(create)
383
383
384 @LazyProperty
384 @LazyProperty
385 def in_memory_commit(self):
385 def in_memory_commit(self):
386 return MercurialInMemoryCommit(self)
386 return MercurialInMemoryCommit(self)
387
387
388 @LazyProperty
388 @LazyProperty
389 def description(self):
389 def description(self):
390 description = self._remote.get_config_value(
390 description = self._remote.get_config_value(
391 'web', 'description', untrusted=True)
391 'web', 'description', untrusted=True)
392 return safe_str(description or self.DEFAULT_DESCRIPTION)
392 return safe_str(description or self.DEFAULT_DESCRIPTION)
393
393
394 @LazyProperty
394 @LazyProperty
395 def contact(self):
395 def contact(self):
396 contact = (
396 contact = (
397 self._remote.get_config_value("web", "contact") or
397 self._remote.get_config_value("web", "contact") or
398 self._remote.get_config_value("ui", "username"))
398 self._remote.get_config_value("ui", "username"))
399 return safe_str(contact or self.DEFAULT_CONTACT)
399 return safe_str(contact or self.DEFAULT_CONTACT)
400
400
401 @LazyProperty
401 @LazyProperty
402 def last_change(self):
402 def last_change(self):
403 """
403 """
404 Returns last change made on this repository as
404 Returns last change made on this repository as
405 `datetime.datetime` object.
405 `datetime.datetime` object.
406 """
406 """
407 try:
407 try:
408 return self.get_commit().date
408 return self.get_commit().date
409 except RepositoryError:
409 except RepositoryError:
410 tzoffset = makedate()[1]
410 tzoffset = makedate()[1]
411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
412
412
413 def _get_fs_mtime(self):
413 def _get_fs_mtime(self):
414 # fallback to filesystem
414 # fallback to filesystem
415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
416 st_path = os.path.join(self.path, '.hg', "store")
416 st_path = os.path.join(self.path, '.hg', "store")
417 if os.path.exists(cl_path):
417 if os.path.exists(cl_path):
418 return os.stat(cl_path).st_mtime
418 return os.stat(cl_path).st_mtime
419 else:
419 else:
420 return os.stat(st_path).st_mtime
420 return os.stat(st_path).st_mtime
421
421
422 def _get_url(self, url):
422 def _get_url(self, url):
423 """
423 """
424 Returns normalized url. If schema is not given, would fall
424 Returns normalized url. If schema is not given, would fall
425 to filesystem
425 to filesystem
426 (``file:///``) schema.
426 (``file:///``) schema.
427 """
427 """
428 if url != 'default' and '://' not in url:
428 if url != 'default' and '://' not in url:
429 url = "file:" + urllib.request.pathname2url(url)
429 url = "file:" + urllib.request.pathname2url(url)
430 return url
430 return url
431
431
432 def get_hook_location(self):
432 def get_hook_location(self):
433 """
433 """
434 returns absolute path to location where hooks are stored
434 returns absolute path to location where hooks are stored
435 """
435 """
436 return os.path.join(self.path, '.hg', '.hgrc')
436 return os.path.join(self.path, '.hg', '.hgrc')
437
437
438 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
438 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
439 translate_tag=None, maybe_unreachable=False, reference_obj=None):
439 translate_tag=None, maybe_unreachable=False, reference_obj=None):
440 """
440 """
441 Returns ``MercurialCommit`` object representing repository's
441 Returns ``MercurialCommit`` object representing repository's
442 commit at the given `commit_id` or `commit_idx`.
442 commit at the given `commit_id` or `commit_idx`.
443 """
443 """
444 if self.is_empty():
444 if self.is_empty():
445 raise EmptyRepositoryError("There are no commits yet")
445 raise EmptyRepositoryError("There are no commits yet")
446
446
447 if commit_id is not None:
447 if commit_id is not None:
448 self._validate_commit_id(commit_id)
448 self._validate_commit_id(commit_id)
449 try:
449 try:
450 # we have cached idx, use it without contacting the remote
450 # we have cached idx, use it without contacting the remote
451 idx = self._commit_ids[commit_id]
451 idx = self._commit_ids[commit_id]
452 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
452 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
453 except KeyError:
453 except KeyError:
454 pass
454 pass
455
455
456 elif commit_idx is not None:
456 elif commit_idx is not None:
457 self._validate_commit_idx(commit_idx)
457 self._validate_commit_idx(commit_idx)
458 try:
458 try:
459 _commit_id = self.commit_ids[commit_idx]
459 _commit_id = self.commit_ids[commit_idx]
460 if commit_idx < 0:
460 if commit_idx < 0:
461 commit_idx = self.commit_ids.index(_commit_id)
461 commit_idx = self.commit_ids.index(_commit_id)
462
462
463 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
463 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
464 except IndexError:
464 except IndexError:
465 commit_id = commit_idx
465 commit_id = commit_idx
466 else:
466 else:
467 commit_id = "tip"
467 commit_id = "tip"
468
468
469 # case here is no cached version, do an actual lookup instead
469 # case here is no cached version, do an actual lookup instead
470 try:
470 try:
471 raw_id, idx = self._remote.lookup(commit_id, both=True)
471 raw_id, idx = self._remote.lookup(commit_id, both=True)
472 except CommitDoesNotExistError:
472 except CommitDoesNotExistError:
473 msg = "Commit {} does not exist for `{}`".format(
473 msg = "Commit {} does not exist for `{}`".format(
474 *map(safe_str, [commit_id, self.name]))
474 *map(safe_str, [commit_id, self.name]))
475 raise CommitDoesNotExistError(msg)
475 raise CommitDoesNotExistError(msg)
476
476
477 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
477 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
478
478
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns generator of ``MercurialCommit`` objects from start to end
        (both are inclusive)

        :param start_id: None, str(commit_id)
        :param end_id: None, str(commit_id)
        :param start_date: if specified, commits with commit date less than
            ``start_date`` would be filtered out from returned set
        :param end_date: if specified, commits with commit date greater than
            ``end_date`` would be filtered out from returned set
        :param branch_name: if specified, commits not reachable from given
            branch would be filtered out from returned set
        :param show_hidden: Show hidden commits such as obsolete or hidden from
            Mercurial evolve
        :param pre_load: attributes to pre-load on each yielded commit
        :param translate_tags: accepted for interface symmetry; not used
            here — TODO confirm against the git backend's signature
        :raise BranchDoesNotExistError: If given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: If commit for given ``start`` or
            ``end`` could not be found.
        """
        # actually we should check now if it's not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        # NOTE(review): branch_ancestors is never set to True in this method,
        # so the ancestors(branch(...)) revset branch below is dead code.
        branch_ancestors = False
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            # index of the start commit in the full commit-id list
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        # make the end position inclusive for the slice below
        if end_pos is not None:
            end_pos += 1

        # build a Mercurial revset expression from the requested filters
        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append(f'branch("{branch_name}")')
        elif branch_name and branch_ancestors:
            commit_filter.append(f'ancestors(branch("{branch_name}"))')

        if start_date and not end_date:
            commit_filter.append(f'date(">{start_date}")')
        if end_date and not start_date:
            commit_filter.append(f'date("<{end_date}")')
        if start_date and end_date:
            commit_filter.append(
                f'date(">{start_date}") and date("<{end_date}")')

        # hide evolve-obsoleted/hidden changesets unless explicitly requested
        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            # filtered lookups return revision indices, hence the
            # index-based generator below
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)
561
561
562 def pull(self, url, commit_ids=None):
562 def pull(self, url, commit_ids=None):
563 """
563 """
564 Pull changes from external location.
564 Pull changes from external location.
565
565
566 :param commit_ids: Optional. Can be set to a list of commit ids
566 :param commit_ids: Optional. Can be set to a list of commit ids
567 which shall be pulled from the other repository.
567 which shall be pulled from the other repository.
568 """
568 """
569 url = self._get_url(url)
569 url = self._get_url(url)
570 self._remote.pull(url, commit_ids=commit_ids)
570 self._remote.pull(url, commit_ids=commit_ids)
571 self._remote.invalidate_vcs_cache()
571 self._remote.invalidate_vcs_cache()
572
572
573 def fetch(self, url, commit_ids=None):
573 def fetch(self, url, commit_ids=None):
574 """
574 """
575 Backward compatibility with GIT fetch==pull
575 Backward compatibility with GIT fetch==pull
576 """
576 """
577 return self.pull(url, commit_ids=commit_ids)
577 return self.pull(url, commit_ids=commit_ids)
578
578
579 def push(self, url):
579 def push(self, url):
580 url = self._get_url(url)
580 url = self._get_url(url)
581 self._remote.sync_push(url)
581 self._remote.sync_push(url)
582
582
583 def _local_clone(self, clone_path):
583 def _local_clone(self, clone_path):
584 """
584 """
585 Create a local clone of the current repo.
585 Create a local clone of the current repo.
586 """
586 """
587 self._remote.clone(self.path, clone_path, update_after_clone=True,
587 self._remote.clone(self.path, clone_path, update_after_clone=True,
588 hooks=False)
588 hooks=False)
589
589
590 def _update(self, revision, clean=False):
590 def _update(self, revision, clean=False):
591 """
591 """
592 Update the working copy to the specified revision.
592 Update the working copy to the specified revision.
593 """
593 """
594 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
594 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
595 self._remote.update(revision, clean=clean)
595 self._remote.update(revision, clean=clean)
596
596
597 def _identify(self):
597 def _identify(self):
598 """
598 """
599 Return the current state of the working directory.
599 Return the current state of the working directory.
600 """
600 """
601 return self._remote.identify().strip().rstrip('+')
601 return self._remote.identify().strip().rstrip('+')
602
602
603 def _heads(self, branch=None):
603 def _heads(self, branch=None):
604 """
604 """
605 Return the commit ids of the repository heads.
605 Return the commit ids of the repository heads.
606 """
606 """
607 return self._remote.heads(branch=branch).strip().split(' ')
607 return self._remote.heads(branch=branch).strip().split(' ')
608
608
609 def _ancestor(self, revision1, revision2):
609 def _ancestor(self, revision1, revision2):
610 """
610 """
611 Return the common ancestor of the two revisions.
611 Return the common ancestor of the two revisions.
612 """
612 """
613 return self._remote.ancestor(revision1, revision2)
613 return self._remote.ancestor(revision1, revision2)
614
614
615 def _local_push(
615 def _local_push(
616 self, revision, repository_path, push_branches=False,
616 self, revision, repository_path, push_branches=False,
617 enable_hooks=False):
617 enable_hooks=False):
618 """
618 """
619 Push the given revision to the specified repository.
619 Push the given revision to the specified repository.
620
620
621 :param push_branches: allow to create branches in the target repo.
621 :param push_branches: allow to create branches in the target repo.
622 """
622 """
623 self._remote.push(
623 self._remote.push(
624 [revision], repository_path, hooks=enable_hooks,
624 [revision], repository_path, hooks=enable_hooks,
625 push_branches=push_branches)
625 push_branches=push_branches)
626
626
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :param target_ref: reference of the merge target (workdir is
            updated to it before merging).
        :param merge_message: commit message used for a plain merge commit.
        :param use_rebase: rebase the source onto the target instead of
            creating a merge commit.
        :param close_commit_id: commit id of a branch-close commit; when
            given it replaces the source commit id below.
        :param dry_run: accepted by the signature but not referenced in
            this body — presumably handled by callers; TODO confirm.
        :raises UnresolvedFilesInRepo: when the merge/rebase left
            unresolved conflicts behind.
        """
        source_ref_commit_id = source_ref.commit_id
        target_ref_commit_id = target_ref.commit_id

        # update our workdir to target ref, for proper merge
        self._update(target_ref_commit_id, clean=True)

        ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if close_commit_id:
            # NOTE(marcink): if we get the close commit, this is our new source
            # which will include the close commit itself.
            source_ref_commit_id = close_commit_id

        if ancestor == source_ref_commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref_commit_id, False

        elif ancestor == target_ref_commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref_commit_id, True

        unresolved = None
        if use_rebase:
            try:
                # temporary bookmark marking the rebased head so we can
                # update to it after the rebase finishes
                bookmark_name = f'rcbook{source_ref_commit_id}{target_ref_commit_id}'
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref_commit_id, dest=target_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError as e:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')
                if 'unresolved conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
        else:
            try:
                self._remote.merge(source_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str(f'{user_name} <{user_email}>'))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError as e:
                # The merge-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while merging shadow repo during merge.')
                if 'unresolved merge conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
707
707
708 def _local_close(self, target_ref, user_name, user_email,
708 def _local_close(self, target_ref, user_name, user_email,
709 source_ref, close_message=''):
709 source_ref, close_message=''):
710 """
710 """
711 Close the branch of the given source_revision
711 Close the branch of the given source_revision
712
712
713 Returns the commit id of the close and a boolean indicating if the
713 Returns the commit id of the close and a boolean indicating if the
714 commit needs to be pushed.
714 commit needs to be pushed.
715 """
715 """
716 self._update(source_ref.commit_id)
716 self._update(source_ref.commit_id)
717 message = close_message or f"Closing branch: `{source_ref.name}`"
717 message = close_message or f"Closing branch: `{source_ref.name}`"
718 try:
718 try:
719 self._remote.commit(
719 self._remote.commit(
720 message=safe_str(message),
720 message=safe_str(message),
721 username=safe_str(f'{user_name} <{user_email}>'),
721 username=safe_str(f'{user_name} <{user_email}>'),
722 close_branch=True)
722 close_branch=True)
723 self._remote.invalidate_vcs_cache()
723 self._remote.invalidate_vcs_cache()
724 return self._identify(), True
724 return self._identify(), True
725 except RepositoryError:
725 except RepositoryError:
726 # Cleanup any commit leftovers
726 # Cleanup any commit leftovers
727 self._remote.update(clean=True)
727 self._remote.update(clean=True)
728 raise
728 raise
729
729
730 def _is_the_same_branch(self, target_ref, source_ref):
730 def _is_the_same_branch(self, target_ref, source_ref):
731 return (
731 return (
732 self._get_branch_name(target_ref) ==
732 self._get_branch_name(target_ref) ==
733 self._get_branch_name(source_ref))
733 self._get_branch_name(source_ref))
734
734
735 def _get_branch_name(self, ref):
735 def _get_branch_name(self, ref):
736 if ref.type == 'branch':
736 if ref.type == 'branch':
737 return ref.name
737 return ref.name
738 return self._remote.ctx_branch(ref.commit_id)
738 return self._remote.ctx_branch(ref.commit_id)
739
739
740 def _maybe_prepare_merge_workspace(
740 def _maybe_prepare_merge_workspace(
741 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
741 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
742 shadow_repository_path = self._get_shadow_repository_path(
742 shadow_repository_path = self._get_shadow_repository_path(
743 self.path, repo_id, workspace_id)
743 self.path, repo_id, workspace_id)
744 if not os.path.exists(shadow_repository_path):
744 if not os.path.exists(shadow_repository_path):
745 self._local_clone(shadow_repository_path)
745 self._local_clone(shadow_repository_path)
746 log.debug(
746 log.debug(
747 'Prepared shadow repository in %s', shadow_repository_path)
747 'Prepared shadow repository in %s', shadow_repository_path)
748
748
749 return shadow_repository_path
749 return shadow_repository_path
750
750
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Perform a merge of ``source_ref`` (from ``source_repo``) into
        ``target_ref`` of this repository, using a shadow repository as a
        staging area, and return a ``MergeResponse`` describing the outcome.

        The flow is: validate target heads -> prepare shadow repo -> pull
        target and source into it -> optionally close the source branch ->
        merge (or rebase) -> on success and not ``dry_run``, push the
        result back to this repository with hooks enabled.

        :param dry_run: when True, perform the merge in the shadow repo but
            never push back to the target repository.
        :param use_rebase: rebase instead of creating a merge commit.
        :param close_branch: also create a branch-closing commit; only
            honored for branch sources that differ from the target branch.
        """

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        # the merge target must be a current head, otherwise pushing the
        # merge result would create a new head
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                # multiple heads on the target branch: refuse and report a
                # (possibly truncated) list of the heads
                heads_all = self._heads(target_ref.name)
                max_heads = 10
                if len(heads_all) > max_heads:
                    heads = '\n,'.join(
                        heads_all[:max_heads] +
                        [f'and {len(heads_all)-max_heads} more.'])
                else:
                    heads = '\n,'.join(heads_all)
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:

            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase,
                    close_commit_id=close_commit_id, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError as e:
                log.exception('Failure when doing local merge on hg shadow repo')
                if isinstance(e, UnresolvedFilesInRepo):
                    # surface at most max_conflicts conflicted paths in the
                    # response metadata
                    all_conflicts = list(e.args[0])
                    max_conflicts = 20
                    if len(all_conflicts) > max_conflicts:
                        conflicts = all_conflicts[:max_conflicts] \
                                    + [f'and {len(all_conflicts)-max_conflicts} more.']
                    else:
                        conflicts = all_conflicts
                    metadata['unresolved_files'] = \
                        '\n* conflict: ' + \
                        ('\n * conflict: '.join(conflicts))

                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
919
919
920 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
920 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
921 config = self.config.copy()
921 config = self.config.copy()
922 if not enable_hooks:
922 if not enable_hooks:
923 config.clear_section('hooks')
923 config.clear_section('hooks')
924 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
924 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
925
925
926 def _validate_pull_reference(self, reference):
926 def _validate_pull_reference(self, reference):
927 if not (reference.name in self.bookmarks or
927 if not (reference.name in self.bookmarks or
928 reference.name in self.branches or
928 reference.name in self.branches or
929 self.get_commit(reference.commit_id)):
929 self.get_commit(reference.commit_id)):
930 raise CommitDoesNotExistError(
930 raise CommitDoesNotExistError(
931 'Unknown branch, bookmark or commit id')
931 'Unknown branch, bookmark or commit id')
932
932
933 def _local_pull(self, repository_path, reference):
933 def _local_pull(self, repository_path, reference):
934 """
934 """
935 Fetch a branch, bookmark or commit from a local repository.
935 Fetch a branch, bookmark or commit from a local repository.
936 """
936 """
937 repository_path = os.path.abspath(repository_path)
937 repository_path = os.path.abspath(repository_path)
938 if repository_path == self.path:
938 if repository_path == self.path:
939 raise ValueError('Cannot pull from the same repository')
939 raise ValueError('Cannot pull from the same repository')
940
940
941 reference_type_to_option_name = {
941 reference_type_to_option_name = {
942 'book': 'bookmark',
942 'book': 'bookmark',
943 'branch': 'branch',
943 'branch': 'branch',
944 }
944 }
945 option_name = reference_type_to_option_name.get(
945 option_name = reference_type_to_option_name.get(
946 reference.type, 'revision')
946 reference.type, 'revision')
947
947
948 if option_name == 'revision':
948 if option_name == 'revision':
949 ref = reference.commit_id
949 ref = reference.commit_id
950 else:
950 else:
951 ref = reference.name
951 ref = reference.name
952
952
953 options = {option_name: [ref]}
953 options = {option_name: [ref]}
954 self._remote.pull_cmd(repository_path, hooks=False, **options)
954 self._remote.pull_cmd(repository_path, hooks=False, **options)
955 self._remote.invalidate_vcs_cache()
955 self._remote.invalidate_vcs_cache()
956
956
957 def bookmark(self, bookmark, revision=None):
957 def bookmark(self, bookmark, revision=None):
958 if isinstance(bookmark, str):
958 if isinstance(bookmark, str):
959 bookmark = safe_str(bookmark)
959 bookmark = safe_str(bookmark)
960 self._remote.bookmark(bookmark, revision=revision)
960 self._remote.bookmark(bookmark, revision=revision)
961 self._remote.invalidate_vcs_cache()
961 self._remote.invalidate_vcs_cache()
962
962
963 def get_path_permissions(self, username):
963 def get_path_permissions(self, username):
964 hgacl_file = os.path.join(self.path, '.hg/hgacl')
964 hgacl_file = os.path.join(self.path, '.hg/hgacl')
965
965
966 def read_patterns(suffix):
966 def read_patterns(suffix):
967 svalue = None
967 svalue = None
968 for section, option in [
968 for section, option in [
969 ('narrowacl', username + suffix),
969 ('narrowacl', username + suffix),
970 ('narrowacl', 'default' + suffix),
970 ('narrowacl', 'default' + suffix),
971 ('narrowhgacl', username + suffix),
971 ('narrowhgacl', username + suffix),
972 ('narrowhgacl', 'default' + suffix)
972 ('narrowhgacl', 'default' + suffix)
973 ]:
973 ]:
974 try:
974 try:
975 svalue = hgacl.get(section, option)
975 svalue = hgacl.get(section, option)
976 break # stop at the first value we find
976 break # stop at the first value we find
977 except configparser.NoOptionError:
977 except configparser.NoOptionError:
978 pass
978 pass
979 if not svalue:
979 if not svalue:
980 return None
980 return None
981 result = ['/']
981 result = ['/']
982 for pattern in svalue.split():
982 for pattern in svalue.split():
983 result.append(pattern)
983 result.append(pattern)
984 if '*' not in pattern and '?' not in pattern:
984 if '*' not in pattern and '?' not in pattern:
985 result.append(pattern + '/*')
985 result.append(pattern + '/*')
986 return result
986 return result
987
987
988 if os.path.exists(hgacl_file):
988 if os.path.exists(hgacl_file):
989 try:
989 try:
990 hgacl = configparser.RawConfigParser()
990 hgacl = configparser.RawConfigParser()
991 hgacl.read(hgacl_file)
991 hgacl.read(hgacl_file)
992
992
993 includes = read_patterns('.includes')
993 includes = read_patterns('.includes')
994 excludes = read_patterns('.excludes')
994 excludes = read_patterns('.excludes')
995 return BasePathPermissionChecker.create_from_patterns(
995 return BasePathPermissionChecker.create_from_patterns(
996 includes, excludes)
996 includes, excludes)
997 except BaseException as e:
997 except BaseException as e:
998 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
998 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
999 hgacl_file, self.name, e)
999 hgacl_file, self.name, e)
1000 raise exceptions.RepositoryRequirementError(msg)
1000 raise exceptions.RepositoryRequirementError(msg)
1001 else:
1001 else:
1002 return None
1002 return None
1003
1003
1004
1004
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """Commit collection generator accepting both numeric revision indexes
    and commit id strings as lookup keys."""

    def _commit_factory(self, commit_id):
        # Integers are Mercurial revision indexes; anything else is passed
        # through as a commit id/hash.
        kwarg = 'commit_idx' if isinstance(commit_id, int) else 'commit_id'
        return self.repo.get_commit(pre_load=self.pre_load, **{kwarg: commit_id})
General Comments 0
You need to be logged in to leave comments. Login now