##// END OF EJS Templates
feat(mercurial): added close branch commit message template
super-admin -
r5334:c713b60d default
parent child Browse files
Show More
@@ -1,1024 +1,1030 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 HG repository module
20 HG repository module
21 """
21 """
22 import os
22 import os
23 import logging
23 import logging
24 import binascii
24 import binascii
25 import configparser
25 import configparser
26 import urllib.request
26 import urllib.request
27 import urllib.parse
27 import urllib.parse
28 import urllib.error
28 import urllib.error
29
29
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from collections import OrderedDict
32 from collections import OrderedDict
33 from rhodecode.lib.datelib import (
33 from rhodecode.lib.datelib import (
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 from rhodecode.lib.str_utils import safe_str
35 from rhodecode.lib.str_utils import safe_str
36 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.utils2 import CachedProperty
37 from rhodecode.lib.vcs import connection, exceptions
37 from rhodecode.lib.vcs import connection, exceptions
38 from rhodecode.lib.vcs.conf import settings as vcs_settings
38 from rhodecode.lib.vcs.backends.base import (
39 from rhodecode.lib.vcs.backends.base import (
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 MergeFailureReason, Reference, BasePathPermissionChecker)
41 MergeFailureReason, Reference, BasePathPermissionChecker)
41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
47 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
47
48
# Convenience alias used throughout this module.
hexlify = binascii.hexlify
# 20 NUL characters: Mercurial's null revision identifier (str form).
nullid = "\0" * 20

log = logging.getLogger(__name__)
52
53
53
54
class MercurialRepository(BaseRepository):
    """
    Mercurial repository backend
    """

    # Mercurial's implicit default branch name.
    DEFAULT_BRANCH_NAME = 'default'
59
60
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
62 do_workspace_checkout=False, with_wire=None, bare=False):
62 """
63 """
63 Raises RepositoryError if repository could not be find at the given
64 Raises RepositoryError if repository could not be find at the given
64 ``repo_path``.
65 ``repo_path``.
65
66
66 :param repo_path: local path of the repository
67 :param repo_path: local path of the repository
67 :param config: config object containing the repo configuration
68 :param config: config object containing the repo configuration
68 :param create=False: if set to True, would try to create repository if
69 :param create=False: if set to True, would try to create repository if
69 it does not exist rather than raising exception
70 it does not exist rather than raising exception
70 :param src_url=None: would try to clone repository from given location
71 :param src_url=None: would try to clone repository from given location
71 :param do_workspace_checkout=False: sets update of working copy after
72 :param do_workspace_checkout=False: sets update of working copy after
72 making a clone
73 making a clone
73 :param bare: not used, compatible with other VCS
74 :param bare: not used, compatible with other VCS
74 """
75 """
75
76
76 self.path = safe_str(os.path.abspath(repo_path))
77 self.path = safe_str(os.path.abspath(repo_path))
77 # mercurial since 4.4.X requires certain configuration to be present
78 # mercurial since 4.4.X requires certain configuration to be present
78 # because sometimes we init the repos with config we need to meet
79 # because sometimes we init the repos with config we need to meet
79 # special requirements
80 # special requirements
80 self.config = config if config else self.get_default_config(
81 self.config = config if config else self.get_default_config(
81 default=[('extensions', 'largefiles', '')])
82 default=[('extensions', 'largefiles', '')])
82
83
83 # NOTE(marcink): since python3 hgsubversion is deprecated.
84 # NOTE(marcink): since python3 hgsubversion is deprecated.
84 # From old installations we might still have this set enabled
85 # From old installations we might still have this set enabled
85 # we explicitly remove this now here to make sure it wont propagate further
86 # we explicitly remove this now here to make sure it wont propagate further
86 if config and config.get('extensions', 'hgsubversion') is not None:
87 if config and config.get('extensions', 'hgsubversion') is not None:
87 config.drop_option('extensions', 'hgsubversion')
88 config.drop_option('extensions', 'hgsubversion')
88
89
89 self.with_wire = with_wire or {"cache": False} # default should not use cache
90 self.with_wire = with_wire or {"cache": False} # default should not use cache
90
91
91 self._init_repo(create, src_url, do_workspace_checkout)
92 self._init_repo(create, src_url, do_workspace_checkout)
92
93
93 # caches
94 # caches
94 self._commit_ids = {}
95 self._commit_ids = {}
95
96
96 @LazyProperty
97 @LazyProperty
97 def _remote(self):
98 def _remote(self):
98 repo_id = self.path
99 repo_id = self.path
99 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
100 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
100
101
101 @CachedProperty
102 @CachedProperty
102 def commit_ids(self):
103 def commit_ids(self):
103 """
104 """
104 Returns list of commit ids, in ascending order. Being lazy
105 Returns list of commit ids, in ascending order. Being lazy
105 attribute allows external tools to inject shas from cache.
106 attribute allows external tools to inject shas from cache.
106 """
107 """
107 commit_ids = self._get_all_commit_ids()
108 commit_ids = self._get_all_commit_ids()
108 self._rebuild_cache(commit_ids)
109 self._rebuild_cache(commit_ids)
109 return commit_ids
110 return commit_ids
110
111
111 def _rebuild_cache(self, commit_ids):
112 def _rebuild_cache(self, commit_ids):
112 self._commit_ids = {commit_id: index
113 self._commit_ids = {commit_id: index
113 for index, commit_id in enumerate(commit_ids)}
114 for index, commit_id in enumerate(commit_ids)}
114
115
115 @CachedProperty
116 @CachedProperty
116 def branches(self):
117 def branches(self):
117 return self._get_branches()
118 return self._get_branches()
118
119
119 @CachedProperty
120 @CachedProperty
120 def branches_closed(self):
121 def branches_closed(self):
121 return self._get_branches(active=False, closed=True)
122 return self._get_branches(active=False, closed=True)
122
123
123 @CachedProperty
124 @CachedProperty
124 def branches_all(self):
125 def branches_all(self):
125 all_branches = {}
126 all_branches = {}
126 all_branches.update(self.branches)
127 all_branches.update(self.branches)
127 all_branches.update(self.branches_closed)
128 all_branches.update(self.branches_closed)
128 return all_branches
129 return all_branches
129
130
130 def _get_branches(self, active=True, closed=False):
131 def _get_branches(self, active=True, closed=False):
131 """
132 """
132 Gets branches for this repository
133 Gets branches for this repository
133 Returns only not closed active branches by default
134 Returns only not closed active branches by default
134
135
135 :param active: return also active branches
136 :param active: return also active branches
136 :param closed: return also closed branches
137 :param closed: return also closed branches
137
138
138 """
139 """
139 if self.is_empty():
140 if self.is_empty():
140 return {}
141 return {}
141
142
142 def get_name(ctx):
143 def get_name(ctx):
143 return ctx[0]
144 return ctx[0]
144
145
145 _branches = [(n, h,) for n, h in
146 _branches = [(n, h,) for n, h in
146 self._remote.branches(active, closed).items()]
147 self._remote.branches(active, closed).items()]
147
148
148 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
149 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
149
150
150 @CachedProperty
151 @CachedProperty
151 def tags(self):
152 def tags(self):
152 """
153 """
153 Gets tags for this repository
154 Gets tags for this repository
154 """
155 """
155 return self._get_tags()
156 return self._get_tags()
156
157
157 def _get_tags(self):
158 def _get_tags(self):
158 if self.is_empty():
159 if self.is_empty():
159 return {}
160 return {}
160
161
161 def get_name(ctx):
162 def get_name(ctx):
162 return ctx[0]
163 return ctx[0]
163
164
164 _tags = [(n, h,) for n, h in
165 _tags = [(n, h,) for n, h in
165 self._remote.tags().items()]
166 self._remote.tags().items()]
166
167
167 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
168 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
168
169
169 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
170 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
170 """
171 """
171 Creates and returns a tag for the given ``commit_id``.
172 Creates and returns a tag for the given ``commit_id``.
172
173
173 :param name: name for new tag
174 :param name: name for new tag
174 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
175 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
175 :param commit_id: commit id for which new tag would be created
176 :param commit_id: commit id for which new tag would be created
176 :param message: message of the tag's commit
177 :param message: message of the tag's commit
177 :param date: date of tag's commit
178 :param date: date of tag's commit
178
179
179 :raises TagAlreadyExistError: if tag with same name already exists
180 :raises TagAlreadyExistError: if tag with same name already exists
180 """
181 """
181 if name in self.tags:
182 if name in self.tags:
182 raise TagAlreadyExistError("Tag %s already exists" % name)
183 raise TagAlreadyExistError("Tag %s already exists" % name)
183
184
184 commit = self.get_commit(commit_id=commit_id)
185 commit = self.get_commit(commit_id=commit_id)
185 local = kwargs.setdefault('local', False)
186 local = kwargs.setdefault('local', False)
186
187
187 if message is None:
188 if message is None:
188 message = f"Added tag {name} for commit {commit.short_id}"
189 message = f"Added tag {name} for commit {commit.short_id}"
189
190
190 date, tz = date_to_timestamp_plus_offset(date)
191 date, tz = date_to_timestamp_plus_offset(date)
191
192
192 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
193 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
193 self._remote.invalidate_vcs_cache()
194 self._remote.invalidate_vcs_cache()
194
195
195 # Reinitialize tags
196 # Reinitialize tags
196 self._invalidate_prop_cache('tags')
197 self._invalidate_prop_cache('tags')
197 tag_id = self.tags[name]
198 tag_id = self.tags[name]
198
199
199 return self.get_commit(commit_id=tag_id)
200 return self.get_commit(commit_id=tag_id)
200
201
201 def remove_tag(self, name, user, message=None, date=None):
202 def remove_tag(self, name, user, message=None, date=None):
202 """
203 """
203 Removes tag with the given `name`.
204 Removes tag with the given `name`.
204
205
205 :param name: name of the tag to be removed
206 :param name: name of the tag to be removed
206 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
207 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
207 :param message: message of the tag's removal commit
208 :param message: message of the tag's removal commit
208 :param date: date of tag's removal commit
209 :param date: date of tag's removal commit
209
210
210 :raises TagDoesNotExistError: if tag with given name does not exists
211 :raises TagDoesNotExistError: if tag with given name does not exists
211 """
212 """
212 if name not in self.tags:
213 if name not in self.tags:
213 raise TagDoesNotExistError("Tag %s does not exist" % name)
214 raise TagDoesNotExistError("Tag %s does not exist" % name)
214
215
215 if message is None:
216 if message is None:
216 message = "Removed tag %s" % name
217 message = "Removed tag %s" % name
217 local = False
218 local = False
218
219
219 date, tz = date_to_timestamp_plus_offset(date)
220 date, tz = date_to_timestamp_plus_offset(date)
220
221
221 self._remote.tag(name, nullid, message, local, user, date, tz)
222 self._remote.tag(name, nullid, message, local, user, date, tz)
222 self._remote.invalidate_vcs_cache()
223 self._remote.invalidate_vcs_cache()
223 self._invalidate_prop_cache('tags')
224 self._invalidate_prop_cache('tags')
224
225
225 @LazyProperty
226 @LazyProperty
226 def bookmarks(self):
227 def bookmarks(self):
227 """
228 """
228 Gets bookmarks for this repository
229 Gets bookmarks for this repository
229 """
230 """
230 return self._get_bookmarks()
231 return self._get_bookmarks()
231
232
232 def _get_bookmarks(self):
233 def _get_bookmarks(self):
233 if self.is_empty():
234 if self.is_empty():
234 return {}
235 return {}
235
236
236 def get_name(ctx):
237 def get_name(ctx):
237 return ctx[0]
238 return ctx[0]
238
239
239 _bookmarks = [
240 _bookmarks = [
240 (n, h) for n, h in
241 (n, h) for n, h in
241 self._remote.bookmarks().items()]
242 self._remote.bookmarks().items()]
242
243
243 return OrderedDict(sorted(_bookmarks, key=get_name))
244 return OrderedDict(sorted(_bookmarks, key=get_name))
244
245
245 def _get_all_commit_ids(self):
246 def _get_all_commit_ids(self):
246 return self._remote.get_all_commit_ids('visible')
247 return self._remote.get_all_commit_ids('visible')
247
248
248 def get_diff(
249 def get_diff(
249 self, commit1, commit2, path='', ignore_whitespace=False,
250 self, commit1, commit2, path='', ignore_whitespace=False,
250 context=3, path1=None):
251 context=3, path1=None):
251 """
252 """
252 Returns (git like) *diff*, as plain text. Shows changes introduced by
253 Returns (git like) *diff*, as plain text. Shows changes introduced by
253 `commit2` since `commit1`.
254 `commit2` since `commit1`.
254
255
255 :param commit1: Entry point from which diff is shown. Can be
256 :param commit1: Entry point from which diff is shown. Can be
256 ``self.EMPTY_COMMIT`` - in this case, patch showing all
257 ``self.EMPTY_COMMIT`` - in this case, patch showing all
257 the changes since empty state of the repository until `commit2`
258 the changes since empty state of the repository until `commit2`
258 :param commit2: Until which commit changes should be shown.
259 :param commit2: Until which commit changes should be shown.
259 :param ignore_whitespace: If set to ``True``, would not show whitespace
260 :param ignore_whitespace: If set to ``True``, would not show whitespace
260 changes. Defaults to ``False``.
261 changes. Defaults to ``False``.
261 :param context: How many lines before/after changed lines should be
262 :param context: How many lines before/after changed lines should be
262 shown. Defaults to ``3``.
263 shown. Defaults to ``3``.
263 """
264 """
264 self._validate_diff_commits(commit1, commit2)
265 self._validate_diff_commits(commit1, commit2)
265 if path1 is not None and path1 != path:
266 if path1 is not None and path1 != path:
266 raise ValueError("Diff of two different paths not supported.")
267 raise ValueError("Diff of two different paths not supported.")
267
268
268 if path:
269 if path:
269 file_filter = [self.path, path]
270 file_filter = [self.path, path]
270 else:
271 else:
271 file_filter = None
272 file_filter = None
272
273
273 diff = self._remote.diff(
274 diff = self._remote.diff(
274 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
275 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
275 opt_git=True, opt_ignorews=ignore_whitespace,
276 opt_git=True, opt_ignorews=ignore_whitespace,
276 context=context)
277 context=context)
277 return MercurialDiff(diff)
278 return MercurialDiff(diff)
278
279
279 def strip(self, commit_id, branch=None):
280 def strip(self, commit_id, branch=None):
280 self._remote.strip(commit_id, update=False, backup=False)
281 self._remote.strip(commit_id, update=False, backup=False)
281
282
282 self._remote.invalidate_vcs_cache()
283 self._remote.invalidate_vcs_cache()
283 # clear cache
284 # clear cache
284 self._invalidate_prop_cache('commit_ids')
285 self._invalidate_prop_cache('commit_ids')
285
286
286 return len(self.commit_ids)
287 return len(self.commit_ids)
287
288
288 def verify(self):
289 def verify(self):
289 verify = self._remote.verify()
290 verify = self._remote.verify()
290
291
291 self._remote.invalidate_vcs_cache()
292 self._remote.invalidate_vcs_cache()
292 return verify
293 return verify
293
294
294 def hg_update_cache(self):
295 def hg_update_cache(self):
295 update_cache = self._remote.hg_update_cache()
296 update_cache = self._remote.hg_update_cache()
296
297
297 self._remote.invalidate_vcs_cache()
298 self._remote.invalidate_vcs_cache()
298 return update_cache
299 return update_cache
299
300
300 def hg_rebuild_fn_cache(self):
301 def hg_rebuild_fn_cache(self):
301 update_cache = self._remote.hg_rebuild_fn_cache()
302 update_cache = self._remote.hg_rebuild_fn_cache()
302
303
303 self._remote.invalidate_vcs_cache()
304 self._remote.invalidate_vcs_cache()
304 return update_cache
305 return update_cache
305
306
306 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
307 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
307 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
308 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
308 self, commit_id1, repo2, commit_id2)
309 self, commit_id1, repo2, commit_id2)
309
310
310 if commit_id1 == commit_id2:
311 if commit_id1 == commit_id2:
311 return commit_id1
312 return commit_id1
312
313
313 ancestors = self._remote.revs_from_revspec(
314 ancestors = self._remote.revs_from_revspec(
314 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
315 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
315 other_path=repo2.path)
316 other_path=repo2.path)
316
317
317 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
318 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
318
319
319 log.debug('Found common ancestor with sha: %s', ancestor_id)
320 log.debug('Found common ancestor with sha: %s', ancestor_id)
320 return ancestor_id
321 return ancestor_id
321
322
322 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
323 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
323 if commit_id1 == commit_id2:
324 if commit_id1 == commit_id2:
324 commits = []
325 commits = []
325 else:
326 else:
326 if merge:
327 if merge:
327 indexes = self._remote.revs_from_revspec(
328 indexes = self._remote.revs_from_revspec(
328 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
329 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
329 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
330 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
330 else:
331 else:
331 indexes = self._remote.revs_from_revspec(
332 indexes = self._remote.revs_from_revspec(
332 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
333 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
333 commit_id1, other_path=repo2.path)
334 commit_id1, other_path=repo2.path)
334
335
335 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
336 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
336 for idx in indexes]
337 for idx in indexes]
337
338
338 return commits
339 return commits
339
340
340 @staticmethod
341 @staticmethod
341 def check_url(url, config):
342 def check_url(url, config):
342 """
343 """
343 Function will check given url and try to verify if it's a valid
344 Function will check given url and try to verify if it's a valid
344 link. Sometimes it may happened that mercurial will issue basic
345 link. Sometimes it may happened that mercurial will issue basic
345 auth request that can cause whole API to hang when used from python
346 auth request that can cause whole API to hang when used from python
346 or other external calls.
347 or other external calls.
347
348
348 On failures it'll raise urllib2.HTTPError, exception is also thrown
349 On failures it'll raise urllib2.HTTPError, exception is also thrown
349 when the return code is non 200
350 when the return code is non 200
350 """
351 """
351 # check first if it's not an local url
352 # check first if it's not an local url
352 if os.path.isdir(url) or url.startswith('file:'):
353 if os.path.isdir(url) or url.startswith('file:'):
353 return True
354 return True
354
355
355 # Request the _remote to verify the url
356 # Request the _remote to verify the url
356 return connection.Hg.check_url(url, config.serialize())
357 return connection.Hg.check_url(url, config.serialize())
357
358
358 @staticmethod
359 @staticmethod
359 def is_valid_repository(path):
360 def is_valid_repository(path):
360 return os.path.isdir(os.path.join(path, '.hg'))
361 return os.path.isdir(os.path.join(path, '.hg'))
361
362
362 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
363 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
363 """
364 """
364 Function will check for mercurial repository in given path. If there
365 Function will check for mercurial repository in given path. If there
365 is no repository in that path it will raise an exception unless
366 is no repository in that path it will raise an exception unless
366 `create` parameter is set to True - in that case repository would
367 `create` parameter is set to True - in that case repository would
367 be created.
368 be created.
368
369
369 If `src_url` is given, would try to clone repository from the
370 If `src_url` is given, would try to clone repository from the
370 location at given clone_point. Additionally it'll make update to
371 location at given clone_point. Additionally it'll make update to
371 working copy accordingly to `do_workspace_checkout` flag.
372 working copy accordingly to `do_workspace_checkout` flag.
372 """
373 """
373 if create and os.path.exists(self.path):
374 if create and os.path.exists(self.path):
374 raise RepositoryError(
375 raise RepositoryError(
375 f"Cannot create repository at {self.path}, location already exist")
376 f"Cannot create repository at {self.path}, location already exist")
376
377
377 if src_url:
378 if src_url:
378 url = str(self._get_url(src_url))
379 url = str(self._get_url(src_url))
379 MercurialRepository.check_url(url, self.config)
380 MercurialRepository.check_url(url, self.config)
380
381
381 self._remote.clone(url, self.path, do_workspace_checkout)
382 self._remote.clone(url, self.path, do_workspace_checkout)
382
383
383 # Don't try to create if we've already cloned repo
384 # Don't try to create if we've already cloned repo
384 create = False
385 create = False
385
386
386 if create:
387 if create:
387 os.makedirs(self.path, mode=0o755)
388 os.makedirs(self.path, mode=0o755)
388
389
389 self._remote.localrepository(create)
390 self._remote.localrepository(create)
390
391
391 @LazyProperty
392 @LazyProperty
392 def in_memory_commit(self):
393 def in_memory_commit(self):
393 return MercurialInMemoryCommit(self)
394 return MercurialInMemoryCommit(self)
394
395
395 @LazyProperty
396 @LazyProperty
396 def description(self):
397 def description(self):
397 description = self._remote.get_config_value(
398 description = self._remote.get_config_value(
398 'web', 'description', untrusted=True)
399 'web', 'description', untrusted=True)
399 return safe_str(description or self.DEFAULT_DESCRIPTION)
400 return safe_str(description or self.DEFAULT_DESCRIPTION)
400
401
401 @LazyProperty
402 @LazyProperty
402 def contact(self):
403 def contact(self):
403 contact = (
404 contact = (
404 self._remote.get_config_value("web", "contact") or
405 self._remote.get_config_value("web", "contact") or
405 self._remote.get_config_value("ui", "username"))
406 self._remote.get_config_value("ui", "username"))
406 return safe_str(contact or self.DEFAULT_CONTACT)
407 return safe_str(contact or self.DEFAULT_CONTACT)
407
408
408 @LazyProperty
409 @LazyProperty
409 def last_change(self):
410 def last_change(self):
410 """
411 """
411 Returns last change made on this repository as
412 Returns last change made on this repository as
412 `datetime.datetime` object.
413 `datetime.datetime` object.
413 """
414 """
414 try:
415 try:
415 return self.get_commit().date
416 return self.get_commit().date
416 except RepositoryError:
417 except RepositoryError:
417 tzoffset = makedate()[1]
418 tzoffset = makedate()[1]
418 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
419 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
419
420
420 def _get_fs_mtime(self):
421 def _get_fs_mtime(self):
421 # fallback to filesystem
422 # fallback to filesystem
422 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
423 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
423 st_path = os.path.join(self.path, '.hg', "store")
424 st_path = os.path.join(self.path, '.hg', "store")
424 if os.path.exists(cl_path):
425 if os.path.exists(cl_path):
425 return os.stat(cl_path).st_mtime
426 return os.stat(cl_path).st_mtime
426 else:
427 else:
427 return os.stat(st_path).st_mtime
428 return os.stat(st_path).st_mtime
428
429
429 def _get_url(self, url):
430 def _get_url(self, url):
430 """
431 """
431 Returns normalized url. If schema is not given, would fall
432 Returns normalized url. If schema is not given, would fall
432 to filesystem
433 to filesystem
433 (``file:///``) schema.
434 (``file:///``) schema.
434 """
435 """
435 if url != 'default' and '://' not in url:
436 if url != 'default' and '://' not in url:
436 url = "file:" + urllib.request.pathname2url(url)
437 url = "file:" + urllib.request.pathname2url(url)
437 return url
438 return url
438
439
439 def get_hook_location(self):
440 def get_hook_location(self):
440 """
441 """
441 returns absolute path to location where hooks are stored
442 returns absolute path to location where hooks are stored
442 """
443 """
443 return os.path.join(self.path, '.hg', '.hgrc')
444 return os.path.join(self.path, '.hg', '.hgrc')
444
445
445 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
446 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
446 translate_tag=None, maybe_unreachable=False, reference_obj=None):
447 translate_tag=None, maybe_unreachable=False, reference_obj=None):
447 """
448 """
448 Returns ``MercurialCommit`` object representing repository's
449 Returns ``MercurialCommit`` object representing repository's
449 commit at the given `commit_id` or `commit_idx`.
450 commit at the given `commit_id` or `commit_idx`.
450 """
451 """
451 if self.is_empty():
452 if self.is_empty():
452 raise EmptyRepositoryError("There are no commits yet")
453 raise EmptyRepositoryError("There are no commits yet")
453
454
454 if commit_id is not None:
455 if commit_id is not None:
455 self._validate_commit_id(commit_id)
456 self._validate_commit_id(commit_id)
456 try:
457 try:
457 # we have cached idx, use it without contacting the remote
458 # we have cached idx, use it without contacting the remote
458 idx = self._commit_ids[commit_id]
459 idx = self._commit_ids[commit_id]
459 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
460 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
460 except KeyError:
461 except KeyError:
461 pass
462 pass
462
463
463 elif commit_idx is not None:
464 elif commit_idx is not None:
464 self._validate_commit_idx(commit_idx)
465 self._validate_commit_idx(commit_idx)
465 try:
466 try:
466 _commit_id = self.commit_ids[commit_idx]
467 _commit_id = self.commit_ids[commit_idx]
467 if commit_idx < 0:
468 if commit_idx < 0:
468 commit_idx = self.commit_ids.index(_commit_id)
469 commit_idx = self.commit_ids.index(_commit_id)
469
470
470 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
471 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
471 except IndexError:
472 except IndexError:
472 commit_id = commit_idx
473 commit_id = commit_idx
473 else:
474 else:
474 commit_id = "tip"
475 commit_id = "tip"
475
476
476 # case here is no cached version, do an actual lookup instead
477 # case here is no cached version, do an actual lookup instead
477 try:
478 try:
478 raw_id, idx = self._remote.lookup(commit_id, both=True)
479 raw_id, idx = self._remote.lookup(commit_id, both=True)
479 except CommitDoesNotExistError:
480 except CommitDoesNotExistError:
480 msg = "Commit {} does not exist for `{}`".format(
481 msg = "Commit {} does not exist for `{}`".format(
481 *map(safe_str, [commit_id, self.name]))
482 *map(safe_str, [commit_id, self.name]))
482 raise CommitDoesNotExistError(msg)
483 raise CommitDoesNotExistError(msg)
483
484
484 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
485 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
485
486
486 def get_commits(
487 def get_commits(
487 self, start_id=None, end_id=None, start_date=None, end_date=None,
488 self, start_id=None, end_id=None, start_date=None, end_date=None,
488 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
489 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
489 """
490 """
490 Returns generator of ``MercurialCommit`` objects from start to end
491 Returns generator of ``MercurialCommit`` objects from start to end
491 (both are inclusive)
492 (both are inclusive)
492
493
493 :param start_id: None, str(commit_id)
494 :param start_id: None, str(commit_id)
494 :param end_id: None, str(commit_id)
495 :param end_id: None, str(commit_id)
495 :param start_date: if specified, commits with commit date less than
496 :param start_date: if specified, commits with commit date less than
496 ``start_date`` would be filtered out from returned set
497 ``start_date`` would be filtered out from returned set
497 :param end_date: if specified, commits with commit date greater than
498 :param end_date: if specified, commits with commit date greater than
498 ``end_date`` would be filtered out from returned set
499 ``end_date`` would be filtered out from returned set
499 :param branch_name: if specified, commits not reachable from given
500 :param branch_name: if specified, commits not reachable from given
500 branch would be filtered out from returned set
501 branch would be filtered out from returned set
501 :param show_hidden: Show hidden commits such as obsolete or hidden from
502 :param show_hidden: Show hidden commits such as obsolete or hidden from
502 Mercurial evolve
503 Mercurial evolve
503 :raise BranchDoesNotExistError: If given ``branch_name`` does not
504 :raise BranchDoesNotExistError: If given ``branch_name`` does not
504 exist.
505 exist.
505 :raise CommitDoesNotExistError: If commit for given ``start`` or
506 :raise CommitDoesNotExistError: If commit for given ``start`` or
506 ``end`` could not be found.
507 ``end`` could not be found.
507 """
508 """
508 # actually we should check now if it's not an empty repo
509 # actually we should check now if it's not an empty repo
509 if self.is_empty():
510 if self.is_empty():
510 raise EmptyRepositoryError("There are no commits yet")
511 raise EmptyRepositoryError("There are no commits yet")
511 self._validate_branch_name(branch_name)
512 self._validate_branch_name(branch_name)
512
513
513 branch_ancestors = False
514 branch_ancestors = False
514 if start_id is not None:
515 if start_id is not None:
515 self._validate_commit_id(start_id)
516 self._validate_commit_id(start_id)
516 c_start = self.get_commit(commit_id=start_id)
517 c_start = self.get_commit(commit_id=start_id)
517 start_pos = self._commit_ids[c_start.raw_id]
518 start_pos = self._commit_ids[c_start.raw_id]
518 else:
519 else:
519 start_pos = None
520 start_pos = None
520
521
521 if end_id is not None:
522 if end_id is not None:
522 self._validate_commit_id(end_id)
523 self._validate_commit_id(end_id)
523 c_end = self.get_commit(commit_id=end_id)
524 c_end = self.get_commit(commit_id=end_id)
524 end_pos = max(0, self._commit_ids[c_end.raw_id])
525 end_pos = max(0, self._commit_ids[c_end.raw_id])
525 else:
526 else:
526 end_pos = None
527 end_pos = None
527
528
528 if None not in [start_id, end_id] and start_pos > end_pos:
529 if None not in [start_id, end_id] and start_pos > end_pos:
529 raise RepositoryError(
530 raise RepositoryError(
530 "Start commit '%s' cannot be after end commit '%s'" %
531 "Start commit '%s' cannot be after end commit '%s'" %
531 (start_id, end_id))
532 (start_id, end_id))
532
533
533 if end_pos is not None:
534 if end_pos is not None:
534 end_pos += 1
535 end_pos += 1
535
536
536 commit_filter = []
537 commit_filter = []
537
538
538 if branch_name and not branch_ancestors:
539 if branch_name and not branch_ancestors:
539 commit_filter.append(f'branch("{branch_name}")')
540 commit_filter.append(f'branch("{branch_name}")')
540 elif branch_name and branch_ancestors:
541 elif branch_name and branch_ancestors:
541 commit_filter.append(f'ancestors(branch("{branch_name}"))')
542 commit_filter.append(f'ancestors(branch("{branch_name}"))')
542
543
543 if start_date and not end_date:
544 if start_date and not end_date:
544 commit_filter.append(f'date(">{start_date}")')
545 commit_filter.append(f'date(">{start_date}")')
545 if end_date and not start_date:
546 if end_date and not start_date:
546 commit_filter.append(f'date("<{end_date}")')
547 commit_filter.append(f'date("<{end_date}")')
547 if start_date and end_date:
548 if start_date and end_date:
548 commit_filter.append(
549 commit_filter.append(
549 f'date(">{start_date}") and date("<{end_date}")')
550 f'date(">{start_date}") and date("<{end_date}")')
550
551
551 if not show_hidden:
552 if not show_hidden:
552 commit_filter.append('not obsolete()')
553 commit_filter.append('not obsolete()')
553 commit_filter.append('not hidden()')
554 commit_filter.append('not hidden()')
554
555
555 # TODO: johbo: Figure out a simpler way for this solution
556 # TODO: johbo: Figure out a simpler way for this solution
556 collection_generator = CollectionGenerator
557 collection_generator = CollectionGenerator
557 if commit_filter:
558 if commit_filter:
558 commit_filter = ' and '.join(map(safe_str, commit_filter))
559 commit_filter = ' and '.join(map(safe_str, commit_filter))
559 revisions = self._remote.rev_range([commit_filter])
560 revisions = self._remote.rev_range([commit_filter])
560 collection_generator = MercurialIndexBasedCollectionGenerator
561 collection_generator = MercurialIndexBasedCollectionGenerator
561 else:
562 else:
562 revisions = self.commit_ids
563 revisions = self.commit_ids
563
564
564 if start_pos or end_pos:
565 if start_pos or end_pos:
565 revisions = revisions[start_pos:end_pos]
566 revisions = revisions[start_pos:end_pos]
566
567
567 return collection_generator(self, revisions, pre_load=pre_load)
568 return collection_generator(self, revisions, pre_load=pre_load)
568
569
569 def pull(self, url, commit_ids=None):
570 def pull(self, url, commit_ids=None):
570 """
571 """
571 Pull changes from external location.
572 Pull changes from external location.
572
573
573 :param commit_ids: Optional. Can be set to a list of commit ids
574 :param commit_ids: Optional. Can be set to a list of commit ids
574 which shall be pulled from the other repository.
575 which shall be pulled from the other repository.
575 """
576 """
576 url = self._get_url(url)
577 url = self._get_url(url)
577 self._remote.pull(url, commit_ids=commit_ids)
578 self._remote.pull(url, commit_ids=commit_ids)
578 self._remote.invalidate_vcs_cache()
579 self._remote.invalidate_vcs_cache()
579
580
580 def fetch(self, url, commit_ids=None, **kwargs):
581 def fetch(self, url, commit_ids=None, **kwargs):
581 """
582 """
582 Backward compatibility with GIT fetch==pull
583 Backward compatibility with GIT fetch==pull
583 """
584 """
584 return self.pull(url, commit_ids=commit_ids)
585 return self.pull(url, commit_ids=commit_ids)
585
586
586 def push(self, url, **kwargs):
587 def push(self, url, **kwargs):
587 url = self._get_url(url)
588 url = self._get_url(url)
588 self._remote.sync_push(url)
589 self._remote.sync_push(url)
589
590
590 def _local_clone(self, clone_path):
591 def _local_clone(self, clone_path):
591 """
592 """
592 Create a local clone of the current repo.
593 Create a local clone of the current repo.
593 """
594 """
594 self._remote.clone(self.path, clone_path, update_after_clone=True,
595 self._remote.clone(self.path, clone_path, update_after_clone=True,
595 hooks=False)
596 hooks=False)
596
597
597 def _update(self, revision, clean=False):
598 def _update(self, revision, clean=False):
598 """
599 """
599 Update the working copy to the specified revision.
600 Update the working copy to the specified revision.
600 """
601 """
601 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
602 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
602 self._remote.update(revision, clean=clean)
603 self._remote.update(revision, clean=clean)
603
604
604 def _identify(self):
605 def _identify(self):
605 """
606 """
606 Return the current state of the working directory.
607 Return the current state of the working directory.
607 """
608 """
608 return self._remote.identify().strip().rstrip('+')
609 return self._remote.identify().strip().rstrip('+')
609
610
610 def _heads(self, branch=None):
611 def _heads(self, branch=None):
611 """
612 """
612 Return the commit ids of the repository heads.
613 Return the commit ids of the repository heads.
613 """
614 """
614 return self._remote.heads(branch=branch).strip().split(' ')
615 return self._remote.heads(branch=branch).strip().split(' ')
615
616
616 def _ancestor(self, revision1, revision2):
617 def _ancestor(self, revision1, revision2):
617 """
618 """
618 Return the common ancestor of the two revisions.
619 Return the common ancestor of the two revisions.
619 """
620 """
620 return self._remote.ancestor(revision1, revision2)
621 return self._remote.ancestor(revision1, revision2)
621
622
622 def _local_push(
623 def _local_push(
623 self, revision, repository_path, push_branches=False,
624 self, revision, repository_path, push_branches=False,
624 enable_hooks=False):
625 enable_hooks=False):
625 """
626 """
626 Push the given revision to the specified repository.
627 Push the given revision to the specified repository.
627
628
628 :param push_branches: allow to create branches in the target repo.
629 :param push_branches: allow to create branches in the target repo.
629 """
630 """
630 self._remote.push(
631 self._remote.push(
631 [revision], repository_path, hooks=enable_hooks,
632 [revision], repository_path, hooks=enable_hooks,
632 push_branches=push_branches)
633 push_branches=push_branches)
633
634
    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
        """
        Merge the given source_revision into the checked out revision.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.

        :param target_ref: reference the working dir is updated to before merging.
        :param merge_message: commit message used for a plain merge commit.
        :param user_name: committer name used for the merge commit.
        :param user_email: committer email used for the merge commit.
        :param source_ref: reference merged into ``target_ref``.
        :param use_rebase: rebase the source onto the target instead of
            creating a merge commit.
        :param close_commit_id: when set, this close-branch commit becomes
            the effective source tip for the ancestor checks and the merge.
        :param dry_run: accepted for interface symmetry; not consulted in
            this method's body.
        :raises UnresolvedFilesInRepo: when the merge/rebase stops on
            unresolved conflicts (after aborting and cleaning up).
        """

        source_ref_commit_id = source_ref.commit_id
        target_ref_commit_id = target_ref.commit_id

        # update our workdir to target ref, for proper merge
        self._update(target_ref_commit_id, clean=True)

        ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if close_commit_id:
            # NOTE(marcink): if we get the close commit, this is our new source
            # which will include the close commit itself.
            source_ref_commit_id = close_commit_id

        if ancestor == source_ref_commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref_commit_id, False

        elif ancestor == target_ref_commit_id and is_the_same_branch:
            # In this case we should force a commit message
            # (fast-forward: source already contains target; no merge commit
            # is created, the source tip is reported and needs pushing)
            return source_ref_commit_id, True

        unresolved = None
        if use_rebase:
            try:
                # temporary bookmark so the rebased head can be checked out
                # by name after the rebase moves commit ids around
                bookmark_name = f'rcbook{source_ref_commit_id}{target_ref_commit_id}'
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref_commit_id, dest=target_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError as e:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')
                if 'unresolved conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
        else:
            try:
                self._remote.merge(source_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str(f'{user_name} <{user_email}>'))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError as e:
                # The merge-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while merging shadow repo during merge.')
                if 'unresolved merge conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
715
716
    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_revision

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.

        :param close_message: optional message template; when empty, the
            configured ``HG_CLOSE_BRANCH_MESSAGE_TMPL`` is used. Either way
            the chosen string is run through ``str.format`` with
            ``user_name``, ``user_email``, ``target_ref_name`` and
            ``source_ref_name`` available as placeholders.
        """
        self._update(source_ref.commit_id)
        # NOTE(review): a caller-supplied close_message is also passed through
        # .format(); a literal '{' or '}' in a custom message would raise a
        # formatting error — confirm this is the intended contract.
        message = (close_message or vcs_settings.HG_CLOSE_BRANCH_MESSAGE_TMPL).format(
            user_name=user_name,
            user_email=user_email,
            target_ref_name=target_ref.name,
            source_ref_name=source_ref.name
        )
        try:
            self._remote.commit(
                message=safe_str(message),
                username=safe_str(f'{user_name} <{user_email}>'),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise
737
743
738 def _is_the_same_branch(self, target_ref, source_ref):
744 def _is_the_same_branch(self, target_ref, source_ref):
739 return (
745 return (
740 self._get_branch_name(target_ref) ==
746 self._get_branch_name(target_ref) ==
741 self._get_branch_name(source_ref))
747 self._get_branch_name(source_ref))
742
748
743 def _get_branch_name(self, ref):
749 def _get_branch_name(self, ref):
744 if ref.type == 'branch':
750 if ref.type == 'branch':
745 return ref.name
751 return ref.name
746 return self._remote.ctx_branch(ref.commit_id)
752 return self._remote.ctx_branch(ref.commit_id)
747
753
748 def _maybe_prepare_merge_workspace(
754 def _maybe_prepare_merge_workspace(
749 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
755 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
750 shadow_repository_path = self._get_shadow_repository_path(
756 shadow_repository_path = self._get_shadow_repository_path(
751 self.path, repo_id, workspace_id)
757 self.path, repo_id, workspace_id)
752 if not os.path.exists(shadow_repository_path):
758 if not os.path.exists(shadow_repository_path):
753 self._local_clone(shadow_repository_path)
759 self._local_clone(shadow_repository_path)
754 log.debug(
760 log.debug(
755 'Prepared shadow repository in %s', shadow_repository_path)
761 'Prepared shadow repository in %s', shadow_repository_path)
756
762
757 return shadow_repository_path
763 return shadow_repository_path
758
764
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """
        Merge ``source_ref`` from ``source_repo`` into ``target_ref`` of this
        repository via a shadow repository, returning a ``MergeResponse``.

        The flow: sanity-check the target head(s), prepare/pull the shadow
        repo, optionally close the source branch, run the local merge, and —
        unless ``dry_run`` — push the result (and any close commit) back to
        the origin with hooks enabled.
        """

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)

        # the target commit must be a current head, otherwise merging onto it
        # would create a new head behind the user's back
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                heads_all = self._heads(target_ref.name)
                # cap the head listing included in the failure metadata
                max_heads = 10
                if len(heads_all) > max_heads:
                    heads = '\n,'.join(
                        heads_all[:max_heads] +
                        [f'and {len(heads_all)-max_heads} more.'])
                else:
                    heads = '\n,'.join(heads_all)
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:

            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase,
                    close_commit_id=close_commit_id, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError as e:
                log.exception('Failure when doing local merge on hg shadow repo')
                metadata['unresolved_files'] = 'no unresolved files found'

                if isinstance(e, UnresolvedFilesInRepo):
                    all_conflicts = list(e.args[0])
                    # cap the conflict listing included in the metadata
                    max_conflicts = 20
                    if len(all_conflicts) > max_conflicts:
                        conflicts = all_conflicts[:max_conflicts] \
                            + [f'and {len(all_conflicts)-max_conflicts} more.']
                    else:
                        conflicts = all_conflicts
                    metadata['unresolved_files'] = \
                        '\n* conflict: ' + \
                        ('\n * conflict: '.join(conflicts))

                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)
930
936
931 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
937 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
932 config = self.config.copy()
938 config = self.config.copy()
933 if not enable_hooks:
939 if not enable_hooks:
934 config.clear_section('hooks')
940 config.clear_section('hooks')
935 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
941 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
936
942
937 def _validate_pull_reference(self, reference):
943 def _validate_pull_reference(self, reference):
938 if not (reference.name in self.bookmarks or
944 if not (reference.name in self.bookmarks or
939 reference.name in self.branches or
945 reference.name in self.branches or
940 self.get_commit(reference.commit_id)):
946 self.get_commit(reference.commit_id)):
941 raise CommitDoesNotExistError(
947 raise CommitDoesNotExistError(
942 'Unknown branch, bookmark or commit id')
948 'Unknown branch, bookmark or commit id')
943
949
944 def _local_pull(self, repository_path, reference):
950 def _local_pull(self, repository_path, reference):
945 """
951 """
946 Fetch a branch, bookmark or commit from a local repository.
952 Fetch a branch, bookmark or commit from a local repository.
947 """
953 """
948 repository_path = os.path.abspath(repository_path)
954 repository_path = os.path.abspath(repository_path)
949 if repository_path == self.path:
955 if repository_path == self.path:
950 raise ValueError('Cannot pull from the same repository')
956 raise ValueError('Cannot pull from the same repository')
951
957
952 reference_type_to_option_name = {
958 reference_type_to_option_name = {
953 'book': 'bookmark',
959 'book': 'bookmark',
954 'branch': 'branch',
960 'branch': 'branch',
955 }
961 }
956 option_name = reference_type_to_option_name.get(
962 option_name = reference_type_to_option_name.get(
957 reference.type, 'revision')
963 reference.type, 'revision')
958
964
959 if option_name == 'revision':
965 if option_name == 'revision':
960 ref = reference.commit_id
966 ref = reference.commit_id
961 else:
967 else:
962 ref = reference.name
968 ref = reference.name
963
969
964 options = {option_name: [ref]}
970 options = {option_name: [ref]}
965 self._remote.pull_cmd(repository_path, hooks=False, **options)
971 self._remote.pull_cmd(repository_path, hooks=False, **options)
966 self._remote.invalidate_vcs_cache()
972 self._remote.invalidate_vcs_cache()
967
973
def bookmark(self, bookmark, revision=None):
    """
    Create or move a bookmark.

    :param bookmark: bookmark name; ``str`` values are normalized through
        ``safe_str`` before being handed to the vcsserver remote.
    :param revision: revision to place the bookmark on; ``None`` lets
        Mercurial pick its default target.
    """
    name = safe_str(bookmark) if isinstance(bookmark, str) else bookmark
    self._remote.bookmark(name, revision=revision)
    # Bookmark changes alter repo state, so drop the remote's vcs cache.
    self._remote.invalidate_vcs_cache()
973
979
def get_path_permissions(self, username):
    """
    Build a path-permission checker for *username* from the repo's hgacl file.

    Reads ``.hg/hgacl`` (an INI file) from the repository and looks up
    include/exclude path patterns for the given user, falling back to the
    ``default`` user entry.  Returns a checker created via
    ``BasePathPermissionChecker.create_from_patterns``, or ``None`` when no
    hgacl file exists.

    :param username: user whose narrow-ACL patterns should be looked up.
    :raises exceptions.RepositoryRequirementError: when the hgacl file
        exists but cannot be read/parsed.
    """
    hgacl_file = os.path.join(self.path, '.hg/hgacl')

    def read_patterns(suffix):
        # NOTE: closes over `hgacl`, which is assigned below before this
        # helper is ever called.
        # Lookup order: user-specific before 'default', and the legacy
        # 'narrowacl' section before 'narrowhgacl'; first hit wins.
        svalue = None
        for section, option in [
                ('narrowacl', username + suffix),
                ('narrowacl', 'default' + suffix),
                ('narrowhgacl', username + suffix),
                ('narrowhgacl', 'default' + suffix)
                ]:
            try:
                svalue = hgacl.get(section, option)
                break  # stop at the first value we find
            except configparser.NoOptionError:
                pass
        if not svalue:
            return None
        # '/' is always included so the repository root itself is visible.
        result = ['/']
        for pattern in svalue.split():
            result.append(pattern)
            # A literal path (no glob chars) also implies everything
            # beneath it.
            if '*' not in pattern and '?' not in pattern:
                result.append(pattern + '/*')
        return result

    if os.path.exists(hgacl_file):
        try:
            hgacl = configparser.RawConfigParser()
            hgacl.read(hgacl_file)

            includes = read_patterns('.includes')
            excludes = read_patterns('.excludes')
            return BasePathPermissionChecker.create_from_patterns(
                includes, excludes)
        except BaseException as e:
            # Any failure reading the ACL file (parse error, IO error, ...)
            # is surfaced as a repository requirement problem.
            msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                hgacl_file, self.name, e)
            raise exceptions.RepositoryRequirementError(msg)
    else:
        # No hgacl file means no path restrictions apply.
        return None
1014
1020
1015
1021
class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
    """
    Commit collection generator that accepts either numeric commit
    indexes or commit-id strings as its items.
    """

    def _commit_factory(self, commit_id):
        # Integers are local revision indexes; anything else is treated
        # as a commit id (hash).
        kwargs = {'pre_load': self.pre_load}
        if isinstance(commit_id, int):
            kwargs['commit_idx'] = commit_id
        else:
            kwargs['commit_id'] = commit_id
        return self.repo.get_commit(**kwargs)
@@ -1,73 +1,76 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 Internal settings for vcs-lib
20 Internal settings for vcs-lib
21 """
21 """
22
22
# list of default encoding used in safe_str methods
DEFAULT_ENCODINGS = ['utf8']


# Compatibility version when creating SVN repositories. None means newest.
# Other available options are: pre-1.4-compatible, pre-1.5-compatible,
# pre-1.6-compatible, pre-1.8-compatible
SVN_COMPATIBLE_VERSION = None

# Short aliases of the supported VCS backends.
ALIASES = ['hg', 'git', 'svn']

# Maps each alias to the dotted path of its repository backend class.
BACKENDS = {
    'hg': 'rhodecode.lib.vcs.backends.hg.MercurialRepository',
    'git': 'rhodecode.lib.vcs.backends.git.GitRepository',
    'svn': 'rhodecode.lib.vcs.backends.svn.SubversionRepository',
}


# Supported archive formats as (format id, mimetype, file extension) tuples;
# a format may appear multiple times, once per accepted extension.
ARCHIVE_SPECS = [
    ('tbz2', 'application/x-bzip2', '.tbz2'),
    ('tbz2', 'application/x-bzip2', '.tar.bz2'),

    ('tgz', 'application/x-gzip', '.tgz'),
    ('tgz', 'application/x-gzip', '.tar.gz'),

    ('zip', 'application/zip', '.zip'),
]

# Hooks callback settings; HOOKS_PROTOCOL of None is presumably replaced at
# startup from configuration -- TODO confirm against the app bootstrap code.
HOOKS_PROTOCOL = None
HOOKS_HOST = '127.0.0.1'


# Template for the commit message of a pull-request merge commit.
MERGE_MESSAGE_TMPL = (
    'Merge pull request !{pr_id} from {source_repo} {source_ref_name}\n\n '
    '{pr_title}')
# Placeholder commit message / author used for dry-run (simulated) merges.
MERGE_DRY_RUN_MESSAGE = 'dry_run_merge_message_from_rhodecode'
MERGE_DRY_RUN_USER = 'Dry-Run User'
MERGE_DRY_RUN_EMAIL = 'dry-run-merge@rhodecode.com'
# Template for the commit message used when closing a Mercurial branch.
HG_CLOSE_BRANCH_MESSAGE_TMPL = (
    'Closing branch: `{source_ref_name}`'
)
61
64
62
65
def available_aliases():
    """
    Return the list of enabled VCS backend aliases.

    Mercurial is required for the system to work, so in case vcs.backends
    does not include it, we make sure it will be available internally.
    TODO: anderson: refactor vcs.backends so it won't be necessary, VCS
    server should be responsible to dictate available backends.
    """
    aliases = list(ALIASES)
    if 'hg' not in aliases:
        aliases.append('hg')
    return aliases
General Comments 0
You need to be logged in to leave comments. Login now