Future-proof path permissions for Mercurial 5.0
idlsoft
r3646:01e4dac7 default
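Context for the change below (an illustrative note, not part of the original commit page): `get_path_permissions` reads narrow ACL patterns from the repository's `.hg/hgacl` file. The diff extends the lookup in `read_patterns` so that a `[narrowacl]` section is consulted before the legacy `[narrowhgacl]` section, trying the user-specific option before the `default` one in each. A minimal standalone sketch of that lookup order follows; the section and option names come from the diff, while the file contents, user names and the extra NoSectionError handling are assumptions made for this example only.

# Illustrative sketch only -- not part of the commit. It reproduces the lookup
# order introduced in read_patterns() below against an in-memory ACL file.
import configparser
import textwrap

EXAMPLE_HGACL = textwrap.dedent("""\
    [narrowacl]
    default.includes = docs/* README.rst

    [narrowhgacl]
    someuser.includes = legacy/*
    """)

hgacl = configparser.RawConfigParser()
hgacl.read_string(EXAMPLE_HGACL)

def lookup(username, suffix):
    # Same precedence as the new code: [narrowacl] first, user-specific
    # option before the 'default' one, then the legacy [narrowhgacl] section.
    # NoSectionError is also caught here (an addition for this sketch) so it
    # runs against files that carry only one of the two sections.
    for section, option in [
            ('narrowacl', username + suffix),
            ('narrowacl', 'default' + suffix),
            ('narrowhgacl', username + suffix),
            ('narrowhgacl', 'default' + suffix)]:
        try:
            return hgacl.get(section, option)
        except (configparser.NoSectionError, configparser.NoOptionError):
            pass
    return None

print(lookup('someuser', '.includes'))   # 'docs/* README.rst' -- [narrowacl] default wins
print(lookup('otheruser', '.excludes'))  # None -- no matching option in either section

The production code in the diff tries the same four (section, option) pairs in this order and stops at the first value it finds.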
@@ -1,937 +1,941 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2014-2019 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
HG repository module
"""
import os
import logging
import binascii
import urllib

from zope.cachedescriptors.property import Lazy as LazyProperty

from rhodecode.lib.compat import OrderedDict
from rhodecode.lib.datelib import (
    date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
from rhodecode.lib.utils import safe_unicode, safe_str
from rhodecode.lib.vcs import connection, exceptions
from rhodecode.lib.vcs.backends.base import (
    BaseRepository, CollectionGenerator, Config, MergeResponse,
    MergeFailureReason, Reference, BasePathPermissionChecker)
from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
from rhodecode.lib.vcs.exceptions import (
    EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
    TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError)
from rhodecode.lib.vcs.compat import configparser

hexlify = binascii.hexlify
nullid = "\0" * 20

log = logging.getLogger(__name__)


class MercurialRepository(BaseRepository):
54 """
54 """
55 Mercurial repository backend
55 Mercurial repository backend
56 """
56 """
57 DEFAULT_BRANCH_NAME = 'default'
57 DEFAULT_BRANCH_NAME = 'default'
58
58
59 def __init__(self, repo_path, config=None, create=False, src_url=None,
59 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 do_workspace_checkout=False, with_wire=None, bare=False):
60 do_workspace_checkout=False, with_wire=None, bare=False):
61 """
61 """
62 Raises RepositoryError if repository could not be find at the given
62 Raises RepositoryError if repository could not be find at the given
63 ``repo_path``.
63 ``repo_path``.
64
64
65 :param repo_path: local path of the repository
65 :param repo_path: local path of the repository
66 :param config: config object containing the repo configuration
66 :param config: config object containing the repo configuration
67 :param create=False: if set to True, would try to create repository if
67 :param create=False: if set to True, would try to create repository if
68 it does not exist rather than raising exception
68 it does not exist rather than raising exception
69 :param src_url=None: would try to clone repository from given location
69 :param src_url=None: would try to clone repository from given location
70 :param do_workspace_checkout=False: sets update of working copy after
70 :param do_workspace_checkout=False: sets update of working copy after
71 making a clone
71 making a clone
72 :param bare: not used, compatible with other VCS
72 :param bare: not used, compatible with other VCS
73 """
73 """
74
74
75 self.path = safe_str(os.path.abspath(repo_path))
75 self.path = safe_str(os.path.abspath(repo_path))
76 # mercurial since 4.4.X requires certain configuration to be present
76 # mercurial since 4.4.X requires certain configuration to be present
77 # because sometimes we init the repos with config we need to meet
77 # because sometimes we init the repos with config we need to meet
78 # special requirements
78 # special requirements
79 self.config = config if config else self.get_default_config(
79 self.config = config if config else self.get_default_config(
80 default=[('extensions', 'largefiles', '1')])
80 default=[('extensions', 'largefiles', '1')])
81 self.with_wire = with_wire
81 self.with_wire = with_wire
82
82
83 self._init_repo(create, src_url, do_workspace_checkout)
83 self._init_repo(create, src_url, do_workspace_checkout)
84
84
85 # caches
85 # caches
86 self._commit_ids = {}
86 self._commit_ids = {}
87
87
88 @LazyProperty
88 @LazyProperty
89 def _remote(self):
89 def _remote(self):
90 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
90 return connection.Hg(self.path, self.config, with_wire=self.with_wire)
91
91
92 @LazyProperty
92 @LazyProperty
93 def commit_ids(self):
93 def commit_ids(self):
94 """
94 """
95 Returns list of commit ids, in ascending order. Being lazy
95 Returns list of commit ids, in ascending order. Being lazy
96 attribute allows external tools to inject shas from cache.
96 attribute allows external tools to inject shas from cache.
97 """
97 """
98 commit_ids = self._get_all_commit_ids()
98 commit_ids = self._get_all_commit_ids()
99 self._rebuild_cache(commit_ids)
99 self._rebuild_cache(commit_ids)
100 return commit_ids
100 return commit_ids
101
101
102 def _rebuild_cache(self, commit_ids):
102 def _rebuild_cache(self, commit_ids):
103 self._commit_ids = dict((commit_id, index)
103 self._commit_ids = dict((commit_id, index)
104 for index, commit_id in enumerate(commit_ids))
104 for index, commit_id in enumerate(commit_ids))
105
105
106 @LazyProperty
106 @LazyProperty
107 def branches(self):
107 def branches(self):
108 return self._get_branches()
108 return self._get_branches()
109
109
110 @LazyProperty
110 @LazyProperty
111 def branches_closed(self):
111 def branches_closed(self):
112 return self._get_branches(active=False, closed=True)
112 return self._get_branches(active=False, closed=True)
113
113
114 @LazyProperty
114 @LazyProperty
115 def branches_all(self):
115 def branches_all(self):
116 all_branches = {}
116 all_branches = {}
117 all_branches.update(self.branches)
117 all_branches.update(self.branches)
118 all_branches.update(self.branches_closed)
118 all_branches.update(self.branches_closed)
119 return all_branches
119 return all_branches
120
120
121 def _get_branches(self, active=True, closed=False):
121 def _get_branches(self, active=True, closed=False):
122 """
122 """
123 Gets branches for this repository
123 Gets branches for this repository
124 Returns only not closed active branches by default
124 Returns only not closed active branches by default
125
125
126 :param active: return also active branches
126 :param active: return also active branches
127 :param closed: return also closed branches
127 :param closed: return also closed branches
128
128
129 """
129 """
130 if self.is_empty():
130 if self.is_empty():
131 return {}
131 return {}
132
132
133 def get_name(ctx):
133 def get_name(ctx):
134 return ctx[0]
134 return ctx[0]
135
135
136 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
136 _branches = [(safe_unicode(n), hexlify(h),) for n, h in
137 self._remote.branches(active, closed).items()]
137 self._remote.branches(active, closed).items()]
138
138
139 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
139 return OrderedDict(sorted(_branches, key=get_name, reverse=False))

    @LazyProperty
    def tags(self):
        """
        Gets tags for this repository
        """
        return self._get_tags()

    def _get_tags(self):
        if self.is_empty():
            return {}

        def get_name(ctx):
            return ctx[0]

        _tags = [(safe_unicode(n), hexlify(h),) for n, h in
                 self._remote.tags().items()]

        return OrderedDict(sorted(_tags, key=get_name, reverse=True))

    def tag(self, name, user, commit_id=None, message=None, date=None,
            **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)
        commit = self.get_commit(commit_id=commit_id)
        local = kwargs.setdefault('local', False)

        if message is None:
            message = "Added tag %s for commit %s" % (name, commit.short_id)

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(
            name, commit.raw_id, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags
        self.tags = self._get_tags()
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)

    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given `name`.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)
        if message is None:
            message = "Removed tag %s" % name
        local = False

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(name, nullid, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()
        self.tags = self._get_tags()

    @LazyProperty
    def bookmarks(self):
        """
        Gets bookmarks for this repository
        """
        return self._get_bookmarks()

    def _get_bookmarks(self):
        if self.is_empty():
            return {}

        def get_name(ctx):
            return ctx[0]

        _bookmarks = [
            (safe_unicode(n), hexlify(h)) for n, h in
            self._remote.bookmarks().items()]

        return OrderedDict(sorted(_bookmarks, key=get_name))

    def _get_all_commit_ids(self):
        return self._remote.get_all_commit_ids('visible')

    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
            ``self.EMPTY_COMMIT`` - in this case, patch showing all
            the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
            changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
            shown. Defaults to ``3``.
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            file_filter = [self.path, path]
        else:
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_git=True, opt_ignorews=ignore_whitespace,
            context=context)
        return MercurialDiff(diff)

    def strip(self, commit_id, branch=None):
        self._remote.strip(commit_id, update=False, backup="none")

        self._remote.invalidate_vcs_cache()
        self.commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(self.commit_ids)

    def verify(self):
        verify = self._remote.verify()

        self._remote.invalidate_vcs_cache()
        return verify

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        if commit_id1 == commit_id2:
            return commit_id1

        ancestors = self._remote.revs_from_revspec(
            "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
            other_path=repo2.path)
        return repo2[ancestors[0]].raw_id if ancestors else None

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        if commit_id1 == commit_id2:
            commits = []
        else:
            if merge:
                indexes = self._remote.revs_from_revspec(
                    "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
                    commit_id2, commit_id1, commit_id1, other_path=repo2.path)
            else:
                indexes = self._remote.revs_from_revspec(
                    "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
                    commit_id1, other_path=repo2.path)

            commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
                       for idx in indexes]

        return commits

    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link. Sometimes it may happened that mercurial will issue basic
        auth request that can cause whole API to hang when used from python
        or other external calls.

        On failures it'll raise urllib2.HTTPError, exception is also thrown
        when the return code is non 200
        """
        # check first if it's not an local url
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # Request the _remote to verify the url
        return connection.Hg.check_url(url, config.serialize())

    @staticmethod
    def is_valid_repository(path):
        return os.path.isdir(os.path.join(path, '.hg'))

    def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
        """
        Function will check for mercurial repository in given path. If there
        is no repository in that path it will raise an exception unless
        `create` parameter is set to True - in that case repository would
        be created.

        If `src_url` is given, would try to clone repository from the
        location at given clone_point. Additionally it'll make update to
        working copy accordingly to `do_workspace_checkout` flag.
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exist"
                % self.path)

        if src_url:
            url = str(self._get_url(src_url))
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, do_workspace_checkout)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            os.makedirs(self.path, mode=0o755)

        self._remote.localrepository(create)

    @LazyProperty
    def in_memory_commit(self):
        return MercurialInMemoryCommit(self)

    @LazyProperty
    def description(self):
        description = self._remote.get_config_value(
            'web', 'description', untrusted=True)
        return safe_unicode(description or self.DEFAULT_DESCRIPTION)

    @LazyProperty
    def contact(self):
        contact = (
            self._remote.get_config_value("web", "contact") or
            self._remote.get_config_value("ui", "username"))
        return safe_unicode(contact or self.DEFAULT_CONTACT)

    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)

    def _get_fs_mtime(self):
        # fallback to filesystem
        cl_path = os.path.join(self.path, '.hg', "00changelog.i")
        st_path = os.path.join(self.path, '.hg', "store")
        if os.path.exists(cl_path):
            return os.stat(cl_path).st_mtime
        else:
            return os.stat(st_path).st_mtime

    def _get_url(self, url):
        """
        Returns normalized url. If schema is not given, would fall
        to filesystem
        (``file:///``) schema.
        """
        url = url.encode('utf8')
        if url != 'default' and '://' not in url:
            url = "file:" + urllib.pathname2url(url)
        return url

    def get_hook_location(self):
        """
        returns absolute path to location where hooks are stored
        """
        return os.path.join(self.path, '.hg', '.hgrc')

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass
        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                id_ = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    commit_idx += len(self.commit_ids)
                return MercurialCommit(
                    self, id_, commit_idx, pre_load=pre_load)
            except IndexError:
                commit_id = commit_idx
        else:
            commit_id = "tip"

        if isinstance(commit_id, unicode):
            commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            msg = "Commit %s does not exist for %s" % (
                commit_id, self)
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
457 def get_commits(
457 def get_commits(
458 self, start_id=None, end_id=None, start_date=None, end_date=None,
458 self, start_id=None, end_id=None, start_date=None, end_date=None,
459 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
459 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
460 """
460 """
461 Returns generator of ``MercurialCommit`` objects from start to end
461 Returns generator of ``MercurialCommit`` objects from start to end
462 (both are inclusive)
462 (both are inclusive)
463
463
464 :param start_id: None, str(commit_id)
464 :param start_id: None, str(commit_id)
465 :param end_id: None, str(commit_id)
465 :param end_id: None, str(commit_id)
466 :param start_date: if specified, commits with commit date less than
466 :param start_date: if specified, commits with commit date less than
467 ``start_date`` would be filtered out from returned set
467 ``start_date`` would be filtered out from returned set
468 :param end_date: if specified, commits with commit date greater than
468 :param end_date: if specified, commits with commit date greater than
469 ``end_date`` would be filtered out from returned set
469 ``end_date`` would be filtered out from returned set
470 :param branch_name: if specified, commits not reachable from given
470 :param branch_name: if specified, commits not reachable from given
471 branch would be filtered out from returned set
471 branch would be filtered out from returned set
472 :param show_hidden: Show hidden commits such as obsolete or hidden from
472 :param show_hidden: Show hidden commits such as obsolete or hidden from
473 Mercurial evolve
473 Mercurial evolve
474 :raise BranchDoesNotExistError: If given ``branch_name`` does not
474 :raise BranchDoesNotExistError: If given ``branch_name`` does not
475 exist.
475 exist.
476 :raise CommitDoesNotExistError: If commit for given ``start`` or
476 :raise CommitDoesNotExistError: If commit for given ``start`` or
477 ``end`` could not be found.
477 ``end`` could not be found.
478 """
478 """
479 # actually we should check now if it's not an empty repo
479 # actually we should check now if it's not an empty repo
480 branch_ancestors = False
480 branch_ancestors = False
481 if self.is_empty():
481 if self.is_empty():
482 raise EmptyRepositoryError("There are no commits yet")
482 raise EmptyRepositoryError("There are no commits yet")
483 self._validate_branch_name(branch_name)
483 self._validate_branch_name(branch_name)
484
484
485 if start_id is not None:
485 if start_id is not None:
486 self._validate_commit_id(start_id)
486 self._validate_commit_id(start_id)
487 c_start = self.get_commit(commit_id=start_id)
487 c_start = self.get_commit(commit_id=start_id)
488 start_pos = self._commit_ids[c_start.raw_id]
488 start_pos = self._commit_ids[c_start.raw_id]
489 else:
489 else:
490 start_pos = None
490 start_pos = None
491
491
492 if end_id is not None:
492 if end_id is not None:
493 self._validate_commit_id(end_id)
493 self._validate_commit_id(end_id)
494 c_end = self.get_commit(commit_id=end_id)
494 c_end = self.get_commit(commit_id=end_id)
495 end_pos = max(0, self._commit_ids[c_end.raw_id])
495 end_pos = max(0, self._commit_ids[c_end.raw_id])
496 else:
496 else:
497 end_pos = None
497 end_pos = None
498
498
499 if None not in [start_id, end_id] and start_pos > end_pos:
499 if None not in [start_id, end_id] and start_pos > end_pos:
500 raise RepositoryError(
500 raise RepositoryError(
501 "Start commit '%s' cannot be after end commit '%s'" %
501 "Start commit '%s' cannot be after end commit '%s'" %
502 (start_id, end_id))
502 (start_id, end_id))
503
503
504 if end_pos is not None:
504 if end_pos is not None:
505 end_pos += 1
505 end_pos += 1
506
506
507 commit_filter = []
507 commit_filter = []
508
508
509 if branch_name and not branch_ancestors:
509 if branch_name and not branch_ancestors:
510 commit_filter.append('branch("%s")' % (branch_name,))
510 commit_filter.append('branch("%s")' % (branch_name,))
511 elif branch_name and branch_ancestors:
511 elif branch_name and branch_ancestors:
512 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
512 commit_filter.append('ancestors(branch("%s"))' % (branch_name,))
513
513
514 if start_date and not end_date:
514 if start_date and not end_date:
515 commit_filter.append('date(">%s")' % (start_date,))
515 commit_filter.append('date(">%s")' % (start_date,))
516 if end_date and not start_date:
516 if end_date and not start_date:
517 commit_filter.append('date("<%s")' % (end_date,))
517 commit_filter.append('date("<%s")' % (end_date,))
518 if start_date and end_date:
518 if start_date and end_date:
519 commit_filter.append(
519 commit_filter.append(
520 'date(">%s") and date("<%s")' % (start_date, end_date))
520 'date(">%s") and date("<%s")' % (start_date, end_date))
521
521
522 if not show_hidden:
522 if not show_hidden:
523 commit_filter.append('not obsolete()')
523 commit_filter.append('not obsolete()')
524 commit_filter.append('not hidden()')
524 commit_filter.append('not hidden()')
525
525
526 # TODO: johbo: Figure out a simpler way for this solution
526 # TODO: johbo: Figure out a simpler way for this solution
527 collection_generator = CollectionGenerator
527 collection_generator = CollectionGenerator
528 if commit_filter:
528 if commit_filter:
529 commit_filter = ' and '.join(map(safe_str, commit_filter))
529 commit_filter = ' and '.join(map(safe_str, commit_filter))
530 revisions = self._remote.rev_range([commit_filter])
530 revisions = self._remote.rev_range([commit_filter])
531 collection_generator = MercurialIndexBasedCollectionGenerator
531 collection_generator = MercurialIndexBasedCollectionGenerator
532 else:
532 else:
533 revisions = self.commit_ids
533 revisions = self.commit_ids
534
534
535 if start_pos or end_pos:
535 if start_pos or end_pos:
536 revisions = revisions[start_pos:end_pos]
536 revisions = revisions[start_pos:end_pos]
537
537
538 return collection_generator(self, revisions, pre_load=pre_load)
538 return collection_generator(self, revisions, pre_load=pre_load)

    def pull(self, url, commit_ids=None):
        """
        Pull changes from external location.

        :param commit_ids: Optional. Can be set to a list of commit ids
           which shall be pulled from the other repository.
        """
        url = self._get_url(url)
        self._remote.pull(url, commit_ids=commit_ids)
        self._remote.invalidate_vcs_cache()

    def fetch(self, url, commit_ids=None):
        """
        Backward compatibility with GIT fetch==pull
        """
        return self.pull(url, commit_ids=commit_ids)

    def push(self, url):
        url = self._get_url(url)
        self._remote.sync_push(url)

    def _local_clone(self, clone_path):
        """
        Create a local clone of the current repo.
        """
        self._remote.clone(self.path, clone_path, update_after_clone=True,
                           hooks=False)

    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)

    def _identify(self):
        """
        Return the current state of the working directory.
        """
        return self._remote.identify().strip().rstrip('+')

    def _heads(self, branch=None):
        """
        Return the commit ids of the repository heads.
        """
        return self._remote.heads(branch=branch).strip().split(' ')

    def _ancestor(self, revision1, revision2):
        """
        Return the common ancestor of the two revisions.
        """
        return self._remote.ancestor(revision1, revision2)
593 def _local_push(
593 def _local_push(
594 self, revision, repository_path, push_branches=False,
594 self, revision, repository_path, push_branches=False,
595 enable_hooks=False):
595 enable_hooks=False):
596 """
596 """
597 Push the given revision to the specified repository.
597 Push the given revision to the specified repository.
598
598
599 :param push_branches: allow to create branches in the target repo.
599 :param push_branches: allow to create branches in the target repo.
600 """
600 """
601 self._remote.push(
601 self._remote.push(
602 [revision], repository_path, hooks=enable_hooks,
602 [revision], repository_path, hooks=enable_hooks,
603 push_branches=push_branches)
603 push_branches=push_branches)
604
604
605 def _local_merge(self, target_ref, merge_message, user_name, user_email,
605 def _local_merge(self, target_ref, merge_message, user_name, user_email,
606 source_ref, use_rebase=False, dry_run=False):
606 source_ref, use_rebase=False, dry_run=False):
607 """
607 """
608 Merge the given source_revision into the checked out revision.
608 Merge the given source_revision into the checked out revision.
609
609
610 Returns the commit id of the merge and a boolean indicating if the
610 Returns the commit id of the merge and a boolean indicating if the
611 commit needs to be pushed.
611 commit needs to be pushed.
612 """
612 """
613 self._update(target_ref.commit_id, clean=True)
613 self._update(target_ref.commit_id, clean=True)
614
614
615 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
615 ancestor = self._ancestor(target_ref.commit_id, source_ref.commit_id)
616 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
616 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
617
617
618 if ancestor == source_ref.commit_id:
618 if ancestor == source_ref.commit_id:
619 # Nothing to do, the changes were already integrated
619 # Nothing to do, the changes were already integrated
620 return target_ref.commit_id, False
620 return target_ref.commit_id, False
621
621
622 elif ancestor == target_ref.commit_id and is_the_same_branch:
622 elif ancestor == target_ref.commit_id and is_the_same_branch:
623 # In this case we should force a commit message
623 # In this case we should force a commit message
624 return source_ref.commit_id, True
624 return source_ref.commit_id, True
625
625
626 if use_rebase:
626 if use_rebase:
627 try:
627 try:
628 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
628 bookmark_name = 'rcbook%s%s' % (source_ref.commit_id,
629 target_ref.commit_id)
629 target_ref.commit_id)
630 self.bookmark(bookmark_name, revision=source_ref.commit_id)
630 self.bookmark(bookmark_name, revision=source_ref.commit_id)
631 self._remote.rebase(
631 self._remote.rebase(
632 source=source_ref.commit_id, dest=target_ref.commit_id)
632 source=source_ref.commit_id, dest=target_ref.commit_id)
633 self._remote.invalidate_vcs_cache()
633 self._remote.invalidate_vcs_cache()
634 self._update(bookmark_name, clean=True)
634 self._update(bookmark_name, clean=True)
635 return self._identify(), True
635 return self._identify(), True
636 except RepositoryError:
636 except RepositoryError:
637 # The rebase-abort may raise another exception which 'hides'
637 # The rebase-abort may raise another exception which 'hides'
638 # the original one, therefore we log it here.
638 # the original one, therefore we log it here.
639 log.exception('Error while rebasing shadow repo during merge.')
639 log.exception('Error while rebasing shadow repo during merge.')
640
640
641 # Cleanup any rebase leftovers
641 # Cleanup any rebase leftovers
642 self._remote.invalidate_vcs_cache()
642 self._remote.invalidate_vcs_cache()
643 self._remote.rebase(abort=True)
643 self._remote.rebase(abort=True)
644 self._remote.invalidate_vcs_cache()
644 self._remote.invalidate_vcs_cache()
645 self._remote.update(clean=True)
645 self._remote.update(clean=True)
646 raise
646 raise
647 else:
647 else:
648 try:
648 try:
649 self._remote.merge(source_ref.commit_id)
649 self._remote.merge(source_ref.commit_id)
650 self._remote.invalidate_vcs_cache()
650 self._remote.invalidate_vcs_cache()
651 self._remote.commit(
651 self._remote.commit(
652 message=safe_str(merge_message),
652 message=safe_str(merge_message),
653 username=safe_str('%s <%s>' % (user_name, user_email)))
653 username=safe_str('%s <%s>' % (user_name, user_email)))
654 self._remote.invalidate_vcs_cache()
654 self._remote.invalidate_vcs_cache()
655 return self._identify(), True
655 return self._identify(), True
656 except RepositoryError:
656 except RepositoryError:
657 # Cleanup any merge leftovers
657 # Cleanup any merge leftovers
658 self._remote.update(clean=True)
658 self._remote.update(clean=True)
659 raise
659 raise
660
660
661 def _local_close(self, target_ref, user_name, user_email,
661 def _local_close(self, target_ref, user_name, user_email,
662 source_ref, close_message=''):
662 source_ref, close_message=''):
663 """
663 """
664 Close the branch of the given source_revision
664 Close the branch of the given source_revision
665
665
666 Returns the commit id of the close and a boolean indicating if the
666 Returns the commit id of the close and a boolean indicating if the
667 commit needs to be pushed.
667 commit needs to be pushed.
668 """
668 """
669 self._update(source_ref.commit_id)
669 self._update(source_ref.commit_id)
670 message = close_message or "Closing branch: `{}`".format(source_ref.name)
670 message = close_message or "Closing branch: `{}`".format(source_ref.name)
671 try:
671 try:
672 self._remote.commit(
672 self._remote.commit(
673 message=safe_str(message),
673 message=safe_str(message),
674 username=safe_str('%s <%s>' % (user_name, user_email)),
674 username=safe_str('%s <%s>' % (user_name, user_email)),
675 close_branch=True)
675 close_branch=True)
676 self._remote.invalidate_vcs_cache()
676 self._remote.invalidate_vcs_cache()
677 return self._identify(), True
677 return self._identify(), True
678 except RepositoryError:
678 except RepositoryError:
679 # Cleanup any commit leftovers
679 # Cleanup any commit leftovers
680 self._remote.update(clean=True)
680 self._remote.update(clean=True)
681 raise
681 raise
682
682
683 def _is_the_same_branch(self, target_ref, source_ref):
683 def _is_the_same_branch(self, target_ref, source_ref):
684 return (
684 return (
685 self._get_branch_name(target_ref) ==
685 self._get_branch_name(target_ref) ==
686 self._get_branch_name(source_ref))
686 self._get_branch_name(source_ref))
687
687
688 def _get_branch_name(self, ref):
688 def _get_branch_name(self, ref):
689 if ref.type == 'branch':
689 if ref.type == 'branch':
690 return ref.name
690 return ref.name
691 return self._remote.ctx_branch(ref.commit_id)
691 return self._remote.ctx_branch(ref.commit_id)
692
692
693 def _maybe_prepare_merge_workspace(
693 def _maybe_prepare_merge_workspace(
694 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
694 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
695 shadow_repository_path = self._get_shadow_repository_path(
695 shadow_repository_path = self._get_shadow_repository_path(
696 repo_id, workspace_id)
696 repo_id, workspace_id)
697 if not os.path.exists(shadow_repository_path):
697 if not os.path.exists(shadow_repository_path):
698 self._local_clone(shadow_repository_path)
698 self._local_clone(shadow_repository_path)
699 log.debug(
699 log.debug(
700 'Prepared shadow repository in %s', shadow_repository_path)
700 'Prepared shadow repository in %s', shadow_repository_path)
701
701
702 return shadow_repository_path
702 return shadow_repository_path

    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                heads = '\n,'.join(self._heads(target_ref.name))
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self._get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual Branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow to close branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:
            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action, if it
                # maybe required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError:
                log.exception('Failure when doing local merge on hg shadow repo')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmarks is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self._get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)

    def _get_shadow_instance(self, shadow_repository_path, enable_hooks=False):
        config = self.config.copy()
        if not enable_hooks:
            config.clear_section('hooks')
        return MercurialRepository(shadow_repository_path, config)

    def _validate_pull_reference(self, reference):
        if not (reference.name in self.bookmarks or
                reference.name in self.branches or
                self.get_commit(reference.commit_id)):
            raise CommitDoesNotExistError(
                'Unknown branch, bookmark or commit id')

    def _local_pull(self, repository_path, reference):
        """
        Fetch a branch, bookmark or commit from a local repository.
        """
        repository_path = os.path.abspath(repository_path)
        if repository_path == self.path:
            raise ValueError('Cannot pull from the same repository')

        reference_type_to_option_name = {
            'book': 'bookmark',
            'branch': 'branch',
        }
        option_name = reference_type_to_option_name.get(
            reference.type, 'revision')

        if option_name == 'revision':
            ref = reference.commit_id
        else:
            ref = reference.name

        options = {option_name: [ref]}
        self._remote.pull_cmd(repository_path, hooks=False, **options)
        self._remote.invalidate_vcs_cache()

    def bookmark(self, bookmark, revision=None):
        if isinstance(bookmark, unicode):
            bookmark = safe_str(bookmark)
        self._remote.bookmark(bookmark, revision=revision)
        self._remote.invalidate_vcs_cache()

    def get_path_permissions(self, username):
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
            svalue = None
-            try:
-                svalue = hgacl.get('narrowhgacl', username + suffix)
-            except configparser.NoOptionError:
-                try:
-                    svalue = hgacl.get('narrowhgacl', 'default' + suffix)
-                except configparser.NoOptionError:
-                    pass
+            for section, option in [
+                ('narrowacl', username + suffix),
+                ('narrowacl', 'default' + suffix),
+                ('narrowhgacl', username + suffix),
+                ('narrowhgacl', 'default' + suffix)
+            ]:
+                try:
+                    svalue = hgacl.get(section, option)
+                    break  # stop at the first value we find
+                except configparser.NoOptionError:
+                    pass
            if not svalue:
                return None
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None


class MercurialIndexBasedCollectionGenerator(CollectionGenerator):

    def _commit_factory(self, commit_id):
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)