chore(mercurial): Fixed usage of str in generation of mercurial configs, and fixed largefiles call
super-admin - r5188:643e5e48 default
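For context on the largefiles change in the hunk below: Mercurial enables a bundled extension when its name appears as a key under the [extensions] section of its config, and the value is conventionally left empty; a non-empty value is treated as a path to an extension module. That is why the default config tuple moves from ('extensions', 'largefiles', '1') to ('extensions', 'largefiles', ''). A minimal sketch of the resulting hgrc-style output, assuming each default tuple maps directly to (section, key, value):

    [extensions]
    largefiles =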
@@ -1,1013 +1,1017 @@
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 HG repository module
20 HG repository module
21 """
21 """
22 import os
22 import os
23 import logging
23 import logging
24 import binascii
24 import binascii
25 import configparser
25 import configparser
26 import urllib.request
26 import urllib.request
27 import urllib.parse
27 import urllib.parse
28 import urllib.error
28 import urllib.error
29
29
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from collections import OrderedDict
32 from collections import OrderedDict
33 from rhodecode.lib.datelib import (
33 from rhodecode.lib.datelib import (
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 from rhodecode.lib.str_utils import safe_str
35 from rhodecode.lib.str_utils import safe_str
36 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.utils2 import CachedProperty
37 from rhodecode.lib.vcs import connection, exceptions
37 from rhodecode.lib.vcs import connection, exceptions
38 from rhodecode.lib.vcs.backends.base import (
38 from rhodecode.lib.vcs.backends.base import (
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 MergeFailureReason, Reference, BasePathPermissionChecker)
40 MergeFailureReason, Reference, BasePathPermissionChecker)
41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 from rhodecode.lib.vcs.exceptions import (
44 from rhodecode.lib.vcs.exceptions import (
45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
47
47
48 hexlify = binascii.hexlify
48 hexlify = binascii.hexlify
49 nullid = "\0" * 20
49 nullid = "\0" * 20
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class MercurialRepository(BaseRepository):
54 class MercurialRepository(BaseRepository):
55 """
55 """
56 Mercurial repository backend
56 Mercurial repository backend
57 """
57 """
58 DEFAULT_BRANCH_NAME = 'default'
58 DEFAULT_BRANCH_NAME = 'default'
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62 """
62 """
63 Raises RepositoryError if repository could not be found at the given
63 Raises RepositoryError if repository could not be found at the given
64 ``repo_path``.
64 ``repo_path``.
65
65
66 :param repo_path: local path of the repository
66 :param repo_path: local path of the repository
67 :param config: config object containing the repo configuration
67 :param config: config object containing the repo configuration
68 :param create=False: if set to True, would try to create repository if
68 :param create=False: if set to True, would try to create repository if
69 it does not exist rather than raising exception
69 it does not exist rather than raising exception
70 :param src_url=None: would try to clone repository from given location
70 :param src_url=None: would try to clone repository from given location
71 :param do_workspace_checkout=False: sets update of working copy after
71 :param do_workspace_checkout=False: sets update of working copy after
72 making a clone
72 making a clone
73 :param bare: not used, compatible with other VCS
73 :param bare: not used, compatible with other VCS
74 """
74 """
75
75
76 self.path = safe_str(os.path.abspath(repo_path))
76 self.path = safe_str(os.path.abspath(repo_path))
77 # mercurial since 4.4.X requires certain configuration to be present
77 # mercurial since 4.4.X requires certain configuration to be present
78 # because sometimes we init the repos with a config, we need to meet
78 # because sometimes we init the repos with a config, we need to meet
79 # special requirements
79 # special requirements
80 self.config = config if config else self.get_default_config(
80 self.config = config if config else self.get_default_config(
81 default=[('extensions', 'largefiles', '1')])
81 default=[('extensions', 'largefiles', '')])
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83
83
84 self._init_repo(create, src_url, do_workspace_checkout)
84 self._init_repo(create, src_url, do_workspace_checkout)
85
85
86 # caches
86 # caches
87 self._commit_ids = {}
87 self._commit_ids = {}
88
88
89 @LazyProperty
89 @LazyProperty
90 def _remote(self):
90 def _remote(self):
91 repo_id = self.path
91 repo_id = self.path
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93
93
94 @CachedProperty
94 @CachedProperty
95 def commit_ids(self):
95 def commit_ids(self):
96 """
96 """
97 Returns a list of commit ids, in ascending order. Being a lazy
97 Returns a list of commit ids, in ascending order. Being a lazy
98 attribute allows external tools to inject shas from cache.
98 attribute allows external tools to inject shas from cache.
99 """
99 """
100 commit_ids = self._get_all_commit_ids()
100 commit_ids = self._get_all_commit_ids()
101 self._rebuild_cache(commit_ids)
101 self._rebuild_cache(commit_ids)
102 return commit_ids
102 return commit_ids
103
103
104 def _rebuild_cache(self, commit_ids):
104 def _rebuild_cache(self, commit_ids):
105 self._commit_ids = {commit_id: index
105 self._commit_ids = {commit_id: index
106 for index, commit_id in enumerate(commit_ids)}
106 for index, commit_id in enumerate(commit_ids)}
107
107
108 @CachedProperty
108 @CachedProperty
109 def branches(self):
109 def branches(self):
110 return self._get_branches()
110 return self._get_branches()
111
111
112 @CachedProperty
112 @CachedProperty
113 def branches_closed(self):
113 def branches_closed(self):
114 return self._get_branches(active=False, closed=True)
114 return self._get_branches(active=False, closed=True)
115
115
116 @CachedProperty
116 @CachedProperty
117 def branches_all(self):
117 def branches_all(self):
118 all_branches = {}
118 all_branches = {}
119 all_branches.update(self.branches)
119 all_branches.update(self.branches)
120 all_branches.update(self.branches_closed)
120 all_branches.update(self.branches_closed)
121 return all_branches
121 return all_branches
122
122
123 def _get_branches(self, active=True, closed=False):
123 def _get_branches(self, active=True, closed=False):
124 """
124 """
125 Gets branches for this repository
125 Gets branches for this repository
126 Returns only active, non-closed branches by default
126 Returns only active, non-closed branches by default
127
127
128 :param active: return also active branches
128 :param active: return also active branches
129 :param closed: return also closed branches
129 :param closed: return also closed branches
130
130
131 """
131 """
132 if self.is_empty():
132 if self.is_empty():
133 return {}
133 return {}
134
134
135 def get_name(ctx):
135 def get_name(ctx):
136 return ctx[0]
136 return ctx[0]
137
137
138 _branches = [(n, h,) for n, h in
138 _branches = [(n, h,) for n, h in
139 self._remote.branches(active, closed).items()]
139 self._remote.branches(active, closed).items()]
140
140
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142
142
143 @CachedProperty
143 @CachedProperty
144 def tags(self):
144 def tags(self):
145 """
145 """
146 Gets tags for this repository
146 Gets tags for this repository
147 """
147 """
148 return self._get_tags()
148 return self._get_tags()
149
149
150 def _get_tags(self):
150 def _get_tags(self):
151 if self.is_empty():
151 if self.is_empty():
152 return {}
152 return {}
153
153
154 def get_name(ctx):
154 def get_name(ctx):
155 return ctx[0]
155 return ctx[0]
156
156
157 _tags = [(n, h,) for n, h in
157 _tags = [(n, h,) for n, h in
158 self._remote.tags().items()]
158 self._remote.tags().items()]
159
159
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161
161
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 """
163 """
164 Creates and returns a tag for the given ``commit_id``.
164 Creates and returns a tag for the given ``commit_id``.
165
165
166 :param name: name for new tag
166 :param name: name for new tag
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 :param commit_id: commit id for which new tag would be created
168 :param commit_id: commit id for which new tag would be created
169 :param message: message of the tag's commit
169 :param message: message of the tag's commit
170 :param date: date of tag's commit
170 :param date: date of tag's commit
171
171
172 :raises TagAlreadyExistError: if tag with same name already exists
172 :raises TagAlreadyExistError: if tag with same name already exists
173 """
173 """
174 if name in self.tags:
174 if name in self.tags:
175 raise TagAlreadyExistError("Tag %s already exists" % name)
175 raise TagAlreadyExistError("Tag %s already exists" % name)
176
176
177 commit = self.get_commit(commit_id=commit_id)
177 commit = self.get_commit(commit_id=commit_id)
178 local = kwargs.setdefault('local', False)
178 local = kwargs.setdefault('local', False)
179
179
180 if message is None:
180 if message is None:
181 message = f"Added tag {name} for commit {commit.short_id}"
181 message = f"Added tag {name} for commit {commit.short_id}"
182
182
183 date, tz = date_to_timestamp_plus_offset(date)
183 date, tz = date_to_timestamp_plus_offset(date)
184
184
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 self._remote.invalidate_vcs_cache()
186 self._remote.invalidate_vcs_cache()
187
187
188 # Reinitialize tags
188 # Reinitialize tags
189 self._invalidate_prop_cache('tags')
189 self._invalidate_prop_cache('tags')
190 tag_id = self.tags[name]
190 tag_id = self.tags[name]
191
191
192 return self.get_commit(commit_id=tag_id)
192 return self.get_commit(commit_id=tag_id)
193
193
194 def remove_tag(self, name, user, message=None, date=None):
194 def remove_tag(self, name, user, message=None, date=None):
195 """
195 """
196 Removes tag with the given `name`.
196 Removes tag with the given `name`.
197
197
198 :param name: name of the tag to be removed
198 :param name: name of the tag to be removed
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 :param message: message of the tag's removal commit
200 :param message: message of the tag's removal commit
201 :param date: date of tag's removal commit
201 :param date: date of tag's removal commit
202
202
203 :raises TagDoesNotExistError: if tag with given name does not exist
203 :raises TagDoesNotExistError: if tag with given name does not exist
204 """
204 """
205 if name not in self.tags:
205 if name not in self.tags:
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207
207
208 if message is None:
208 if message is None:
209 message = "Removed tag %s" % name
209 message = "Removed tag %s" % name
210 local = False
210 local = False
211
211
212 date, tz = date_to_timestamp_plus_offset(date)
212 date, tz = date_to_timestamp_plus_offset(date)
213
213
214 self._remote.tag(name, nullid, message, local, user, date, tz)
214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 self._remote.invalidate_vcs_cache()
215 self._remote.invalidate_vcs_cache()
216 self._invalidate_prop_cache('tags')
216 self._invalidate_prop_cache('tags')
217
217
218 @LazyProperty
218 @LazyProperty
219 def bookmarks(self):
219 def bookmarks(self):
220 """
220 """
221 Gets bookmarks for this repository
221 Gets bookmarks for this repository
222 """
222 """
223 return self._get_bookmarks()
223 return self._get_bookmarks()
224
224
225 def _get_bookmarks(self):
225 def _get_bookmarks(self):
226 if self.is_empty():
226 if self.is_empty():
227 return {}
227 return {}
228
228
229 def get_name(ctx):
229 def get_name(ctx):
230 return ctx[0]
230 return ctx[0]
231
231
232 _bookmarks = [
232 _bookmarks = [
233 (n, h) for n, h in
233 (n, h) for n, h in
234 self._remote.bookmarks().items()]
234 self._remote.bookmarks().items()]
235
235
236 return OrderedDict(sorted(_bookmarks, key=get_name))
236 return OrderedDict(sorted(_bookmarks, key=get_name))
237
237
238 def _get_all_commit_ids(self):
238 def _get_all_commit_ids(self):
239 return self._remote.get_all_commit_ids('visible')
239 return self._remote.get_all_commit_ids('visible')
240
240
241 def get_diff(
241 def get_diff(
242 self, commit1, commit2, path='', ignore_whitespace=False,
242 self, commit1, commit2, path='', ignore_whitespace=False,
243 context=3, path1=None):
243 context=3, path1=None):
244 """
244 """
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 `commit2` since `commit1`.
246 `commit2` since `commit1`.
247
247
248 :param commit1: Entry point from which diff is shown. Can be
248 :param commit1: Entry point from which diff is shown. Can be
249 ``self.EMPTY_COMMIT`` - in this case, a patch showing all
249 ``self.EMPTY_COMMIT`` - in this case, a patch showing all
250 the changes since the empty state of the repository up to `commit2`
250 the changes since the empty state of the repository up to `commit2`
251 :param commit2: Until which commit changes should be shown.
251 :param commit2: Until which commit changes should be shown.
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 changes. Defaults to ``False``.
253 changes. Defaults to ``False``.
254 :param context: How many lines before/after changed lines should be
254 :param context: How many lines before/after changed lines should be
255 shown. Defaults to ``3``.
255 shown. Defaults to ``3``.
256 """
256 """
257 self._validate_diff_commits(commit1, commit2)
257 self._validate_diff_commits(commit1, commit2)
258 if path1 is not None and path1 != path:
258 if path1 is not None and path1 != path:
259 raise ValueError("Diff of two different paths not supported.")
259 raise ValueError("Diff of two different paths not supported.")
260
260
261 if path:
261 if path:
262 file_filter = [self.path, path]
262 file_filter = [self.path, path]
263 else:
263 else:
264 file_filter = None
264 file_filter = None
265
265
266 diff = self._remote.diff(
266 diff = self._remote.diff(
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 opt_git=True, opt_ignorews=ignore_whitespace,
268 opt_git=True, opt_ignorews=ignore_whitespace,
269 context=context)
269 context=context)
270 return MercurialDiff(diff)
270 return MercurialDiff(diff)
271
271
272 def strip(self, commit_id, branch=None):
272 def strip(self, commit_id, branch=None):
273 self._remote.strip(commit_id, update=False, backup=False)
273 self._remote.strip(commit_id, update=False, backup=False)
274
274
275 self._remote.invalidate_vcs_cache()
275 self._remote.invalidate_vcs_cache()
276 # clear cache
276 # clear cache
277 self._invalidate_prop_cache('commit_ids')
277 self._invalidate_prop_cache('commit_ids')
278
278
279 return len(self.commit_ids)
279 return len(self.commit_ids)
280
280
281 def verify(self):
281 def verify(self):
282 verify = self._remote.verify()
282 verify = self._remote.verify()
283
283
284 self._remote.invalidate_vcs_cache()
284 self._remote.invalidate_vcs_cache()
285 return verify
285 return verify
286
286
287 def hg_update_cache(self):
287 def hg_update_cache(self):
288 update_cache = self._remote.hg_update_cache()
288 update_cache = self._remote.hg_update_cache()
289
289
290 self._remote.invalidate_vcs_cache()
290 self._remote.invalidate_vcs_cache()
291 return update_cache
291 return update_cache
292
292
293 def hg_rebuild_fn_cache(self):
293 def hg_rebuild_fn_cache(self):
294 update_cache = self._remote.hg_rebuild_fn_cache()
294 update_cache = self._remote.hg_rebuild_fn_cache()
295
295
296 self._remote.invalidate_vcs_cache()
296 self._remote.invalidate_vcs_cache()
297 return update_cache
297 return update_cache
298
298
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
301 self, commit_id1, repo2, commit_id2)
301 self, commit_id1, repo2, commit_id2)
302
302
303 if commit_id1 == commit_id2:
303 if commit_id1 == commit_id2:
304 return commit_id1
304 return commit_id1
305
305
306 ancestors = self._remote.revs_from_revspec(
306 ancestors = self._remote.revs_from_revspec(
307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
308 other_path=repo2.path)
308 other_path=repo2.path)
309
309
310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
311
311
312 log.debug('Found common ancestor with sha: %s', ancestor_id)
312 log.debug('Found common ancestor with sha: %s', ancestor_id)
313 return ancestor_id
313 return ancestor_id
314
314
315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
316 if commit_id1 == commit_id2:
316 if commit_id1 == commit_id2:
317 commits = []
317 commits = []
318 else:
318 else:
319 if merge:
319 if merge:
320 indexes = self._remote.revs_from_revspec(
320 indexes = self._remote.revs_from_revspec(
321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
323 else:
323 else:
324 indexes = self._remote.revs_from_revspec(
324 indexes = self._remote.revs_from_revspec(
325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
326 commit_id1, other_path=repo2.path)
326 commit_id1, other_path=repo2.path)
327
327
328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
329 for idx in indexes]
329 for idx in indexes]
330
330
331 return commits
331 return commits
332
332
333 @staticmethod
333 @staticmethod
334 def check_url(url, config):
334 def check_url(url, config):
335 """
335 """
336 Function will check the given url and try to verify it's a valid
336 Function will check the given url and try to verify it's a valid
337 link. Sometimes it may happen that mercurial will issue a basic
337 link. Sometimes it may happen that mercurial will issue a basic
338 auth request that can cause the whole API to hang when used from python
338 auth request that can cause the whole API to hang when used from python
339 or other external calls.
339 or other external calls.
340
340
341 On failures it'll raise urllib.error.HTTPError; the exception is also
341 On failures it'll raise urllib.error.HTTPError; the exception is also
342 raised when the return code is not 200
342 raised when the return code is not 200
343 """
343 """
344 # check first if it's not a local url
344 # check first if it's not a local url
345 if os.path.isdir(url) or url.startswith('file:'):
345 if os.path.isdir(url) or url.startswith('file:'):
346 return True
346 return True
347
347
348 # Request the _remote to verify the url
348 # Request the _remote to verify the url
349 return connection.Hg.check_url(url, config.serialize())
349 return connection.Hg.check_url(url, config.serialize())
350
350
351 @staticmethod
351 @staticmethod
352 def is_valid_repository(path):
352 def is_valid_repository(path):
353 return os.path.isdir(os.path.join(path, '.hg'))
353 return os.path.isdir(os.path.join(path, '.hg'))
354
354
355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
356 """
356 """
357 Function will check for a mercurial repository in the given path. If there
357 Function will check for a mercurial repository in the given path. If there
358 is no repository in that path it will raise an exception unless the
358 is no repository in that path it will raise an exception unless the
359 `create` parameter is set to True - in that case the repository would
359 `create` parameter is set to True - in that case the repository would
360 be created.
360 be created.
361
361
362 If `src_url` is given, it would try to clone the repository from
362 If `src_url` is given, it would try to clone the repository from
363 that location. Additionally it'll update the working
363 that location. Additionally it'll update the working
364 copy according to the `do_workspace_checkout` flag.
364 copy according to the `do_workspace_checkout` flag.
365 """
365 """
366 if create and os.path.exists(self.path):
366 if create and os.path.exists(self.path):
367 raise RepositoryError(
367 raise RepositoryError(
368 f"Cannot create repository at {self.path}, location already exist")
368 f"Cannot create repository at {self.path}, location already exist")
369
369
370 if src_url:
370 if src_url:
371 url = str(self._get_url(src_url))
371 url = str(self._get_url(src_url))
372 MercurialRepository.check_url(url, self.config)
372 MercurialRepository.check_url(url, self.config)
373
373
374 self._remote.clone(url, self.path, do_workspace_checkout)
374 self._remote.clone(url, self.path, do_workspace_checkout)
375
375
376 # Don't try to create if we've already cloned repo
376 # Don't try to create if we've already cloned repo
377 create = False
377 create = False
378
378
379 if create:
379 if create:
380 os.makedirs(self.path, mode=0o755)
380 os.makedirs(self.path, mode=0o755)
381
381
382 self._remote.localrepository(create)
382 self._remote.localrepository(create)
383
383
384 @LazyProperty
384 @LazyProperty
385 def in_memory_commit(self):
385 def in_memory_commit(self):
386 return MercurialInMemoryCommit(self)
386 return MercurialInMemoryCommit(self)
387
387
388 @LazyProperty
388 @LazyProperty
389 def description(self):
389 def description(self):
390 description = self._remote.get_config_value(
390 description = self._remote.get_config_value(
391 'web', 'description', untrusted=True)
391 'web', 'description', untrusted=True)
392 return safe_str(description or self.DEFAULT_DESCRIPTION)
392 return safe_str(description or self.DEFAULT_DESCRIPTION)
393
393
394 @LazyProperty
394 @LazyProperty
395 def contact(self):
395 def contact(self):
396 contact = (
396 contact = (
397 self._remote.get_config_value("web", "contact") or
397 self._remote.get_config_value("web", "contact") or
398 self._remote.get_config_value("ui", "username"))
398 self._remote.get_config_value("ui", "username"))
399 return safe_str(contact or self.DEFAULT_CONTACT)
399 return safe_str(contact or self.DEFAULT_CONTACT)
400
400
401 @LazyProperty
401 @LazyProperty
402 def last_change(self):
402 def last_change(self):
403 """
403 """
404 Returns last change made on this repository as a
404 Returns last change made on this repository as a
405 `datetime.datetime` object.
405 `datetime.datetime` object.
406 """
406 """
407 try:
407 try:
408 return self.get_commit().date
408 return self.get_commit().date
409 except RepositoryError:
409 except RepositoryError:
410 tzoffset = makedate()[1]
410 tzoffset = makedate()[1]
411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
412
412
413 def _get_fs_mtime(self):
413 def _get_fs_mtime(self):
414 # fallback to filesystem
414 # fallback to filesystem
415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
416 st_path = os.path.join(self.path, '.hg', "store")
416 st_path = os.path.join(self.path, '.hg', "store")
417 if os.path.exists(cl_path):
417 if os.path.exists(cl_path):
418 return os.stat(cl_path).st_mtime
418 return os.stat(cl_path).st_mtime
419 else:
419 else:
420 return os.stat(st_path).st_mtime
420 return os.stat(st_path).st_mtime
421
421
422 def _get_url(self, url):
422 def _get_url(self, url):
423 """
423 """
424 Returns a normalized url. If no scheme is given, falls back
424 Returns a normalized url. If no scheme is given, falls back
425 to the filesystem
425 to the filesystem
426 (``file:///``) scheme.
426 (``file:///``) scheme.
427 """
427 """
428 if url != 'default' and '://' not in url:
428 if url != 'default' and '://' not in url:
429 url = "file:" + urllib.request.pathname2url(url)
429 url = "file:" + urllib.request.pathname2url(url)
430 return url
430 return url
431
431
432 def get_hook_location(self):
432 def get_hook_location(self):
433 """
433 """
434 returns absolute path to location where hooks are stored
434 returns absolute path to location where hooks are stored
435 """
435 """
436 return os.path.join(self.path, '.hg', '.hgrc')
436 return os.path.join(self.path, '.hg', '.hgrc')
437
437
438 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
438 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
439 translate_tag=None, maybe_unreachable=False, reference_obj=None):
439 translate_tag=None, maybe_unreachable=False, reference_obj=None):
440 """
440 """
441 Returns ``MercurialCommit`` object representing repository's
441 Returns ``MercurialCommit`` object representing repository's
442 commit at the given `commit_id` or `commit_idx`.
442 commit at the given `commit_id` or `commit_idx`.
443 """
443 """
444 if self.is_empty():
444 if self.is_empty():
445 raise EmptyRepositoryError("There are no commits yet")
445 raise EmptyRepositoryError("There are no commits yet")
446
446
447 if commit_id is not None:
447 if commit_id is not None:
448 self._validate_commit_id(commit_id)
448 self._validate_commit_id(commit_id)
449 try:
449 try:
450 # we have cached idx, use it without contacting the remote
450 # we have cached idx, use it without contacting the remote
451 idx = self._commit_ids[commit_id]
451 idx = self._commit_ids[commit_id]
452 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
452 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
453 except KeyError:
453 except KeyError:
454 pass
454 pass
455
455
456 elif commit_idx is not None:
456 elif commit_idx is not None:
457 self._validate_commit_idx(commit_idx)
457 self._validate_commit_idx(commit_idx)
458 try:
458 try:
459 _commit_id = self.commit_ids[commit_idx]
459 _commit_id = self.commit_ids[commit_idx]
460 if commit_idx < 0:
460 if commit_idx < 0:
461 commit_idx = self.commit_ids.index(_commit_id)
461 commit_idx = self.commit_ids.index(_commit_id)
462
462
463 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
463 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
464 except IndexError:
464 except IndexError:
465 commit_id = commit_idx
465 commit_id = commit_idx
466 else:
466 else:
467 commit_id = "tip"
467 commit_id = "tip"
468
468
469 # case here is no cached version, do an actual lookup instead
469 # case here is no cached version, do an actual lookup instead
470 try:
470 try:
471 raw_id, idx = self._remote.lookup(commit_id, both=True)
471 raw_id, idx = self._remote.lookup(commit_id, both=True)
472 except CommitDoesNotExistError:
472 except CommitDoesNotExistError:
473 msg = "Commit {} does not exist for `{}`".format(
473 msg = "Commit {} does not exist for `{}`".format(
474 *map(safe_str, [commit_id, self.name]))
474 *map(safe_str, [commit_id, self.name]))
475 raise CommitDoesNotExistError(msg)
475 raise CommitDoesNotExistError(msg)
476
476
477 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
477 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
478
478
479 def get_commits(
479 def get_commits(
480 self, start_id=None, end_id=None, start_date=None, end_date=None,
480 self, start_id=None, end_id=None, start_date=None, end_date=None,
481 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
481 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
482 """
482 """
483 Returns generator of ``MercurialCommit`` objects from start to end
483 Returns generator of ``MercurialCommit`` objects from start to end
484 (both are inclusive)
484 (both are inclusive)
485
485
486 :param start_id: None, str(commit_id)
486 :param start_id: None, str(commit_id)
487 :param end_id: None, str(commit_id)
487 :param end_id: None, str(commit_id)
488 :param start_date: if specified, commits with commit date less than
488 :param start_date: if specified, commits with commit date less than
489 ``start_date`` would be filtered out from returned set
489 ``start_date`` would be filtered out from returned set
490 :param end_date: if specified, commits with commit date greater than
490 :param end_date: if specified, commits with commit date greater than
491 ``end_date`` would be filtered out from returned set
491 ``end_date`` would be filtered out from returned set
492 :param branch_name: if specified, commits not reachable from given
492 :param branch_name: if specified, commits not reachable from given
493 branch would be filtered out from returned set
493 branch would be filtered out from returned set
494 :param show_hidden: Show hidden commits such as obsolete or hidden from
494 :param show_hidden: Show hidden commits such as obsolete or hidden from
495 Mercurial evolve
495 Mercurial evolve
496 :raise BranchDoesNotExistError: If given ``branch_name`` does not
496 :raise BranchDoesNotExistError: If given ``branch_name`` does not
497 exist.
497 exist.
498 :raise CommitDoesNotExistError: If commit for given ``start`` or
498 :raise CommitDoesNotExistError: If commit for given ``start`` or
499 ``end`` could not be found.
499 ``end`` could not be found.
500 """
500 """
501 # actually we should check now if it's not an empty repo
501 # actually we should check now if it's not an empty repo
502 if self.is_empty():
502 if self.is_empty():
503 raise EmptyRepositoryError("There are no commits yet")
503 raise EmptyRepositoryError("There are no commits yet")
504 self._validate_branch_name(branch_name)
504 self._validate_branch_name(branch_name)
505
505
506 branch_ancestors = False
506 branch_ancestors = False
507 if start_id is not None:
507 if start_id is not None:
508 self._validate_commit_id(start_id)
508 self._validate_commit_id(start_id)
509 c_start = self.get_commit(commit_id=start_id)
509 c_start = self.get_commit(commit_id=start_id)
510 start_pos = self._commit_ids[c_start.raw_id]
510 start_pos = self._commit_ids[c_start.raw_id]
511 else:
511 else:
512 start_pos = None
512 start_pos = None
513
513
514 if end_id is not None:
514 if end_id is not None:
515 self._validate_commit_id(end_id)
515 self._validate_commit_id(end_id)
516 c_end = self.get_commit(commit_id=end_id)
516 c_end = self.get_commit(commit_id=end_id)
517 end_pos = max(0, self._commit_ids[c_end.raw_id])
517 end_pos = max(0, self._commit_ids[c_end.raw_id])
518 else:
518 else:
519 end_pos = None
519 end_pos = None
520
520
521 if None not in [start_id, end_id] and start_pos > end_pos:
521 if None not in [start_id, end_id] and start_pos > end_pos:
522 raise RepositoryError(
522 raise RepositoryError(
523 "Start commit '%s' cannot be after end commit '%s'" %
523 "Start commit '%s' cannot be after end commit '%s'" %
524 (start_id, end_id))
524 (start_id, end_id))
525
525
526 if end_pos is not None:
526 if end_pos is not None:
527 end_pos += 1
527 end_pos += 1
528
528
529 commit_filter = []
529 commit_filter = []
530
530
531 if branch_name and not branch_ancestors:
531 if branch_name and not branch_ancestors:
532 commit_filter.append(f'branch("{branch_name}")')
532 commit_filter.append(f'branch("{branch_name}")')
533 elif branch_name and branch_ancestors:
533 elif branch_name and branch_ancestors:
534 commit_filter.append(f'ancestors(branch("{branch_name}"))')
534 commit_filter.append(f'ancestors(branch("{branch_name}"))')
535
535
536 if start_date and not end_date:
536 if start_date and not end_date:
537 commit_filter.append(f'date(">{start_date}")')
537 commit_filter.append(f'date(">{start_date}")')
538 if end_date and not start_date:
538 if end_date and not start_date:
539 commit_filter.append(f'date("<{end_date}")')
539 commit_filter.append(f'date("<{end_date}")')
540 if start_date and end_date:
540 if start_date and end_date:
541 commit_filter.append(
541 commit_filter.append(
542 f'date(">{start_date}") and date("<{end_date}")')
542 f'date(">{start_date}") and date("<{end_date}")')
543
543
544 if not show_hidden:
544 if not show_hidden:
545 commit_filter.append('not obsolete()')
545 commit_filter.append('not obsolete()')
546 commit_filter.append('not hidden()')
546 commit_filter.append('not hidden()')
547
547
548 # TODO: johbo: Figure out a simpler way for this solution
548 # TODO: johbo: Figure out a simpler way for this solution
549 collection_generator = CollectionGenerator
549 collection_generator = CollectionGenerator
550 if commit_filter:
550 if commit_filter:
551 commit_filter = ' and '.join(map(safe_str, commit_filter))
551 commit_filter = ' and '.join(map(safe_str, commit_filter))
552 revisions = self._remote.rev_range([commit_filter])
552 revisions = self._remote.rev_range([commit_filter])
553 collection_generator = MercurialIndexBasedCollectionGenerator
553 collection_generator = MercurialIndexBasedCollectionGenerator
554 else:
554 else:
555 revisions = self.commit_ids
555 revisions = self.commit_ids
556
556
557 if start_pos or end_pos:
557 if start_pos or end_pos:
558 revisions = revisions[start_pos:end_pos]
558 revisions = revisions[start_pos:end_pos]
559
559
560 return collection_generator(self, revisions, pre_load=pre_load)
560 return collection_generator(self, revisions, pre_load=pre_load)
561
561
562 def pull(self, url, commit_ids=None):
562 def pull(self, url, commit_ids=None):
563 """
563 """
564 Pull changes from external location.
564 Pull changes from external location.
565
565
566 :param commit_ids: Optional. Can be set to a list of commit ids
566 :param commit_ids: Optional. Can be set to a list of commit ids
567 which shall be pulled from the other repository.
567 which shall be pulled from the other repository.
568 """
568 """
569 url = self._get_url(url)
569 url = self._get_url(url)
570 self._remote.pull(url, commit_ids=commit_ids)
570 self._remote.pull(url, commit_ids=commit_ids)
571 self._remote.invalidate_vcs_cache()
571 self._remote.invalidate_vcs_cache()
572
572
573 def fetch(self, url, commit_ids=None):
573 def fetch(self, url, commit_ids=None):
574 """
574 """
575 Backward compatibility with GIT fetch==pull
575 Backward compatibility with GIT fetch==pull
576 """
576 """
577 return self.pull(url, commit_ids=commit_ids)
577 return self.pull(url, commit_ids=commit_ids)
578
578
579 def push(self, url):
579 def push(self, url):
580 url = self._get_url(url)
580 url = self._get_url(url)
581 self._remote.sync_push(url)
581 self._remote.sync_push(url)
582
582
583 def _local_clone(self, clone_path):
583 def _local_clone(self, clone_path):
584 """
584 """
585 Create a local clone of the current repo.
585 Create a local clone of the current repo.
586 """
586 """
587 self._remote.clone(self.path, clone_path, update_after_clone=True,
587 self._remote.clone(self.path, clone_path, update_after_clone=True,
588 hooks=False)
588 hooks=False)
589
589
590 def _update(self, revision, clean=False):
590 def _update(self, revision, clean=False):
591 """
591 """
592 Update the working copy to the specified revision.
592 Update the working copy to the specified revision.
593 """
593 """
594 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
594 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
595 self._remote.update(revision, clean=clean)
595 self._remote.update(revision, clean=clean)
596
596
597 def _identify(self):
597 def _identify(self):
598 """
598 """
599 Return the current state of the working directory.
599 Return the current state of the working directory.
600 """
600 """
601 return self._remote.identify().strip().rstrip('+')
601 return self._remote.identify().strip().rstrip('+')
602
602
603 def _heads(self, branch=None):
603 def _heads(self, branch=None):
604 """
604 """
605 Return the commit ids of the repository heads.
605 Return the commit ids of the repository heads.
606 """
606 """
607 return self._remote.heads(branch=branch).strip().split(' ')
607 return self._remote.heads(branch=branch).strip().split(' ')
608
608
609 def _ancestor(self, revision1, revision2):
609 def _ancestor(self, revision1, revision2):
610 """
610 """
611 Return the common ancestor of the two revisions.
611 Return the common ancestor of the two revisions.
612 """
612 """
613 return self._remote.ancestor(revision1, revision2)
613 return self._remote.ancestor(revision1, revision2)
614
614
615 def _local_push(
615 def _local_push(
616 self, revision, repository_path, push_branches=False,
616 self, revision, repository_path, push_branches=False,
617 enable_hooks=False):
617 enable_hooks=False):
618 """
618 """
619 Push the given revision to the specified repository.
619 Push the given revision to the specified repository.
620
620
621 :param push_branches: allow to create branches in the target repo.
621 :param push_branches: allow to create branches in the target repo.
622 """
622 """
623 self._remote.push(
623 self._remote.push(
624 [revision], repository_path, hooks=enable_hooks,
624 [revision], repository_path, hooks=enable_hooks,
625 push_branches=push_branches)
625 push_branches=push_branches)
626
626
627 def _local_merge(self, target_ref, merge_message, user_name, user_email,
627 def _local_merge(self, target_ref, merge_message, user_name, user_email,
628 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
628 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
629 """
629 """
630 Merge the given source_revision into the checked out revision.
630 Merge the given source_revision into the checked out revision.
631
631
632 Returns the commit id of the merge and a boolean indicating if the
632 Returns the commit id of the merge and a boolean indicating if the
633 commit needs to be pushed.
633 commit needs to be pushed.
634 """
634 """
635
635 source_ref_commit_id = source_ref.commit_id
636 source_ref_commit_id = source_ref.commit_id
636 target_ref_commit_id = target_ref.commit_id
637 target_ref_commit_id = target_ref.commit_id
637
638
638 # update our workdir to target ref, for proper merge
639 # update our workdir to target ref, for proper merge
639 self._update(target_ref_commit_id, clean=True)
640 self._update(target_ref_commit_id, clean=True)
640
641
641 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
642 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
642 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
643 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
643
644
644 if close_commit_id:
645 if close_commit_id:
645 # NOTE(marcink): if we get the close commit, this is our new source
646 # NOTE(marcink): if we get the close commit, this is our new source
646 # which will include the close commit itself.
647 # which will include the close commit itself.
647 source_ref_commit_id = close_commit_id
648 source_ref_commit_id = close_commit_id
648
649
649 if ancestor == source_ref_commit_id:
650 if ancestor == source_ref_commit_id:
650 # Nothing to do, the changes were already integrated
651 # Nothing to do, the changes were already integrated
651 return target_ref_commit_id, False
652 return target_ref_commit_id, False
652
653
653 elif ancestor == target_ref_commit_id and is_the_same_branch:
654 elif ancestor == target_ref_commit_id and is_the_same_branch:
654 # In this case we should force a commit message
655 # In this case we should force a commit message
655 return source_ref_commit_id, True
656 return source_ref_commit_id, True
656
657
657 unresolved = None
658 unresolved = None
658 if use_rebase:
659 if use_rebase:
659 try:
660 try:
660 bookmark_name = f'rcbook{source_ref_commit_id}{target_ref_commit_id}'
661 bookmark_name = f'rcbook{source_ref_commit_id}{target_ref_commit_id}'
661 self.bookmark(bookmark_name, revision=source_ref.commit_id)
662 self.bookmark(bookmark_name, revision=source_ref.commit_id)
662 self._remote.rebase(
663 self._remote.rebase(
663 source=source_ref_commit_id, dest=target_ref_commit_id)
664 source=source_ref_commit_id, dest=target_ref_commit_id)
664 self._remote.invalidate_vcs_cache()
665 self._remote.invalidate_vcs_cache()
665 self._update(bookmark_name, clean=True)
666 self._update(bookmark_name, clean=True)
666 return self._identify(), True
667 return self._identify(), True
667 except RepositoryError as e:
668 except RepositoryError as e:
668 # The rebase-abort may raise another exception which 'hides'
669 # The rebase-abort may raise another exception which 'hides'
669 # the original one, therefore we log it here.
670 # the original one, therefore we log it here.
670 log.exception('Error while rebasing shadow repo during merge.')
671 log.exception('Error while rebasing shadow repo during merge.')
671 if 'unresolved conflicts' in safe_str(e):
672 if 'unresolved conflicts' in safe_str(e):
672 unresolved = self._remote.get_unresolved_files()
673 unresolved = self._remote.get_unresolved_files()
673 log.debug('unresolved files: %s', unresolved)
674 log.debug('unresolved files: %s', unresolved)
674
675
675 # Cleanup any rebase leftovers
676 # Cleanup any rebase leftovers
676 self._remote.invalidate_vcs_cache()
677 self._remote.invalidate_vcs_cache()
677 self._remote.rebase(abort=True)
678 self._remote.rebase(abort=True)
678 self._remote.invalidate_vcs_cache()
679 self._remote.invalidate_vcs_cache()
679 self._remote.update(clean=True)
680 self._remote.update(clean=True)
680 if unresolved:
681 if unresolved:
681 raise UnresolvedFilesInRepo(unresolved)
682 raise UnresolvedFilesInRepo(unresolved)
682 else:
683 else:
683 raise
684 raise
684 else:
685 else:
685 try:
686 try:
686 self._remote.merge(source_ref_commit_id)
687 self._remote.merge(source_ref_commit_id)
687 self._remote.invalidate_vcs_cache()
688 self._remote.invalidate_vcs_cache()
688 self._remote.commit(
689 self._remote.commit(
689 message=safe_str(merge_message),
690 message=safe_str(merge_message),
690 username=safe_str(f'{user_name} <{user_email}>'))
691 username=safe_str(f'{user_name} <{user_email}>'))
691 self._remote.invalidate_vcs_cache()
692 self._remote.invalidate_vcs_cache()
692 return self._identify(), True
693 return self._identify(), True
693 except RepositoryError as e:
694 except RepositoryError as e:
694 # The merge-abort may raise another exception which 'hides'
695 # The merge-abort may raise another exception which 'hides'
695 # the original one, therefore we log it here.
696 # the original one, therefore we log it here.
696 log.exception('Error while merging shadow repo during merge.')
697 log.exception('Error while merging shadow repo during merge.')
697 if 'unresolved merge conflicts' in safe_str(e):
698 if 'unresolved merge conflicts' in safe_str(e):
698 unresolved = self._remote.get_unresolved_files()
699 unresolved = self._remote.get_unresolved_files()
699 log.debug('unresolved files: %s', unresolved)
700 log.debug('unresolved files: %s', unresolved)
700
701
701 # Cleanup any merge leftovers
702 # Cleanup any merge leftovers
702 self._remote.update(clean=True)
703 self._remote.update(clean=True)
703 if unresolved:
704 if unresolved:
704 raise UnresolvedFilesInRepo(unresolved)
705 raise UnresolvedFilesInRepo(unresolved)
705 else:
706 else:
706 raise
707 raise
707
708
708 def _local_close(self, target_ref, user_name, user_email,
709 def _local_close(self, target_ref, user_name, user_email,
709 source_ref, close_message=''):
710 source_ref, close_message=''):
710 """
711 """
711 Close the branch of the given source_revision
712 Close the branch of the given source_revision
712
713
713 Returns the commit id of the close and a boolean indicating if the
714 Returns the commit id of the close and a boolean indicating if the
714 commit needs to be pushed.
715 commit needs to be pushed.
715 """
716 """
716 self._update(source_ref.commit_id)
717 self._update(source_ref.commit_id)
717 message = close_message or f"Closing branch: `{source_ref.name}`"
718 message = close_message or f"Closing branch: `{source_ref.name}`"
718 try:
719 try:
719 self._remote.commit(
720 self._remote.commit(
720 message=safe_str(message),
721 message=safe_str(message),
721 username=safe_str(f'{user_name} <{user_email}>'),
722 username=safe_str(f'{user_name} <{user_email}>'),
722 close_branch=True)
723 close_branch=True)
723 self._remote.invalidate_vcs_cache()
724 self._remote.invalidate_vcs_cache()
724 return self._identify(), True
725 return self._identify(), True
725 except RepositoryError:
726 except RepositoryError:
726 # Cleanup any commit leftovers
727 # Cleanup any commit leftovers
727 self._remote.update(clean=True)
728 self._remote.update(clean=True)
728 raise
729 raise
729
730
730 def _is_the_same_branch(self, target_ref, source_ref):
731 def _is_the_same_branch(self, target_ref, source_ref):
731 return (
732 return (
732 self._get_branch_name(target_ref) ==
733 self._get_branch_name(target_ref) ==
733 self._get_branch_name(source_ref))
734 self._get_branch_name(source_ref))
734
735
735 def _get_branch_name(self, ref):
736 def _get_branch_name(self, ref):
736 if ref.type == 'branch':
737 if ref.type == 'branch':
737 return ref.name
738 return ref.name
738 return self._remote.ctx_branch(ref.commit_id)
739 return self._remote.ctx_branch(ref.commit_id)
739
740
740 def _maybe_prepare_merge_workspace(
741 def _maybe_prepare_merge_workspace(
741 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
742 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
742 shadow_repository_path = self._get_shadow_repository_path(
743 shadow_repository_path = self._get_shadow_repository_path(
743 self.path, repo_id, workspace_id)
744 self.path, repo_id, workspace_id)
744 if not os.path.exists(shadow_repository_path):
745 if not os.path.exists(shadow_repository_path):
745 self._local_clone(shadow_repository_path)
746 self._local_clone(shadow_repository_path)
746 log.debug(
747 log.debug(
747 'Prepared shadow repository in %s', shadow_repository_path)
748 'Prepared shadow repository in %s', shadow_repository_path)
748
749
749 return shadow_repository_path
750 return shadow_repository_path
750
751
751 def _merge_repo(self, repo_id, workspace_id, target_ref,
752 def _merge_repo(self, repo_id, workspace_id, target_ref,
752 source_repo, source_ref, merge_message,
753 source_repo, source_ref, merge_message,
753 merger_name, merger_email, dry_run=False,
754 merger_name, merger_email, dry_run=False,
754 use_rebase=False, close_branch=False):
755 use_rebase=False, close_branch=False):
755
756
756 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
757 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
757 'rebase' if use_rebase else 'merge', dry_run)
758 'rebase' if use_rebase else 'merge', dry_run)
759
758 if target_ref.commit_id not in self._heads():
760 if target_ref.commit_id not in self._heads():
759 return MergeResponse(
761 return MergeResponse(
760 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
762 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
761 metadata={'target_ref': target_ref})
763 metadata={'target_ref': target_ref})
762
764
763 try:
765 try:
764 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
766 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
765 heads_all = self._heads(target_ref.name)
767 heads_all = self._heads(target_ref.name)
766 max_heads = 10
768 max_heads = 10
767 if len(heads_all) > max_heads:
769 if len(heads_all) > max_heads:
768 heads = '\n,'.join(
770 heads = '\n,'.join(
769 heads_all[:max_heads] +
771 heads_all[:max_heads] +
770 [f'and {len(heads_all)-max_heads} more.'])
772 [f'and {len(heads_all)-max_heads} more.'])
771 else:
773 else:
772 heads = '\n,'.join(heads_all)
774 heads = '\n,'.join(heads_all)
773 metadata = {
775 metadata = {
774 'target_ref': target_ref,
776 'target_ref': target_ref,
775 'source_ref': source_ref,
777 'source_ref': source_ref,
776 'heads': heads
778 'heads': heads
777 }
779 }
778 return MergeResponse(
780 return MergeResponse(
779 False, False, None,
781 False, False, None,
780 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
782 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
781 metadata=metadata)
783 metadata=metadata)
782 except CommitDoesNotExistError:
784 except CommitDoesNotExistError:
783 log.exception('Failure when looking up branch heads on hg target')
785 log.exception('Failure when looking up branch heads on hg target')
784 return MergeResponse(
786 return MergeResponse(
785 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
787 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
786 metadata={'target_ref': target_ref})
788 metadata={'target_ref': target_ref})
787
789
788 shadow_repository_path = self._maybe_prepare_merge_workspace(
790 shadow_repository_path = self._maybe_prepare_merge_workspace(
789 repo_id, workspace_id, target_ref, source_ref)
791 repo_id, workspace_id, target_ref, source_ref)
790 shadow_repo = self.get_shadow_instance(shadow_repository_path)
792 shadow_repo = self.get_shadow_instance(shadow_repository_path)
791
793
792 log.debug('Pulling in target reference %s', target_ref)
794 log.debug('Pulling in target reference %s', target_ref)
793 self._validate_pull_reference(target_ref)
795 self._validate_pull_reference(target_ref)
794 shadow_repo._local_pull(self.path, target_ref)
796 shadow_repo._local_pull(self.path, target_ref)
795
797
796 try:
798 try:
797 log.debug('Pulling in source reference %s', source_ref)
799 log.debug('Pulling in source reference %s', source_ref)
798 source_repo._validate_pull_reference(source_ref)
800 source_repo._validate_pull_reference(source_ref)
799 shadow_repo._local_pull(source_repo.path, source_ref)
801 shadow_repo._local_pull(source_repo.path, source_ref)
800 except CommitDoesNotExistError:
802 except CommitDoesNotExistError:
801 log.exception('Failure when doing local pull on hg shadow repo')
803 log.exception('Failure when doing local pull on hg shadow repo')
802 return MergeResponse(
804 return MergeResponse(
803 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
805 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
804 metadata={'source_ref': source_ref})
806 metadata={'source_ref': source_ref})
805
807
806 merge_ref = None
808 merge_ref = None
807 merge_commit_id = None
809 merge_commit_id = None
808 close_commit_id = None
810 close_commit_id = None
809 merge_failure_reason = MergeFailureReason.NONE
811 merge_failure_reason = MergeFailureReason.NONE
810 metadata = {}
812 metadata = {}
811
813
812 # enforce that close branch should be used only in case we source from
814 # enforce that close branch should be used only in case we source from
813 # an actual Branch
815 # an actual Branch
814 close_branch = close_branch and source_ref.type == 'branch'
816 close_branch = close_branch and source_ref.type == 'branch'
815
817
816 # don't allow to close branch if source and target are the same
818 # don't allow to close branch if source and target are the same
817 close_branch = close_branch and source_ref.name != target_ref.name
819 close_branch = close_branch and source_ref.name != target_ref.name
818
820
819 needs_push_on_close = False
821 needs_push_on_close = False
820 if close_branch and not use_rebase and not dry_run:
822 if close_branch and not use_rebase and not dry_run:
821 try:
823 try:
822 close_commit_id, needs_push_on_close = shadow_repo._local_close(
824 close_commit_id, needs_push_on_close = shadow_repo._local_close(
823 target_ref, merger_name, merger_email, source_ref)
825 target_ref, merger_name, merger_email, source_ref)
824 merge_possible = True
826 merge_possible = True
825 except RepositoryError:
827 except RepositoryError:
826 log.exception('Failure when doing close branch on '
828 log.exception('Failure when doing close branch on '
827 'shadow repo: %s', shadow_repo)
829 'shadow repo: %s', shadow_repo)
828 merge_possible = False
830 merge_possible = False
829 merge_failure_reason = MergeFailureReason.MERGE_FAILED
831 merge_failure_reason = MergeFailureReason.MERGE_FAILED
830 else:
832 else:
831 merge_possible = True
833 merge_possible = True
832
834
833 needs_push = False
835 needs_push = False
834 if merge_possible:
836 if merge_possible:
835
837
836 try:
838 try:
837 merge_commit_id, needs_push = shadow_repo._local_merge(
839 merge_commit_id, needs_push = shadow_repo._local_merge(
838 target_ref, merge_message, merger_name, merger_email,
840 target_ref, merge_message, merger_name, merger_email,
839 source_ref, use_rebase=use_rebase,
841 source_ref, use_rebase=use_rebase,
840 close_commit_id=close_commit_id, dry_run=dry_run)
842 close_commit_id=close_commit_id, dry_run=dry_run)
841 merge_possible = True
843 merge_possible = True
842
844
843 # read the state of the close action, if it
845 # read the state of the close action, if it
844 # maybe required a push
846 # maybe required a push
845 needs_push = needs_push or needs_push_on_close
847 needs_push = needs_push or needs_push_on_close
846
848
847 # Set a bookmark pointing to the merge commit. This bookmark
849 # Set a bookmark pointing to the merge commit. This bookmark
848 # may be used to easily identify the last successful merge
850 # may be used to easily identify the last successful merge
849 # commit in the shadow repository.
851 # commit in the shadow repository.
850 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
852 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
851 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
853 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
852 except SubrepoMergeError:
854 except SubrepoMergeError:
853 log.exception(
855 log.exception(
854 'Subrepo merge error during local merge on hg shadow repo.')
856 'Subrepo merge error during local merge on hg shadow repo.')
855 merge_possible = False
857 merge_possible = False
856 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
858 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
857 needs_push = False
859 needs_push = False
858 except RepositoryError as e:
860 except RepositoryError as e:
859 log.exception('Failure when doing local merge on hg shadow repo')
861 log.exception('Failure when doing local merge on hg shadow repo')
862 metadata['unresolved_files'] = 'no unresolved files found'
863
860 if isinstance(e, UnresolvedFilesInRepo):
864 if isinstance(e, UnresolvedFilesInRepo):
861 all_conflicts = list(e.args[0])
865 all_conflicts = list(e.args[0])
862 max_conflicts = 20
866 max_conflicts = 20
863 if len(all_conflicts) > max_conflicts:
867 if len(all_conflicts) > max_conflicts:
864 conflicts = all_conflicts[:max_conflicts] \
868 conflicts = all_conflicts[:max_conflicts] \
865 + [f'and {len(all_conflicts)-max_conflicts} more.']
869 + [f'and {len(all_conflicts)-max_conflicts} more.']
866 else:
870 else:
867 conflicts = all_conflicts
871 conflicts = all_conflicts
868 metadata['unresolved_files'] = \
872 metadata['unresolved_files'] = \
869 '\n* conflict: ' + \
873 '\n* conflict: ' + \
870 ('\n * conflict: '.join(conflicts))
874 ('\n * conflict: '.join(conflicts))
871
875
872 merge_possible = False
876 merge_possible = False
873 merge_failure_reason = MergeFailureReason.MERGE_FAILED
877 merge_failure_reason = MergeFailureReason.MERGE_FAILED
874 needs_push = False
878 needs_push = False
875
879
876 if merge_possible and not dry_run:
880 if merge_possible and not dry_run:
877 if needs_push:
881 if needs_push:
878 # In case the target is a bookmark, update it, so after pushing
882 # In case the target is a bookmark, update it, so after pushing
879 # the bookmark is also updated in the target.
883 # the bookmark is also updated in the target.
880 if target_ref.type == 'book':
884 if target_ref.type == 'book':
881 shadow_repo.bookmark(
885 shadow_repo.bookmark(
882 target_ref.name, revision=merge_commit_id)
886 target_ref.name, revision=merge_commit_id)
883 try:
887 try:
884 shadow_repo_with_hooks = self.get_shadow_instance(
888 shadow_repo_with_hooks = self.get_shadow_instance(
885 shadow_repository_path,
889 shadow_repository_path,
886 enable_hooks=True)
890 enable_hooks=True)
887 # This is the actual merge action, we push from shadow
891 # This is the actual merge action, we push from shadow
888 # into origin.
892 # into origin.
889 # Note: the push_branches option will push any new branch
893 # Note: the push_branches option will push any new branch
890 # defined in the source repository to the target. This may
894 # defined in the source repository to the target. This may
891 # be dangerous as branches are permanent in Mercurial.
895 # be dangerous as branches are permanent in Mercurial.
892 # This feature was requested in issue #441.
896 # This feature was requested in issue #441.
893 shadow_repo_with_hooks._local_push(
897 shadow_repo_with_hooks._local_push(
894 merge_commit_id, self.path, push_branches=True,
898 merge_commit_id, self.path, push_branches=True,
895 enable_hooks=True)
899 enable_hooks=True)
896
900
897 # we may also need to push the close_commit_id
901 # we may also need to push the close_commit_id
898 if close_commit_id:
902 if close_commit_id:
899 shadow_repo_with_hooks._local_push(
903 shadow_repo_with_hooks._local_push(
900 close_commit_id, self.path, push_branches=True,
904 close_commit_id, self.path, push_branches=True,
901 enable_hooks=True)
905 enable_hooks=True)
902 merge_succeeded = True
906 merge_succeeded = True
903 except RepositoryError:
907 except RepositoryError:
904 log.exception(
908 log.exception(
905 'Failure when doing local push from the shadow '
909 'Failure when doing local push from the shadow '
906 'repository to the target repository at %s.', self.path)
910 'repository to the target repository at %s.', self.path)
907 merge_succeeded = False
911 merge_succeeded = False
908 merge_failure_reason = MergeFailureReason.PUSH_FAILED
912 merge_failure_reason = MergeFailureReason.PUSH_FAILED
909 metadata['target'] = 'hg shadow repo'
913 metadata['target'] = 'hg shadow repo'
910 metadata['merge_commit'] = merge_commit_id
914 metadata['merge_commit'] = merge_commit_id
911 else:
915 else:
912 merge_succeeded = True
916 merge_succeeded = True
913 else:
917 else:
914 merge_succeeded = False
918 merge_succeeded = False
915
919
916 return MergeResponse(
920 return MergeResponse(
917 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
921 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
918 metadata=metadata)
922 metadata=metadata)
919
923
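For reference, a minimal standalone sketch of how the unresolved-files metadata above is built from a conflict list; the truncation mirrors the max_conflicts handling in the merge error path, and the sample file names are made up:

def unresolved_files_message(all_conflicts, max_conflicts=20):
    # Keep at most max_conflicts entries and note how many were dropped.
    if len(all_conflicts) > max_conflicts:
        conflicts = all_conflicts[:max_conflicts] + [
            f'and {len(all_conflicts) - max_conflicts} more.']
    else:
        conflicts = all_conflicts
    return '\n* conflict: ' + '\n * conflict: '.join(conflicts)

# Example with hypothetical file names:
print(unresolved_files_message(['setup.py', 'README.rst']))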
920 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
924 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
921 config = self.config.copy()
925 config = self.config.copy()
922 if not enable_hooks:
926 if not enable_hooks:
923 config.clear_section('hooks')
927 config.clear_section('hooks')
924 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
928 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
925
929
926 def _validate_pull_reference(self, reference):
930 def _validate_pull_reference(self, reference):
927 if not (reference.name in self.bookmarks or
931 if not (reference.name in self.bookmarks or
928 reference.name in self.branches or
932 reference.name in self.branches or
929 self.get_commit(reference.commit_id)):
933 self.get_commit(reference.commit_id)):
930 raise CommitDoesNotExistError(
934 raise CommitDoesNotExistError(
931 'Unknown branch, bookmark or commit id')
935 'Unknown branch, bookmark or commit id')
932
936
933 def _local_pull(self, repository_path, reference):
937 def _local_pull(self, repository_path, reference):
934 """
938 """
935 Fetch a branch, bookmark or commit from a local repository.
939 Fetch a branch, bookmark or commit from a local repository.
936 """
940 """
937 repository_path = os.path.abspath(repository_path)
941 repository_path = os.path.abspath(repository_path)
938 if repository_path == self.path:
942 if repository_path == self.path:
939 raise ValueError('Cannot pull from the same repository')
943 raise ValueError('Cannot pull from the same repository')
940
944
941 reference_type_to_option_name = {
945 reference_type_to_option_name = {
942 'book': 'bookmark',
946 'book': 'bookmark',
943 'branch': 'branch',
947 'branch': 'branch',
944 }
948 }
945 option_name = reference_type_to_option_name.get(
949 option_name = reference_type_to_option_name.get(
946 reference.type, 'revision')
950 reference.type, 'revision')
947
951
948 if option_name == 'revision':
952 if option_name == 'revision':
949 ref = reference.commit_id
953 ref = reference.commit_id
950 else:
954 else:
951 ref = reference.name
955 ref = reference.name
952
956
953 options = {option_name: [ref]}
957 options = {option_name: [ref]}
954 self._remote.pull_cmd(repository_path, hooks=False, **options)
958 self._remote.pull_cmd(repository_path, hooks=False, **options)
955 self._remote.invalidate_vcs_cache()
959 self._remote.invalidate_vcs_cache()
956
960
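For reference, a minimal sketch of how a Reference is translated into the keyword arguments that _local_pull passes to pull_cmd above: bookmarks and branches pull by name, anything else by exact commit id. The namedtuple here is a stand-in for the real Reference class:

from collections import namedtuple

Reference = namedtuple('Reference', ['type', 'name', 'commit_id'])

def pull_options(reference):
    # 'book' -> bookmark, 'branch' -> branch, everything else -> revision.
    option_name = {'book': 'bookmark', 'branch': 'branch'}.get(
        reference.type, 'revision')
    ref = reference.commit_id if option_name == 'revision' else reference.name
    return {option_name: [ref]}

# Example: a bookmark reference becomes {'bookmark': ['pr-merge']}
print(pull_options(Reference('book', 'pr-merge', 'deadbeef' * 5)))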
957 def bookmark(self, bookmark, revision=None):
961 def bookmark(self, bookmark, revision=None):
958 if isinstance(bookmark, str):
962 if isinstance(bookmark, str):
959 bookmark = safe_str(bookmark)
963 bookmark = safe_str(bookmark)
960 self._remote.bookmark(bookmark, revision=revision)
964 self._remote.bookmark(bookmark, revision=revision)
961 self._remote.invalidate_vcs_cache()
965 self._remote.invalidate_vcs_cache()
962
966
963 def get_path_permissions(self, username):
967 def get_path_permissions(self, username):
964 hgacl_file = os.path.join(self.path, '.hg/hgacl')
968 hgacl_file = os.path.join(self.path, '.hg/hgacl')
965
969
966 def read_patterns(suffix):
970 def read_patterns(suffix):
967 svalue = None
971 svalue = None
968 for section, option in [
972 for section, option in [
969 ('narrowacl', username + suffix),
973 ('narrowacl', username + suffix),
970 ('narrowacl', 'default' + suffix),
974 ('narrowacl', 'default' + suffix),
971 ('narrowhgacl', username + suffix),
975 ('narrowhgacl', username + suffix),
972 ('narrowhgacl', 'default' + suffix)
976 ('narrowhgacl', 'default' + suffix)
973 ]:
977 ]:
974 try:
978 try:
975 svalue = hgacl.get(section, option)
979 svalue = hgacl.get(section, option)
976 break # stop at the first value we find
980 break # stop at the first value we find
977 except configparser.NoOptionError:
981 except configparser.NoOptionError:
978 pass
982 pass
979 if not svalue:
983 if not svalue:
980 return None
984 return None
981 result = ['/']
985 result = ['/']
982 for pattern in svalue.split():
986 for pattern in svalue.split():
983 result.append(pattern)
987 result.append(pattern)
984 if '*' not in pattern and '?' not in pattern:
988 if '*' not in pattern and '?' not in pattern:
985 result.append(pattern + '/*')
989 result.append(pattern + '/*')
986 return result
990 return result
987
991
988 if os.path.exists(hgacl_file):
992 if os.path.exists(hgacl_file):
989 try:
993 try:
990 hgacl = configparser.RawConfigParser()
994 hgacl = configparser.RawConfigParser()
991 hgacl.read(hgacl_file)
995 hgacl.read(hgacl_file)
992
996
993 includes = read_patterns('.includes')
997 includes = read_patterns('.includes')
994 excludes = read_patterns('.excludes')
998 excludes = read_patterns('.excludes')
995 return BasePathPermissionChecker.create_from_patterns(
999 return BasePathPermissionChecker.create_from_patterns(
996 includes, excludes)
1000 includes, excludes)
997 except BaseException as e:
1001 except BaseException as e:
998 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
1002 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
999 hgacl_file, self.name, e)
1003 hgacl_file, self.name, e)
1000 raise exceptions.RepositoryRequirementError(msg)
1004 raise exceptions.RepositoryRequirementError(msg)
1001 else:
1005 else:
1002 return None
1006 return None
1003
1007
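For reference, a small standalone sketch of the .hg/hgacl pattern expansion performed by read_patterns above: every pattern is kept as-is, and patterns without wildcards additionally get a '/*' variant so the whole subtree matches. The sample file content and option name are illustrative:

import configparser
import io

HGACL = """
[narrowacl]
default.includes = docs vcs/backends/*.py
"""

def expand(svalue):
    result = ['/']
    for pattern in svalue.split():
        result.append(pattern)
        # Plain paths also match everything below them.
        if '*' not in pattern and '?' not in pattern:
            result.append(pattern + '/*')
    return result

hgacl = configparser.RawConfigParser()
hgacl.read_file(io.StringIO(HGACL))
print(expand(hgacl.get('narrowacl', 'default.includes')))
# -> ['/', 'docs', 'docs/*', 'vcs/backends/*.py']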
1004
1008
1005 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1009 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1006
1010
1007 def _commit_factory(self, commit_id):
1011 def _commit_factory(self, commit_id):
1008 if isinstance(commit_id, int):
1012 if isinstance(commit_id, int):
1009 return self.repo.get_commit(
1013 return self.repo.get_commit(
1010 commit_idx=commit_id, pre_load=self.pre_load)
1014 commit_idx=commit_id, pre_load=self.pre_load)
1011 else:
1015 else:
1012 return self.repo.get_commit(
1016 return self.repo.get_commit(
1013 commit_id=commit_id, pre_load=self.pre_load)
1017 commit_id=commit_id, pre_load=self.pre_load)
@@ -1,1181 +1,1181 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import os
20 import os
21
21
22 import mock
22 import mock
23 import pytest
23 import pytest
24
24
25 from rhodecode.lib.str_utils import safe_bytes
25 from rhodecode.lib.str_utils import safe_bytes
26 from rhodecode.lib.utils import make_db_config
26 from rhodecode.lib.utils import make_db_config
27 from rhodecode.lib.vcs import backends
27 from rhodecode.lib.vcs import backends
28 from rhodecode.lib.vcs.backends.base import (
28 from rhodecode.lib.vcs.backends.base import (
29 Reference, MergeResponse, MergeFailureReason)
29 Reference, MergeResponse, MergeFailureReason)
30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
30 from rhodecode.lib.vcs.backends.hg import MercurialRepository, MercurialCommit
31 from rhodecode.lib.vcs.exceptions import (
31 from rhodecode.lib.vcs.exceptions import (
32 RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError)
32 RepositoryError, VCSError, NodeDoesNotExistError, CommitDoesNotExistError)
33 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
33 from rhodecode.lib.vcs.nodes import FileNode, NodeKind, NodeState
34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE, repo_id_generator
34 from rhodecode.tests import TEST_HG_REPO, TEST_HG_REPO_CLONE, repo_id_generator
35
35
36
36
37 pytestmark = pytest.mark.backends("hg")
37 pytestmark = pytest.mark.backends("hg")
38
38
39
39
40 def repo_path_generator():
40 def repo_path_generator():
41 """
41 """
42 Yield a different path to be used for cloning repos each time.
42 Yield a different path to be used for cloning repos each time.
43 """
43 """
44 i = 0
44 i = 0
45 while True:
45 while True:
46 i += 1
46 i += 1
47 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
47 yield '%s-%d' % (TEST_HG_REPO_CLONE, i)
48
48
49 REPO_PATH_GENERATOR = repo_path_generator()
49 REPO_PATH_GENERATOR = repo_path_generator()
50
50
51
51
52 @pytest.fixture(scope='class', autouse=True)
52 @pytest.fixture(scope='class', autouse=True)
53 def repo(request, baseapp):
53 def repo(request, baseapp):
54 repo = MercurialRepository(TEST_HG_REPO)
54 repo = MercurialRepository(TEST_HG_REPO)
55 if request.cls:
55 if request.cls:
56 request.cls.repo = repo
56 request.cls.repo = repo
57 return repo
57 return repo
58
58
59
59
60 class TestMercurialRepository(object):
60 class TestMercurialRepository(object):
61
61
62 # pylint: disable=protected-access
62 # pylint: disable=protected-access
63
63
64 def get_clone_repo(self):
64 def get_clone_repo(self):
65 """
65 """
66 Return a clone of the base repo.
66 Return a clone of the base repo.
67 """
67 """
68 clone_path = next(REPO_PATH_GENERATOR)
68 clone_path = next(REPO_PATH_GENERATOR)
69 repo_clone = MercurialRepository(
69 repo_clone = MercurialRepository(
70 clone_path, create=True, src_url=self.repo.path)
70 clone_path, create=True, src_url=self.repo.path)
71
71
72 return repo_clone
72 return repo_clone
73
73
74 def get_empty_repo(self):
74 def get_empty_repo(self):
75 """
75 """
76 Return an empty repo.
76 Return an empty repo.
77 """
77 """
78 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
78 return MercurialRepository(next(REPO_PATH_GENERATOR), create=True)
79
79
80 def test_wrong_repo_path(self):
80 def test_wrong_repo_path(self):
81 wrong_repo_path = '/tmp/errorrepo_hg'
81 wrong_repo_path = '/tmp/errorrepo_hg'
82 with pytest.raises(RepositoryError):
82 with pytest.raises(RepositoryError):
83 MercurialRepository(wrong_repo_path)
83 MercurialRepository(wrong_repo_path)
84
84
85 def test_unicode_path_repo(self):
85 def test_unicode_path_repo(self):
86 with pytest.raises(VCSError):
86 with pytest.raises(VCSError):
87 MercurialRepository('iShouldFail')
87 MercurialRepository('iShouldFail')
88
88
89 def test_unicode_commit_id(self):
89 def test_unicode_commit_id(self):
90 with pytest.raises(CommitDoesNotExistError):
90 with pytest.raises(CommitDoesNotExistError):
91 self.repo.get_commit('unicode-commit-id')
91 self.repo.get_commit('unicode-commit-id')
92 with pytest.raises(CommitDoesNotExistError):
92 with pytest.raises(CommitDoesNotExistError):
93 self.repo.get_commit('unícøde-spéçial-chäråcter-commit-id')
93 self.repo.get_commit('unícøde-spéçial-chäråcter-commit-id')
94
94
95 def test_unicode_bookmark(self):
95 def test_unicode_bookmark(self):
96 self.repo.bookmark('unicode-bookmark')
96 self.repo.bookmark('unicode-bookmark')
97 self.repo.bookmark('unícøde-spéçial-chäråcter-bookmark')
97 self.repo.bookmark('unícøde-spéçial-chäråcter-bookmark')
98
98
99 def test_unicode_branch(self):
99 def test_unicode_branch(self):
100 with pytest.raises(KeyError):
100 with pytest.raises(KeyError):
101 assert self.repo.branches['unicode-branch']
101 assert self.repo.branches['unicode-branch']
102 with pytest.raises(KeyError):
102 with pytest.raises(KeyError):
103 assert self.repo.branches['unícøde-spéçial-chäråcter-branch']
103 assert self.repo.branches['unícøde-spéçial-chäråcter-branch']
104
104
105 def test_repo_clone(self):
105 def test_repo_clone(self):
106 if os.path.exists(TEST_HG_REPO_CLONE):
106 if os.path.exists(TEST_HG_REPO_CLONE):
107 self.fail(
107 self.fail(
108 'Cannot test mercurial clone repo as location %s already '
108 'Cannot test mercurial clone repo as location %s already '
109 'exists. You should manually remove it first.'
109 'exists. You should manually remove it first.'
110 % TEST_HG_REPO_CLONE)
110 % TEST_HG_REPO_CLONE)
111
111
112 repo = MercurialRepository(TEST_HG_REPO)
112 repo = MercurialRepository(TEST_HG_REPO)
113 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
113 repo_clone = MercurialRepository(TEST_HG_REPO_CLONE,
114 src_url=TEST_HG_REPO)
114 src_url=TEST_HG_REPO)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
115 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
116 # Checking hashes of commits should be enough
116 # Checking hashes of commits should be enough
117 for commit in repo.get_commits():
117 for commit in repo.get_commits():
118 raw_id = commit.raw_id
118 raw_id = commit.raw_id
119 assert raw_id == repo_clone.get_commit(raw_id).raw_id
119 assert raw_id == repo_clone.get_commit(raw_id).raw_id
120
120
121 def test_repo_clone_with_update(self):
121 def test_repo_clone_with_update(self):
122 repo = MercurialRepository(TEST_HG_REPO)
122 repo = MercurialRepository(TEST_HG_REPO)
123 repo_clone = MercurialRepository(
123 repo_clone = MercurialRepository(
124 TEST_HG_REPO_CLONE + '_w_update',
124 TEST_HG_REPO_CLONE + '_w_update',
125 src_url=TEST_HG_REPO, do_workspace_checkout=True)
125 src_url=TEST_HG_REPO, do_workspace_checkout=True)
126 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
126 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
127
127
128 # check if current workdir was updated
128 # check if current workdir was updated
129 assert os.path.isfile(
129 assert os.path.isfile(
130 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
130 os.path.join(TEST_HG_REPO_CLONE + '_w_update', 'MANIFEST.in'))
131
131
132 def test_repo_clone_without_update(self):
132 def test_repo_clone_without_update(self):
133 repo = MercurialRepository(TEST_HG_REPO)
133 repo = MercurialRepository(TEST_HG_REPO)
134 repo_clone = MercurialRepository(
134 repo_clone = MercurialRepository(
135 TEST_HG_REPO_CLONE + '_wo_update',
135 TEST_HG_REPO_CLONE + '_wo_update',
136 src_url=TEST_HG_REPO, do_workspace_checkout=False)
136 src_url=TEST_HG_REPO, do_workspace_checkout=False)
137 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
137 assert len(repo.commit_ids) == len(repo_clone.commit_ids)
138 assert not os.path.isfile(
138 assert not os.path.isfile(
139 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
139 os.path.join(TEST_HG_REPO_CLONE + '_wo_update', 'MANIFEST.in'))
140
140
141 def test_commit_ids(self):
141 def test_commit_ids(self):
142 # there are 21 commits at bitbucket now
142 # there are 21 commits at bitbucket now
143 # so we can assume they would be available from now on
143 # so we can assume they would be available from now on
144 subset = {'b986218ba1c9b0d6a259fac9b050b1724ed8e545', '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
144 subset = {'b986218ba1c9b0d6a259fac9b050b1724ed8e545', '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
145 '6cba7170863a2411822803fa77a0a264f1310b35', '56349e29c2af3ac913b28bde9a2c6154436e615b',
145 '6cba7170863a2411822803fa77a0a264f1310b35', '56349e29c2af3ac913b28bde9a2c6154436e615b',
146 '2dda4e345facb0ccff1a191052dd1606dba6781d', '6fff84722075f1607a30f436523403845f84cd9e',
146 '2dda4e345facb0ccff1a191052dd1606dba6781d', '6fff84722075f1607a30f436523403845f84cd9e',
147 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7', '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
147 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7', '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
148 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c', 'be90031137367893f1c406e0a8683010fd115b79',
148 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c', 'be90031137367893f1c406e0a8683010fd115b79',
149 'db8e58be770518cbb2b1cdfa69146e47cd481481', '84478366594b424af694a6c784cb991a16b87c21',
149 'db8e58be770518cbb2b1cdfa69146e47cd481481', '84478366594b424af694a6c784cb991a16b87c21',
150 '17f8e105dddb9f339600389c6dc7175d395a535c', '20a662e756499bde3095ffc9bc0643d1def2d0eb',
150 '17f8e105dddb9f339600389c6dc7175d395a535c', '20a662e756499bde3095ffc9bc0643d1def2d0eb',
151 '2e319b85e70a707bba0beff866d9f9de032aa4f9', '786facd2c61deb9cf91e9534735124fb8fc11842',
151 '2e319b85e70a707bba0beff866d9f9de032aa4f9', '786facd2c61deb9cf91e9534735124fb8fc11842',
152 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8', 'aa6a0de05b7612707db567078e130a6cd114a9a7',
152 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8', 'aa6a0de05b7612707db567078e130a6cd114a9a7',
153 'eada5a770da98ab0dd7325e29d00e0714f228d09'
153 'eada5a770da98ab0dd7325e29d00e0714f228d09'
154 }
154 }
155 assert subset.issubset(set(self.repo.commit_ids))
155 assert subset.issubset(set(self.repo.commit_ids))
156
156
157 # check if we have the proper order of commits
157 # check if we have the proper order of commits
158 org = [
158 org = [
159 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
159 'b986218ba1c9b0d6a259fac9b050b1724ed8e545',
160 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
160 '3d8f361e72ab303da48d799ff1ac40d5ac37c67e',
161 '6cba7170863a2411822803fa77a0a264f1310b35',
161 '6cba7170863a2411822803fa77a0a264f1310b35',
162 '56349e29c2af3ac913b28bde9a2c6154436e615b',
162 '56349e29c2af3ac913b28bde9a2c6154436e615b',
163 '2dda4e345facb0ccff1a191052dd1606dba6781d',
163 '2dda4e345facb0ccff1a191052dd1606dba6781d',
164 '6fff84722075f1607a30f436523403845f84cd9e',
164 '6fff84722075f1607a30f436523403845f84cd9e',
165 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
165 '7d4bc8ec6be56c0f10425afb40b6fc315a4c25e7',
166 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
166 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb',
167 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
167 'dc5d2c0661b61928834a785d3e64a3f80d3aad9c',
168 'be90031137367893f1c406e0a8683010fd115b79',
168 'be90031137367893f1c406e0a8683010fd115b79',
169 'db8e58be770518cbb2b1cdfa69146e47cd481481',
169 'db8e58be770518cbb2b1cdfa69146e47cd481481',
170 '84478366594b424af694a6c784cb991a16b87c21',
170 '84478366594b424af694a6c784cb991a16b87c21',
171 '17f8e105dddb9f339600389c6dc7175d395a535c',
171 '17f8e105dddb9f339600389c6dc7175d395a535c',
172 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
172 '20a662e756499bde3095ffc9bc0643d1def2d0eb',
173 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
173 '2e319b85e70a707bba0beff866d9f9de032aa4f9',
174 '786facd2c61deb9cf91e9534735124fb8fc11842',
174 '786facd2c61deb9cf91e9534735124fb8fc11842',
175 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
175 '94593d2128d38210a2fcd1aabff6dda0d6d9edf8',
176 'aa6a0de05b7612707db567078e130a6cd114a9a7',
176 'aa6a0de05b7612707db567078e130a6cd114a9a7',
177 'eada5a770da98ab0dd7325e29d00e0714f228d09',
177 'eada5a770da98ab0dd7325e29d00e0714f228d09',
178 '2c1885c735575ca478bf9e17b0029dca68824458',
178 '2c1885c735575ca478bf9e17b0029dca68824458',
179 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
179 'd9bcd465040bf869799b09ad732c04e0eea99fe9',
180 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
180 '469e9c847fe1f6f7a697b8b25b4bc5b48780c1a7',
181 '4fb8326d78e5120da2c7468dcf7098997be385da',
181 '4fb8326d78e5120da2c7468dcf7098997be385da',
182 '62b4a097164940bd66030c4db51687f3ec035eed',
182 '62b4a097164940bd66030c4db51687f3ec035eed',
183 '536c1a19428381cfea92ac44985304f6a8049569',
183 '536c1a19428381cfea92ac44985304f6a8049569',
184 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
184 '965e8ab3c44b070cdaa5bf727ddef0ada980ecc4',
185 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
185 '9bb326a04ae5d98d437dece54be04f830cf1edd9',
186 'f8940bcb890a98c4702319fbe36db75ea309b475',
186 'f8940bcb890a98c4702319fbe36db75ea309b475',
187 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
187 'ff5ab059786ebc7411e559a2cc309dfae3625a3b',
188 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
188 '6b6ad5f82ad5bb6190037671bd254bd4e1f4bf08',
189 'ee87846a61c12153b51543bf860e1026c6d3dcba',
189 'ee87846a61c12153b51543bf860e1026c6d3dcba',
190 ]
190 ]
191 assert org == self.repo.commit_ids[:31]
191 assert org == self.repo.commit_ids[:31]
192
192
193 def test_iter_slice(self):
193 def test_iter_slice(self):
194 sliced = list(self.repo[:10])
194 sliced = list(self.repo[:10])
195 itered = list(self.repo)[:10]
195 itered = list(self.repo)[:10]
196 assert sliced == itered
196 assert sliced == itered
197
197
198 def test_slicing(self):
198 def test_slicing(self):
199 # 4 1 5 10 95
199 # 4 1 5 10 95
200 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
200 for sfrom, sto, size in [(0, 4, 4), (1, 2, 1), (10, 15, 5),
201 (10, 20, 10), (5, 100, 95)]:
201 (10, 20, 10), (5, 100, 95)]:
202 indexes = list(self.repo[sfrom:sto])
202 indexes = list(self.repo[sfrom:sto])
203 assert len(indexes) == size
203 assert len(indexes) == size
204 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
204 assert indexes[0] == self.repo.get_commit(commit_idx=sfrom)
205 assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1)
205 assert indexes[-1] == self.repo.get_commit(commit_idx=sto - 1)
206
206
207 def test_branches(self):
207 def test_branches(self):
208 # TODO: Need more tests here
208 # TODO: Need more tests here
209
209
210 # active branches
210 # active branches
211 assert 'default' in self.repo.branches
211 assert 'default' in self.repo.branches
212 assert 'stable' in self.repo.branches
212 assert 'stable' in self.repo.branches
213
213
214 # closed
214 # closed
215 assert 'git' in self.repo._get_branches(closed=True)
215 assert 'git' in self.repo._get_branches(closed=True)
216 assert 'web' in self.repo._get_branches(closed=True)
216 assert 'web' in self.repo._get_branches(closed=True)
217
217
218 for name, id in self.repo.branches.items():
218 for name, id in self.repo.branches.items():
219 assert isinstance(self.repo.get_commit(id), MercurialCommit)
219 assert isinstance(self.repo.get_commit(id), MercurialCommit)
220
220
221 def test_tip_in_tags(self):
221 def test_tip_in_tags(self):
222 # tip is always a tag
222 # tip is always a tag
223 assert 'tip' in self.repo.tags
223 assert 'tip' in self.repo.tags
224
224
225 def test_tip_commit_in_tags(self):
225 def test_tip_commit_in_tags(self):
226 tip = self.repo.get_commit()
226 tip = self.repo.get_commit()
227 assert self.repo.tags['tip'] == tip.raw_id
227 assert self.repo.tags['tip'] == tip.raw_id
228
228
229 def test_initial_commit(self):
229 def test_initial_commit(self):
230 init_commit = self.repo.get_commit(commit_idx=0)
230 init_commit = self.repo.get_commit(commit_idx=0)
231 init_author = init_commit.author
231 init_author = init_commit.author
232
232
233 assert init_commit.message == 'initial import'
233 assert init_commit.message == 'initial import'
234 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
234 assert init_author == 'Marcin Kuzminski <marcin@python-blog.com>'
235 assert init_author == init_commit.committer
235 assert init_author == init_commit.committer
236 assert sorted(init_commit._file_paths) == sorted([
236 assert sorted(init_commit._file_paths) == sorted([
237 'vcs/__init__.py',
237 'vcs/__init__.py',
238 'vcs/backends/BaseRepository.py',
238 'vcs/backends/BaseRepository.py',
239 'vcs/backends/__init__.py',
239 'vcs/backends/__init__.py',
240 ])
240 ])
241 assert sorted(init_commit._dir_paths) == sorted(
241 assert sorted(init_commit._dir_paths) == sorted(
242 ['', 'vcs', 'vcs/backends'])
242 ['', 'vcs', 'vcs/backends'])
243
243
244 assert init_commit._dir_paths + init_commit._file_paths == \
244 assert init_commit._dir_paths + init_commit._file_paths == \
245 init_commit._paths
245 init_commit._paths
246
246
247 with pytest.raises(NodeDoesNotExistError):
247 with pytest.raises(NodeDoesNotExistError):
248 init_commit.get_node(path='foobar')
248 init_commit.get_node(path='foobar')
249
249
250 node = init_commit.get_node('vcs/')
250 node = init_commit.get_node('vcs/')
251 assert hasattr(node, 'kind')
251 assert hasattr(node, 'kind')
252 assert node.kind == NodeKind.DIR
252 assert node.kind == NodeKind.DIR
253
253
254 node = init_commit.get_node('vcs')
254 node = init_commit.get_node('vcs')
255 assert hasattr(node, 'kind')
255 assert hasattr(node, 'kind')
256 assert node.kind == NodeKind.DIR
256 assert node.kind == NodeKind.DIR
257
257
258 node = init_commit.get_node('vcs/__init__.py')
258 node = init_commit.get_node('vcs/__init__.py')
259 assert hasattr(node, 'kind')
259 assert hasattr(node, 'kind')
260 assert node.kind == NodeKind.FILE
260 assert node.kind == NodeKind.FILE
261
261
262 def test_not_existing_commit(self):
262 def test_not_existing_commit(self):
263 # rawid
263 # rawid
264 with pytest.raises(RepositoryError):
264 with pytest.raises(RepositoryError):
265 self.repo.get_commit('abcd' * 10)
265 self.repo.get_commit('abcd' * 10)
266 # shortid
266 # shortid
267 with pytest.raises(RepositoryError):
267 with pytest.raises(RepositoryError):
268 self.repo.get_commit('erro' * 4)
268 self.repo.get_commit('erro' * 4)
269 # numeric
269 # numeric
270 with pytest.raises(RepositoryError):
270 with pytest.raises(RepositoryError):
271 self.repo.get_commit(commit_idx=self.repo.count() + 1)
271 self.repo.get_commit(commit_idx=self.repo.count() + 1)
272
272
273 # Small chance we ever get to this one
273 # Small chance we ever get to this one
274 idx = pow(2, 30)
274 idx = pow(2, 30)
275 with pytest.raises(RepositoryError):
275 with pytest.raises(RepositoryError):
276 self.repo.get_commit(commit_idx=idx)
276 self.repo.get_commit(commit_idx=idx)
277
277
278 def test_commit10(self):
278 def test_commit10(self):
279 commit10 = self.repo.get_commit(commit_idx=10)
279 commit10 = self.repo.get_commit(commit_idx=10)
280 README = """===
280 README = """===
281 VCS
281 VCS
282 ===
282 ===
283
283
284 Various Version Control System management abstraction layer for Python.
284 Various Version Control System management abstraction layer for Python.
285
285
286 Introduction
286 Introduction
287 ------------
287 ------------
288
288
289 TODO: To be written...
289 TODO: To be written...
290
290
291 """
291 """
292 node = commit10.get_node('README.rst')
292 node = commit10.get_node('README.rst')
293 assert node.kind == NodeKind.FILE
293 assert node.kind == NodeKind.FILE
294 assert node.str_content == README
294 assert node.str_content == README
295
295
296 def test_local_clone(self):
296 def test_local_clone(self):
297 clone_path = next(REPO_PATH_GENERATOR)
297 clone_path = next(REPO_PATH_GENERATOR)
298 self.repo._local_clone(clone_path)
298 self.repo._local_clone(clone_path)
299 repo_clone = MercurialRepository(clone_path)
299 repo_clone = MercurialRepository(clone_path)
300
300
301 assert self.repo.commit_ids == repo_clone.commit_ids
301 assert self.repo.commit_ids == repo_clone.commit_ids
302
302
303 def test_local_clone_fails_if_target_exists(self):
303 def test_local_clone_fails_if_target_exists(self):
304 with pytest.raises(RepositoryError):
304 with pytest.raises(RepositoryError):
305 self.repo._local_clone(self.repo.path)
305 self.repo._local_clone(self.repo.path)
306
306
307 def test_update(self):
307 def test_update(self):
308 repo_clone = self.get_clone_repo()
308 repo_clone = self.get_clone_repo()
309 branches = repo_clone.branches
309 branches = repo_clone.branches
310
310
311 repo_clone._update('default')
311 repo_clone._update('default')
312 assert branches['default'] == repo_clone._identify()
312 assert branches['default'] == repo_clone._identify()
313 repo_clone._update('stable')
313 repo_clone._update('stable')
314 assert branches['stable'] == repo_clone._identify()
314 assert branches['stable'] == repo_clone._identify()
315
315
316 def test_local_pull_branch(self):
316 def test_local_pull_branch(self):
317 target_repo = self.get_empty_repo()
317 target_repo = self.get_empty_repo()
318 source_repo = self.get_clone_repo()
318 source_repo = self.get_clone_repo()
319
319
320 default = Reference(
320 default = Reference(
321 'branch', 'default', source_repo.branches['default'])
321 'branch', 'default', source_repo.branches['default'])
322 target_repo._local_pull(source_repo.path, default)
322 target_repo._local_pull(source_repo.path, default)
323 target_repo = MercurialRepository(target_repo.path)
323 target_repo = MercurialRepository(target_repo.path)
324 assert (target_repo.branches['default'] ==
324 assert (target_repo.branches['default'] ==
325 source_repo.branches['default'])
325 source_repo.branches['default'])
326
326
327 stable = Reference('branch', 'stable', source_repo.branches['stable'])
327 stable = Reference('branch', 'stable', source_repo.branches['stable'])
328 target_repo._local_pull(source_repo.path, stable)
328 target_repo._local_pull(source_repo.path, stable)
329 target_repo = MercurialRepository(target_repo.path)
329 target_repo = MercurialRepository(target_repo.path)
330 assert target_repo.branches['stable'] == source_repo.branches['stable']
330 assert target_repo.branches['stable'] == source_repo.branches['stable']
331
331
332 def test_local_pull_bookmark(self):
332 def test_local_pull_bookmark(self):
333 target_repo = self.get_empty_repo()
333 target_repo = self.get_empty_repo()
334 source_repo = self.get_clone_repo()
334 source_repo = self.get_clone_repo()
335
335
336 commits = list(source_repo.get_commits(branch_name='default'))
336 commits = list(source_repo.get_commits(branch_name='default'))
337 foo1_id = commits[-5].raw_id
337 foo1_id = commits[-5].raw_id
338 foo1 = Reference('book', 'foo1', foo1_id)
338 foo1 = Reference('book', 'foo1', foo1_id)
339 source_repo._update(foo1_id)
339 source_repo._update(foo1_id)
340 source_repo.bookmark('foo1')
340 source_repo.bookmark('foo1')
341
341
342 foo2_id = commits[-3].raw_id
342 foo2_id = commits[-3].raw_id
343 foo2 = Reference('book', 'foo2', foo2_id)
343 foo2 = Reference('book', 'foo2', foo2_id)
344 source_repo._update(foo2_id)
344 source_repo._update(foo2_id)
345 source_repo.bookmark('foo2')
345 source_repo.bookmark('foo2')
346
346
347 target_repo._local_pull(source_repo.path, foo1)
347 target_repo._local_pull(source_repo.path, foo1)
348 target_repo = MercurialRepository(target_repo.path)
348 target_repo = MercurialRepository(target_repo.path)
349 assert target_repo.branches['default'] == commits[-5].raw_id
349 assert target_repo.branches['default'] == commits[-5].raw_id
350
350
351 target_repo._local_pull(source_repo.path, foo2)
351 target_repo._local_pull(source_repo.path, foo2)
352 target_repo = MercurialRepository(target_repo.path)
352 target_repo = MercurialRepository(target_repo.path)
353 assert target_repo.branches['default'] == commits[-3].raw_id
353 assert target_repo.branches['default'] == commits[-3].raw_id
354
354
355 def test_local_pull_commit(self):
355 def test_local_pull_commit(self):
356 target_repo = self.get_empty_repo()
356 target_repo = self.get_empty_repo()
357 source_repo = self.get_clone_repo()
357 source_repo = self.get_clone_repo()
358
358
359 commits = list(source_repo.get_commits(branch_name='default'))
359 commits = list(source_repo.get_commits(branch_name='default'))
360 commit_id = commits[-5].raw_id
360 commit_id = commits[-5].raw_id
361 commit = Reference('rev', commit_id, commit_id)
361 commit = Reference('rev', commit_id, commit_id)
362 target_repo._local_pull(source_repo.path, commit)
362 target_repo._local_pull(source_repo.path, commit)
363 target_repo = MercurialRepository(target_repo.path)
363 target_repo = MercurialRepository(target_repo.path)
364 assert target_repo.branches['default'] == commit_id
364 assert target_repo.branches['default'] == commit_id
365
365
366 commit_id = commits[-3].raw_id
366 commit_id = commits[-3].raw_id
367 commit = Reference('rev', commit_id, commit_id)
367 commit = Reference('rev', commit_id, commit_id)
368 target_repo._local_pull(source_repo.path, commit)
368 target_repo._local_pull(source_repo.path, commit)
369 target_repo = MercurialRepository(target_repo.path)
369 target_repo = MercurialRepository(target_repo.path)
370 assert target_repo.branches['default'] == commit_id
370 assert target_repo.branches['default'] == commit_id
371
371
372 def test_local_pull_from_same_repo(self):
372 def test_local_pull_from_same_repo(self):
373 reference = Reference('branch', 'default', None)
373 reference = Reference('branch', 'default', None)
374 with pytest.raises(ValueError):
374 with pytest.raises(ValueError):
375 self.repo._local_pull(self.repo.path, reference)
375 self.repo._local_pull(self.repo.path, reference)
376
376
377 def test_validate_pull_reference_raises_on_missing_reference(
377 def test_validate_pull_reference_raises_on_missing_reference(
378 self, vcsbackend_hg):
378 self, vcsbackend_hg):
379 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
379 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
380 reference = Reference(
380 reference = Reference(
381 'book', 'invalid_reference', 'a' * 40)
381 'book', 'invalid_reference', 'a' * 40)
382
382
383 with pytest.raises(CommitDoesNotExistError):
383 with pytest.raises(CommitDoesNotExistError):
384 target_repo._validate_pull_reference(reference)
384 target_repo._validate_pull_reference(reference)
385
385
386 def test_heads(self):
386 def test_heads(self):
387 assert set(self.repo._heads()) == set(self.repo.branches.values())
387 assert set(self.repo._heads()) == set(self.repo.branches.values())
388
388
389 def test_ancestor(self):
389 def test_ancestor(self):
390 commits = [
390 commits = [
391 c.raw_id for c in self.repo.get_commits(branch_name='default')]
391 c.raw_id for c in self.repo.get_commits(branch_name='default')]
392 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
392 assert self.repo._ancestor(commits[-3], commits[-5]) == commits[-5]
393 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
393 assert self.repo._ancestor(commits[-5], commits[-3]) == commits[-5]
394
394
395 def test_local_push(self):
395 def test_local_push(self):
396 target_repo = self.get_empty_repo()
396 target_repo = self.get_empty_repo()
397
397
398 revisions = list(self.repo.get_commits(branch_name='default'))
398 revisions = list(self.repo.get_commits(branch_name='default'))
399 revision = revisions[-5].raw_id
399 revision = revisions[-5].raw_id
400 self.repo._local_push(revision, target_repo.path)
400 self.repo._local_push(revision, target_repo.path)
401
401
402 target_repo = MercurialRepository(target_repo.path)
402 target_repo = MercurialRepository(target_repo.path)
403
403
404 assert target_repo.branches['default'] == revision
404 assert target_repo.branches['default'] == revision
405
405
406 def test_hooks_can_be_enabled_for_local_push(self):
406 def test_hooks_can_be_enabled_for_local_push(self):
407 revision = 'deadbeef'
407 revision = 'deadbeef'
408 repo_path = 'test_group/test_repo'
408 repo_path = 'test_group/test_repo'
409 with mock.patch.object(self.repo, '_remote') as remote_mock:
409 with mock.patch.object(self.repo, '_remote') as remote_mock:
410 self.repo._local_push(revision, repo_path, enable_hooks=True)
410 self.repo._local_push(revision, repo_path, enable_hooks=True)
411 remote_mock.push.assert_called_once_with(
411 remote_mock.push.assert_called_once_with(
412 [revision], repo_path, hooks=True, push_branches=False)
412 [revision], repo_path, hooks=True, push_branches=False)
413
413
414 def test_local_merge(self, vcsbackend_hg):
414 def test_local_merge(self, vcsbackend_hg):
415 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
415 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
416 source_repo = vcsbackend_hg.clone_repo(target_repo)
416 source_repo = vcsbackend_hg.clone_repo(target_repo)
417 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
417 vcsbackend_hg.add_file(target_repo, b'README_MERGE1', b'Version 1')
418 target_repo = MercurialRepository(target_repo.path)
418 target_repo = MercurialRepository(target_repo.path)
419 target_rev = target_repo.branches['default']
419 target_rev = target_repo.branches['default']
420 target_ref = Reference(
420 target_ref = Reference(
421 type='branch', name='default', commit_id=target_rev)
421 type='branch', name='default', commit_id=target_rev)
422 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
422 vcsbackend_hg.add_file(source_repo, b'README_MERGE2', b'Version 2')
423 source_repo = MercurialRepository(source_repo.path)
423 source_repo = MercurialRepository(source_repo.path)
424 source_rev = source_repo.branches['default']
424 source_rev = source_repo.branches['default']
425 source_ref = Reference(
425 source_ref = Reference(
426 type='branch', name='default', commit_id=source_rev)
426 type='branch', name='default', commit_id=source_rev)
427
427
428 target_repo._local_pull(source_repo.path, source_ref)
428 target_repo._local_pull(source_repo.path, source_ref)
429
429
430 merge_message = 'Merge message\n\nDescription:...'
430 merge_message = 'Merge message\n\nDescription:...'
431 user_name = 'Albert Einstein'
431 user_name = 'Albert Einstein'
432 user_email = 'albert@einstein.com'
432 user_email = 'albert@einstein.com'
433 merge_commit_id, needs_push = target_repo._local_merge(
433 merge_commit_id, needs_push = target_repo._local_merge(
434 target_ref, merge_message, user_name, user_email, source_ref)
434 target_ref, merge_message, user_name, user_email, source_ref)
435 assert needs_push
435 assert needs_push
436
436
437 target_repo = MercurialRepository(target_repo.path)
437 target_repo = MercurialRepository(target_repo.path)
438 assert target_repo.commit_ids[-3] == target_rev
438 assert target_repo.commit_ids[-3] == target_rev
439 assert target_repo.commit_ids[-2] == source_rev
439 assert target_repo.commit_ids[-2] == source_rev
440 last_commit = target_repo.get_commit(merge_commit_id)
440 last_commit = target_repo.get_commit(merge_commit_id)
441 assert last_commit.message.strip() == merge_message
441 assert last_commit.message.strip() == merge_message
442 assert last_commit.author == '%s <%s>' % (user_name, user_email)
442 assert last_commit.author == '%s <%s>' % (user_name, user_email)
443
443
444 assert not os.path.exists(
444 assert not os.path.exists(
445 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
445 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
446
446
447 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
447 def test_local_merge_source_is_fast_forward(self, vcsbackend_hg):
448 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
448 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
449 source_repo = vcsbackend_hg.clone_repo(target_repo)
449 source_repo = vcsbackend_hg.clone_repo(target_repo)
450 target_rev = target_repo.branches['default']
450 target_rev = target_repo.branches['default']
451 target_ref = Reference(
451 target_ref = Reference(
452 type='branch', name='default', commit_id=target_rev)
452 type='branch', name='default', commit_id=target_rev)
453 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
453 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
454 source_repo = MercurialRepository(source_repo.path)
454 source_repo = MercurialRepository(source_repo.path)
455 source_rev = source_repo.branches['default']
455 source_rev = source_repo.branches['default']
456 source_ref = Reference(
456 source_ref = Reference(
457 type='branch', name='default', commit_id=source_rev)
457 type='branch', name='default', commit_id=source_rev)
458
458
459 target_repo._local_pull(source_repo.path, source_ref)
459 target_repo._local_pull(source_repo.path, source_ref)
460
460
461 merge_message = 'Merge message\n\nDescription:...'
461 merge_message = 'Merge message\n\nDescription:...'
462 user_name = 'Albert Einstein'
462 user_name = 'Albert Einstein'
463 user_email = 'albert@einstein.com'
463 user_email = 'albert@einstein.com'
464 merge_commit_id, needs_push = target_repo._local_merge(
464 merge_commit_id, needs_push = target_repo._local_merge(
465 target_ref, merge_message, user_name, user_email, source_ref)
465 target_ref, merge_message, user_name, user_email, source_ref)
466 assert merge_commit_id == source_rev
466 assert merge_commit_id == source_rev
467 assert needs_push
467 assert needs_push
468
468
469 target_repo = MercurialRepository(target_repo.path)
469 target_repo = MercurialRepository(target_repo.path)
470 assert target_repo.commit_ids[-2] == target_rev
470 assert target_repo.commit_ids[-2] == target_rev
471 assert target_repo.commit_ids[-1] == source_rev
471 assert target_repo.commit_ids[-1] == source_rev
472
472
473 assert not os.path.exists(
473 assert not os.path.exists(
474 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
474 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
475
475
476 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
476 def test_local_merge_source_is_integrated(self, vcsbackend_hg):
477 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
477 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
478 target_rev = target_repo.branches['default']
478 target_rev = target_repo.branches['default']
479 target_ref = Reference(
479 target_ref = Reference(
480 type='branch', name='default', commit_id=target_rev)
480 type='branch', name='default', commit_id=target_rev)
481
481
482 merge_message = 'Merge message\n\nDescription:...'
482 merge_message = 'Merge message\n\nDescription:...'
483 user_name = 'Albert Einstein'
483 user_name = 'Albert Einstein'
484 user_email = 'albert@einstein.com'
484 user_email = 'albert@einstein.com'
485 merge_commit_id, needs_push = target_repo._local_merge(
485 merge_commit_id, needs_push = target_repo._local_merge(
486 target_ref, merge_message, user_name, user_email, target_ref)
486 target_ref, merge_message, user_name, user_email, target_ref)
487 assert merge_commit_id == target_rev
487 assert merge_commit_id == target_rev
488 assert not needs_push
488 assert not needs_push
489
489
490 target_repo = MercurialRepository(target_repo.path)
490 target_repo = MercurialRepository(target_repo.path)
491 assert target_repo.commit_ids[-1] == target_rev
491 assert target_repo.commit_ids[-1] == target_rev
492
492
493 assert not os.path.exists(
493 assert not os.path.exists(
494 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
494 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
495
495
496 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
496 def test_local_merge_raises_exception_on_conflict(self, vcsbackend_hg):
497 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
497 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
498 source_repo = vcsbackend_hg.clone_repo(target_repo)
498 source_repo = vcsbackend_hg.clone_repo(target_repo)
499 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
499 vcsbackend_hg.add_file(target_repo, 'README_MERGE', 'Version 1')
500 target_repo = MercurialRepository(target_repo.path)
500 target_repo = MercurialRepository(target_repo.path)
501 target_rev = target_repo.branches['default']
501 target_rev = target_repo.branches['default']
502 target_ref = Reference(
502 target_ref = Reference(
503 type='branch', name='default', commit_id=target_rev)
503 type='branch', name='default', commit_id=target_rev)
504 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
504 vcsbackend_hg.add_file(source_repo, 'README_MERGE', 'Version 2')
505 source_repo = MercurialRepository(source_repo.path)
505 source_repo = MercurialRepository(source_repo.path)
506 source_rev = source_repo.branches['default']
506 source_rev = source_repo.branches['default']
507 source_ref = Reference(
507 source_ref = Reference(
508 type='branch', name='default', commit_id=source_rev)
508 type='branch', name='default', commit_id=source_rev)
509
509
510 target_repo._local_pull(source_repo.path, source_ref)
510 target_repo._local_pull(source_repo.path, source_ref)
511 with pytest.raises(RepositoryError):
511 with pytest.raises(RepositoryError):
512 target_repo._local_merge(
512 target_repo._local_merge(
513 target_ref, 'merge_message', 'user name', 'user@name.com',
513 target_ref, 'merge_message', 'user name', 'user@name.com',
514 source_ref)
514 source_ref)
515
515
516 # Check we are not left in an intermediate merge state
516 # Check we are not left in an intermediate merge state
517 assert not os.path.exists(
517 assert not os.path.exists(
518 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
518 os.path.join(target_repo.path, '.hg', 'merge', 'state'))
519
519
520 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
520 def test_local_merge_of_two_branches_of_the_same_repo(self, backend_hg):
521 commits = [
521 commits = [
522 {'message': 'a'},
522 {'message': 'a'},
523 {'message': 'b', 'branch': 'b'},
523 {'message': 'b', 'branch': 'b'},
524 ]
524 ]
525 repo = backend_hg.create_repo(commits)
525 repo = backend_hg.create_repo(commits)
526 commit_ids = backend_hg.commit_ids
526 commit_ids = backend_hg.commit_ids
527 target_ref = Reference(
527 target_ref = Reference(
528 type='branch', name='default', commit_id=commit_ids['a'])
528 type='branch', name='default', commit_id=commit_ids['a'])
529 source_ref = Reference(
529 source_ref = Reference(
530 type='branch', name='b', commit_id=commit_ids['b'])
530 type='branch', name='b', commit_id=commit_ids['b'])
531 merge_message = 'Merge message\n\nDescription:...'
531 merge_message = 'Merge message\n\nDescription:...'
532 user_name = 'Albert Einstein'
532 user_name = 'Albert Einstein'
533 user_email = 'albert@einstein.com'
533 user_email = 'albert@einstein.com'
534 vcs_repo = repo.scm_instance()
534 vcs_repo = repo.scm_instance()
535 merge_commit_id, needs_push = vcs_repo._local_merge(
535 merge_commit_id, needs_push = vcs_repo._local_merge(
536 target_ref, merge_message, user_name, user_email, source_ref)
536 target_ref, merge_message, user_name, user_email, source_ref)
537 assert merge_commit_id != source_ref.commit_id
537 assert merge_commit_id != source_ref.commit_id
538 assert needs_push is True
538 assert needs_push is True
539 commit = vcs_repo.get_commit(merge_commit_id)
539 commit = vcs_repo.get_commit(merge_commit_id)
540 assert commit.merge is True
540 assert commit.merge is True
541 assert commit.message == merge_message
541 assert commit.message == merge_message
542
542
543 def test_maybe_prepare_merge_workspace(self):
543 def test_maybe_prepare_merge_workspace(self):
544 workspace = self.repo._maybe_prepare_merge_workspace(
544 workspace = self.repo._maybe_prepare_merge_workspace(
545 1, 'pr2', 'unused', 'unused2')
545 1, 'pr2', 'unused', 'unused2')
546
546
547 assert os.path.isdir(workspace)
547 assert os.path.isdir(workspace)
548 workspace_repo = MercurialRepository(workspace)
548 workspace_repo = MercurialRepository(workspace)
549 assert workspace_repo.branches == self.repo.branches
549 assert workspace_repo.branches == self.repo.branches
550
550
551 # Calling it a second time should also succeed
551 # Calling it a second time should also succeed
552 workspace = self.repo._maybe_prepare_merge_workspace(
552 workspace = self.repo._maybe_prepare_merge_workspace(
553 1, 'pr2', 'unused', 'unused2')
553 1, 'pr2', 'unused', 'unused2')
554 assert os.path.isdir(workspace)
554 assert os.path.isdir(workspace)
555
555
556 def test_cleanup_merge_workspace(self):
556 def test_cleanup_merge_workspace(self):
557 workspace = self.repo._maybe_prepare_merge_workspace(
557 workspace = self.repo._maybe_prepare_merge_workspace(
558 1, 'pr3', 'unused', 'unused2')
558 1, 'pr3', 'unused', 'unused2')
559
559
560 assert os.path.isdir(workspace)
560 assert os.path.isdir(workspace)
561 self.repo.cleanup_merge_workspace(1, 'pr3')
561 self.repo.cleanup_merge_workspace(1, 'pr3')
562
562
563 assert not os.path.exists(workspace)
563 assert not os.path.exists(workspace)
564
564
565 def test_cleanup_merge_workspace_invalid_workspace_id(self):
565 def test_cleanup_merge_workspace_invalid_workspace_id(self):
566 # No assert: in case of a nonexistent workspace this function
566 # No assert: in case of a nonexistent workspace this function
567 # should still succeed.
567 # should still succeed.
568 self.repo.cleanup_merge_workspace(1, 'pr4')
569
570 def test_merge_target_is_bookmark(self, vcsbackend_hg):
571 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
572 source_repo = vcsbackend_hg.clone_repo(target_repo)
573 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
574 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
575 imc = source_repo.in_memory_commit
576 imc.add(FileNode(b'file_x', content=source_repo.name))
577 imc.commit(
578 message='Automatic commit from repo merge test',
579 author='Automatic <automatic@rhodecode.com>')
580 target_commit = target_repo.get_commit()
581 source_commit = source_repo.get_commit()
582 default_branch = target_repo.DEFAULT_BRANCH_NAME
583 bookmark_name = 'bookmark'
584 target_repo._update(default_branch)
585 target_repo.bookmark(bookmark_name)
586 target_ref = Reference('book', bookmark_name, target_commit.raw_id)
587 source_ref = Reference('branch', default_branch, source_commit.raw_id)
588 workspace_id = 'test-merge'
589 repo_id = repo_id_generator(target_repo.path)
590 merge_response = target_repo.merge(
591 repo_id, workspace_id, target_ref, source_repo, source_ref,
592 'test user', 'test@rhodecode.com', 'merge message 1',
593 dry_run=False)
594 expected_merge_response = MergeResponse(
595 True, True, merge_response.merge_ref,
596 MergeFailureReason.NONE)
597 assert merge_response == expected_merge_response
598
599 target_repo = backends.get_backend(vcsbackend_hg.alias)(
600 target_repo.path)
601 target_commits = list(target_repo.get_commits())
602 commit_ids = [c.raw_id for c in target_commits[:-1]]
603 assert source_ref.commit_id in commit_ids
604 assert target_ref.commit_id in commit_ids
605
606 merge_commit = target_commits[-1]
607 assert merge_commit.raw_id == merge_response.merge_ref.commit_id
608 assert merge_commit.message.strip() == 'merge message 1'
609 assert merge_commit.author == 'test user <test@rhodecode.com>'
610
611 # Check the bookmark was updated in the target repo
612 assert (
613 target_repo.bookmarks[bookmark_name] ==
614 merge_response.merge_ref.commit_id)
615
616 def test_merge_source_is_bookmark(self, vcsbackend_hg):
617 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
618 source_repo = vcsbackend_hg.clone_repo(target_repo)
619 imc = source_repo.in_memory_commit
620 imc.add(FileNode(b'file_x', content=source_repo.name))
621 imc.commit(
622 message='Automatic commit from repo merge test',
623 author='Automatic <automatic@rhodecode.com>')
624 target_commit = target_repo.get_commit()
625 source_commit = source_repo.get_commit()
626 default_branch = target_repo.DEFAULT_BRANCH_NAME
627 bookmark_name = 'bookmark'
628 target_ref = Reference('branch', default_branch, target_commit.raw_id)
629 source_repo._update(default_branch)
630 source_repo.bookmark(bookmark_name)
631 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
632 workspace_id = 'test-merge'
633 repo_id = repo_id_generator(target_repo.path)
634 merge_response = target_repo.merge(
635 repo_id, workspace_id, target_ref, source_repo, source_ref,
636 'test user', 'test@rhodecode.com', 'merge message 1',
637 dry_run=False)
638 expected_merge_response = MergeResponse(
639 True, True, merge_response.merge_ref,
640 MergeFailureReason.NONE)
641 assert merge_response == expected_merge_response
642
643 target_repo = backends.get_backend(vcsbackend_hg.alias)(
644 target_repo.path)
645 target_commits = list(target_repo.get_commits())
646 commit_ids = [c.raw_id for c in target_commits]
647 assert source_ref.commit_id == commit_ids[-1]
648 assert target_ref.commit_id == commit_ids[-2]
649
650 def test_merge_target_has_multiple_heads(self, vcsbackend_hg):
651 target_repo = vcsbackend_hg.create_repo(number_of_commits=2)
652 source_repo = vcsbackend_hg.clone_repo(target_repo)
653 vcsbackend_hg.add_file(target_repo, 'README_MERGE1', 'Version 1')
654 vcsbackend_hg.add_file(source_repo, 'README_MERGE2', 'Version 2')
655
656 # add an extra head to the target repo
657 imc = target_repo.in_memory_commit
658 imc.add(FileNode(b'file_x', content='foo'))
659 commits = list(target_repo.get_commits())
660 imc.commit(
661 message='Automatic commit from repo merge test',
662 author='Automatic <automatic@rhodecode.com>', parents=commits[0:1])
663
664 target_commit = target_repo.get_commit()
665 source_commit = source_repo.get_commit()
666 default_branch = target_repo.DEFAULT_BRANCH_NAME
667 target_repo._update(default_branch)
668
669 target_ref = Reference('branch', default_branch, target_commit.raw_id)
670 source_ref = Reference('branch', default_branch, source_commit.raw_id)
671 workspace_id = 'test-merge'
672
673 assert len(target_repo._heads(branch='default')) == 2
674 heads = target_repo._heads(branch='default')
675 expected_merge_response = MergeResponse(
676 False, False, None,
677 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
678 metadata={'heads': heads})
679 repo_id = repo_id_generator(target_repo.path)
680 merge_response = target_repo.merge(
681 repo_id, workspace_id, target_ref, source_repo, source_ref,
682 'test user', 'test@rhodecode.com', 'merge message 1',
683 dry_run=False)
684 assert merge_response == expected_merge_response
685
686 def test_merge_rebase_source_is_updated_bookmark(self, vcsbackend_hg):
687 target_repo = vcsbackend_hg.create_repo(number_of_commits=1)
688 source_repo = vcsbackend_hg.clone_repo(target_repo)
689 vcsbackend_hg.add_file(target_repo, b'README_MERGE1', b'Version 1')
690 vcsbackend_hg.add_file(source_repo, b'README_MERGE2', b'Version 2')
691
692 imc = source_repo.in_memory_commit
693 imc.add(FileNode(b'file_x', content=safe_bytes(source_repo.name)))
694 imc.commit(
695 message='Automatic commit from repo merge test',
696 author='Automatic <automatic@rhodecode.com>')
697
698 target_commit = target_repo.get_commit()
699 source_commit = source_repo.get_commit()
700
701 vcsbackend_hg.add_file(source_repo, b'LICENSE', b'LICENSE Info')
702
703 default_branch = target_repo.DEFAULT_BRANCH_NAME
704 bookmark_name = 'bookmark'
705 source_repo._update(default_branch)
706 source_repo.bookmark(bookmark_name)
707
708 target_ref = Reference('branch', default_branch, target_commit.raw_id)
709 source_ref = Reference('book', bookmark_name, source_commit.raw_id)
710 repo_id = repo_id_generator(target_repo.path)
711 workspace_id = 'test-merge'
712
713 merge_response = target_repo.merge(
714 repo_id, workspace_id, target_ref, source_repo, source_ref,
715 'test user', 'test@rhodecode.com', 'merge message 1',
716 dry_run=False, use_rebase=True)
717
718 expected_merge_response = MergeResponse(
719 True, True, merge_response.merge_ref,
720 MergeFailureReason.NONE)
721 assert merge_response == expected_merge_response
722
723 target_repo = backends.get_backend(vcsbackend_hg.alias)(
724 target_repo.path)
725 last_commit = target_repo.get_commit()
726 assert last_commit.message == source_commit.message
727 assert last_commit.author == source_commit.author
728 # This checks that we effectively did a rebase
729 assert last_commit.raw_id != source_commit.raw_id
730
731 # Check the target has only 4 commits: 2 were already in target and
732 # only two should have been added
733 assert len(target_repo.commit_ids) == 2 + 2
734
735
736 class TestGetShadowInstance(object):
737
738 @pytest.fixture()
739 def repo(self, vcsbackend_hg, monkeypatch):
740 repo = vcsbackend_hg.repo
741 monkeypatch.setattr(repo, 'config', mock.Mock())
742 monkeypatch.setattr('rhodecode.lib.vcs.connection.Hg', mock.Mock())
743 return repo
744
745 def test_passes_config(self, repo):
746 shadow = repo.get_shadow_instance(repo.path)
747 assert shadow.config == repo.config.copy()
748
749 def test_disables_hooks(self, repo):
750 shadow = repo.get_shadow_instance(repo.path)
751 shadow.config.clear_section.assert_called_once_with('hooks')
752
753 def test_allows_to_keep_hooks(self, repo):
754 shadow = repo.get_shadow_instance(repo.path, enable_hooks=True)
755 assert not shadow.config.clear_section.called
756
757
758 class TestMercurialCommit(object):
759
760 def _test_equality(self, commit):
761 idx = commit.idx
762 assert commit == self.repo.get_commit(commit_idx=idx)
763
764 def test_equality(self):
765 indexes = [0, 10, 20]
766 commits = [self.repo.get_commit(commit_idx=idx) for idx in indexes]
767 for commit in commits:
768 self._test_equality(commit)
769
770 def test_default_commit(self):
771 tip = self.repo.get_commit('tip')
772 assert tip == self.repo.get_commit()
773 assert tip == self.repo.get_commit(commit_id=None)
774 assert tip == self.repo.get_commit(commit_idx=None)
775 assert tip == list(self.repo[-1:])[0]
776
777 def test_root_node(self):
778 tip = self.repo.get_commit('tip')
779 assert tip.root is tip.get_node('')
780
781 def test_lazy_fetch(self):
782 """
782 """
783 Test if commit's nodes expands and are cached as we walk through
783 Test if commit's nodes expands and are cached as we walk through
784 the commit. This test is somewhat hard to write as order of tests
784 the commit. This test is somewhat hard to write as order of tests
785 is a key here. Written by running command after command in a shell.
785 is a key here. Written by running command after command in a shell.
786 """
786 """
787 commit = self.repo.get_commit(commit_idx=45)
788 assert len(commit.nodes) == 0
789 root = commit.root
790 assert len(commit.nodes) == 1
791 assert len(root.nodes) == 8
792 # accessing root.nodes updates commit.nodes
793 assert len(commit.nodes) == 9
794
795 docs = root.get_node('docs')
796 # we haven't yet accessed anything new as docs dir was already cached
797 assert len(commit.nodes) == 9
798 assert len(docs.nodes) == 8
799 # accessing docs.nodes updates commit.nodes
800 assert len(commit.nodes) == 17
801
802 assert docs is commit.get_node('docs')
803 assert docs is root.nodes[0]
804 assert docs is root.dirs[0]
805 assert docs is commit.get_node('docs')
806
807 def test_nodes_with_commit(self):
808 commit = self.repo.get_commit(commit_idx=45)
809 root = commit.root
810 docs = root.get_node('docs')
811 assert docs is commit.get_node('docs')
812 api = docs.get_node('api')
813 assert api is commit.get_node('docs/api')
814 index = api.get_node('index.rst')
815 assert index is commit.get_node('docs/api/index.rst')
816 assert index is commit.get_node(
817 'docs').get_node('api').get_node('index.rst')
818
819 def test_branch_and_tags(self):
820 commit0 = self.repo.get_commit(commit_idx=0)
821 assert commit0.branch == 'default'
822 assert commit0.tags == []
823
824 commit10 = self.repo.get_commit(commit_idx=10)
825 assert commit10.branch == 'default'
826 assert commit10.tags == []
827
828 commit44 = self.repo.get_commit(commit_idx=44)
829 assert commit44.branch == 'web'
830
831 tip = self.repo.get_commit('tip')
832 assert 'tip' in tip.tags
833
834 def test_bookmarks(self):
835 commit0 = self.repo.get_commit(commit_idx=0)
836 assert commit0.bookmarks == []
837
838 def _test_file_size(self, idx, path, size):
839 node = self.repo.get_commit(commit_idx=idx).get_node(path)
840 assert node.is_file()
841 assert node.size == size
842
843 def test_file_size(self):
844 to_check = (
845 (10, 'setup.py', 1068),
846 (20, 'setup.py', 1106),
847 (60, 'setup.py', 1074),
848
849 (10, 'vcs/backends/base.py', 2921),
850 (20, 'vcs/backends/base.py', 3936),
851 (60, 'vcs/backends/base.py', 6189),
852 )
853 for idx, path, size in to_check:
854 self._test_file_size(idx, path, size)
855
856 def test_file_history_from_commits(self):
857 node = self.repo[10].get_node('setup.py')
858 commit_ids = [commit.raw_id for commit in node.history]
859 assert ['3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == commit_ids
860
861 node = self.repo[20].get_node('setup.py')
862 node_ids = [commit.raw_id for commit in node.history]
863 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
864 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
865
866 # special case we check history from commit that has this particular
867 # file changed this means we check if it's included as well
868 node = self.repo.get_commit('eada5a770da98ab0dd7325e29d00e0714f228d09')\
869 .get_node('setup.py')
870 node_ids = [commit.raw_id for commit in node.history]
871 assert ['eada5a770da98ab0dd7325e29d00e0714f228d09',
872 '3803844fdbd3b711175fc3da9bdacfcd6d29a6fb'] == node_ids
873
874 def test_file_history(self):
875 # we can only check if those commits are present in the history
876 # as we cannot update this test every time file is changed
877 files = {
878 'setup.py': [7, 18, 45, 46, 47, 69, 77],
879 'vcs/nodes.py': [
880 7, 8, 24, 26, 30, 45, 47, 49, 56, 57, 58, 59, 60, 61, 73, 76],
881 'vcs/backends/hg.py': [
882 4, 5, 6, 11, 12, 13, 14, 15, 16, 21, 22, 23, 26, 27, 28, 30,
883 31, 33, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 48, 49, 53, 54,
884 55, 58, 60, 61, 67, 68, 69, 70, 73, 77, 78, 79, 82],
885 }
886 for path, indexes in files.items():
887 tip = self.repo.get_commit(commit_idx=indexes[-1])
888 node = tip.get_node(path)
889 node_indexes = [commit.idx for commit in node.history]
890 assert set(indexes).issubset(set(node_indexes)), (
891 "We assumed that %s is subset of commits for which file %s "
892 "has been changed, and history of that node returned: %s"
893 % (indexes, path, node_indexes))
894
895 def test_file_annotate(self):
896 files = {
897 'vcs/backends/__init__.py': {
898 89: {
899 'lines_no': 31,
900 'commits': [
901 32, 32, 61, 32, 32, 37, 32, 32, 32, 44,
902 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
903 32, 32, 32, 32, 37, 32, 37, 37, 32,
904 32, 32
905 ]
906 },
907 20: {
908 'lines_no': 1,
909 'commits': [4]
910 },
911 55: {
912 'lines_no': 31,
913 'commits': [
914 32, 32, 45, 32, 32, 37, 32, 32, 32, 44,
915 37, 37, 37, 37, 45, 37, 44, 37, 37, 37,
916 32, 32, 32, 32, 37, 32, 37, 37, 32,
917 32, 32
918 ]
919 }
920 },
921 'vcs/exceptions.py': {
922 89: {
923 'lines_no': 18,
924 'commits': [
925 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
926 16, 16, 17, 16, 16, 18, 18, 18
927 ]
928 },
929 20: {
930 'lines_no': 18,
931 'commits': [
932 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
933 16, 16, 17, 16, 16, 18, 18, 18
934 ]
935 },
936 55: {
937 'lines_no': 18,
938 'commits': [
939 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
940 17, 16, 16, 18, 18, 18
941 ]
942 }
943 },
944 'MANIFEST.in': {
945 89: {
946 'lines_no': 5,
947 'commits': [7, 7, 7, 71, 71]
948 },
949 20: {
950 'lines_no': 3,
951 'commits': [7, 7, 7]
952 },
953 55: {
954 'lines_no': 3,
955 'commits': [7, 7, 7]
956 }
957 }
958 }
959
960 for fname, commit_dict in files.items():
961 for idx, __ in commit_dict.items():
962 commit = self.repo.get_commit(commit_idx=idx)
963 l1_1 = [x[1] for x in commit.get_file_annotate(fname)]
964 l1_2 = [x[2]().raw_id for x in commit.get_file_annotate(fname)]
965 assert l1_1 == l1_2
966 l1 = l1_2 = [
967 x[2]().idx for x in commit.get_file_annotate(fname)]
968 l2 = files[fname][idx]['commits']
969 assert l1 == l2, (
970 "The lists of commit for %s@commit_id%s"
971 "from annotation list should match each other,"
972 "got \n%s \nvs \n%s " % (fname, idx, l1, l2))
973
974 def test_commit_state(self):
975 """
976 Tests which files have been added/changed/removed at particular commit
977 """
978
979 # commit_id 46ad32a4f974:
980 # hg st --rev 46ad32a4f974
981 # changed: 13
982 # added: 20
983 # removed: 1
984 changed = set([
985 '.hgignore', 'README.rst', 'docs/conf.py', 'docs/index.rst',
986 'setup.py', 'tests/test_hg.py', 'tests/test_nodes.py',
987 'vcs/__init__.py', 'vcs/backends/__init__.py',
988 'vcs/backends/base.py', 'vcs/backends/hg.py', 'vcs/nodes.py',
989 'vcs/utils/__init__.py'])
990
991 added = set([
992 'docs/api/backends/hg.rst', 'docs/api/backends/index.rst',
993 'docs/api/index.rst', 'docs/api/nodes.rst',
994 'docs/api/web/index.rst', 'docs/api/web/simplevcs.rst',
995 'docs/installation.rst', 'docs/quickstart.rst', 'setup.cfg',
996 'vcs/utils/baseui_config.py', 'vcs/utils/web.py',
997 'vcs/web/__init__.py', 'vcs/web/exceptions.py',
998 'vcs/web/simplevcs/__init__.py', 'vcs/web/simplevcs/exceptions.py',
999 'vcs/web/simplevcs/middleware.py', 'vcs/web/simplevcs/models.py',
1000 'vcs/web/simplevcs/settings.py', 'vcs/web/simplevcs/utils.py',
1001 'vcs/web/simplevcs/views.py'])
1002
1003 removed = set(['docs/api.rst'])
1004
1005 commit64 = self.repo.get_commit('46ad32a4f974')
1006 assert set((node.path for node in commit64.added)) == added
1007 assert set((node.path for node in commit64.changed)) == changed
1008 assert set((node.path for node in commit64.removed)) == removed
1009
1010 # commit_id b090f22d27d6:
1011 # hg st --rev b090f22d27d6
1012 # changed: 1
1013 # added: 0
1014 # removed: 0
1015 commit88 = self.repo.get_commit('b090f22d27d6')
1016 assert set((node.path for node in commit88.added)) == set()
1017 assert set((node.path for node in commit88.changed)) == \
1018 set(['.hgignore'])
1019 assert set((node.path for node in commit88.removed)) == set()
1020
1021 #
1022 # 85:
1023 # added: 2 [
1024 # 'vcs/utils/diffs.py', 'vcs/web/simplevcs/views/diffs.py']
1025 # changed: 4 ['vcs/web/simplevcs/models.py', ...]
1026 # removed: 1 ['vcs/utils/web.py']
1027 commit85 = self.repo.get_commit(commit_idx=85)
1028 assert set((node.path for node in commit85.added)) == set([
1029 'vcs/utils/diffs.py',
1030 'vcs/web/simplevcs/views/diffs.py'])
1031 assert set((node.path for node in commit85.changed)) == set([
1032 'vcs/web/simplevcs/models.py',
1033 'vcs/web/simplevcs/utils.py',
1034 'vcs/web/simplevcs/views/__init__.py',
1035 'vcs/web/simplevcs/views/repository.py',
1036 ])
1037 assert set((node.path for node in commit85.removed)) == \
1038 set(['vcs/utils/web.py'])
1039
1040 def test_files_state(self):
1041 """
1042 Tests state of FileNodes.
1043 """
1044 commit = self.repo.get_commit(commit_idx=85)
1045 node = commit.get_node('vcs/utils/diffs.py')
1046 assert node.state == NodeState.ADDED
1047 assert node.added
1048 assert not node.changed
1049 assert not node.not_changed
1050 assert not node.removed
1051
1052 commit = self.repo.get_commit(commit_idx=88)
1053 node = commit.get_node('.hgignore')
1054 assert node.state == NodeState.CHANGED
1055 assert not node.added
1056 assert node.changed
1057 assert not node.not_changed
1058 assert not node.removed
1059
1060 commit = self.repo.get_commit(commit_idx=85)
1061 node = commit.get_node('setup.py')
1062 assert node.state == NodeState.NOT_CHANGED
1063 assert not node.added
1064 assert not node.changed
1065 assert node.not_changed
1066 assert not node.removed
1067
1068 # If node has REMOVED state then trying to fetch it would raise
1069 # CommitError exception
1070 commit = self.repo.get_commit(commit_idx=2)
1071 path = 'vcs/backends/BaseRepository.py'
1072 with pytest.raises(NodeDoesNotExistError):
1073 commit.get_node(path)
1074 # but it would be one of ``removed`` (commit's attribute)
1075 assert path in [rf.path for rf in commit.removed]
1076
1077 def test_commit_message_is_unicode(self):
1078 for cm in self.repo:
1079 assert type(cm.message) == str
1080
1081 def test_commit_author_is_unicode(self):
1082 for cm in self.repo:
1083 assert type(cm.author) == str
1084
1085 def test_repo_files_content_type(self):
1086 test_commit = self.repo.get_commit(commit_idx=100)
1087 for node in test_commit.get_node('/'):
1088 if node.is_file():
1089 assert type(node.content) == bytes
1090 assert type(node.str_content) == str
1091
1092 def test_wrong_path(self):
1093 # There is 'setup.py' in the root dir but not there:
1094 path = 'foo/bar/setup.py'
1095 with pytest.raises(VCSError):
1096 self.repo.get_commit().get_node(path)
1097
1098 def test_author_email(self):
1099 assert 'marcin@python-blog.com' == \
1100 self.repo.get_commit('b986218ba1c9').author_email
1101 assert 'lukasz.balcerzak@python-center.pl' == \
1102 self.repo.get_commit('3803844fdbd3').author_email
1103 assert '' == self.repo.get_commit('84478366594b').author_email
1104
1105 def test_author_username(self):
1106 assert 'Marcin Kuzminski' == \
1107 self.repo.get_commit('b986218ba1c9').author_name
1108 assert 'Lukasz Balcerzak' == \
1109 self.repo.get_commit('3803844fdbd3').author_name
1110 assert 'marcink' == \
1111 self.repo.get_commit('84478366594b').author_name
1112
1113
1114 class TestLargeFileRepo(object):
1115
1116 def test_large_file(self, backend_hg):
1117 repo = backend_hg.create_test_repo('largefiles', make_db_config())
1118
1119 tip = repo.scm_instance().get_commit()
1120 node = tip.get_node('.hglf/thisfileislarge')
1121
1122 lf_node = node.get_largefile_node()
1123
1124 assert lf_node.is_largefile() is True
1125 assert lf_node.size == 1024000
1126 assert lf_node.name == '.hglf/thisfileislarge'
1127
1128
1129 class TestGetBranchName(object):
1130 def test_returns_ref_name_when_type_is_branch(self):
1131 ref = self._create_ref('branch', 'fake-name')
1132 result = self.repo._get_branch_name(ref)
1133 assert result == ref.name
1134
1135 @pytest.mark.parametrize("type_", ("book", "tag"))
1136 def test_queries_remote_when_type_is_not_branch(self, type_):
1137 ref = self._create_ref(type_, 'wrong-fake-name')
1138 with mock.patch.object(self.repo, "_remote") as remote_mock:
1139 remote_mock.ctx_branch.return_value = "fake-name"
1140 result = self.repo._get_branch_name(ref)
1141 assert result == "fake-name"
1142 remote_mock.ctx_branch.assert_called_once_with(ref.commit_id)
1143
1144 def _create_ref(self, type_, name):
1145 ref = mock.Mock()
1146 ref.type = type_
1147 ref.name = 'wrong-fake-name'
1148 ref.commit_id = "deadbeef"
1149 return ref
1150
1151
1152 class TestIsTheSameBranch(object):
1153 def test_returns_true_when_branches_are_equal(self):
1154 source_ref = mock.Mock(name="source-ref")
1155 target_ref = mock.Mock(name="target-ref")
1156 branch_name_patcher = mock.patch.object(
1157 self.repo, "_get_branch_name", return_value="default")
1158 with branch_name_patcher as branch_name_mock:
1159 result = self.repo._is_the_same_branch(source_ref, target_ref)
1160
1161 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1162 assert branch_name_mock.call_args_list == expected_calls
1163 assert result is True
1164
1165 def test_returns_false_when_branches_are_not_equal(self):
1166 source_ref = mock.Mock(name="source-ref")
1167 source_ref.name = "source-branch"
1168 target_ref = mock.Mock(name="target-ref")
1169 target_ref.name = "target-branch"
1170
1171 def side_effect(ref):
1172 return ref.name
1173
1174 branch_name_patcher = mock.patch.object(
1175 self.repo, "_get_branch_name", side_effect=side_effect)
1176 with branch_name_patcher as branch_name_mock:
1177 result = self.repo._is_the_same_branch(source_ref, target_ref)
1178
1179 expected_calls = [mock.call(source_ref), mock.call(target_ref)]
1180 assert branch_name_mock.call_args_list == expected_calls
1181 assert result is False