fix(mercurial): actually use assert_path instead of always initializing repo object....
super-admin
r5581:68a5b57b default
@@ -1,1030 +1,1036 @@
1 1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 HG repository module
21 21 """
22 22 import os
23 23 import logging
24 24 import binascii
25 25 import configparser
26 26 import urllib.request
27 27 import urllib.parse
28 28 import urllib.error
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from collections import OrderedDict
33 33 from rhodecode.lib.datelib import (
34 34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 35 from rhodecode.lib.str_utils import safe_str
36 36 from rhodecode.lib.utils2 import CachedProperty
37 37 from rhodecode.lib.vcs import connection, exceptions
38 38 from rhodecode.lib.vcs.conf import settings as vcs_settings
39 39 from rhodecode.lib.vcs.backends.base import (
40 40 BaseRepository, CollectionGenerator, Config, MergeResponse,
41 41 MergeFailureReason, Reference, BasePathPermissionChecker)
42 42 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
43 43 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
44 44 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
47 47 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
48 48
49 49 hexlify = binascii.hexlify
50 50 nullid = "\0" * 20
51 51
52 52 log = logging.getLogger(__name__)
53 53
54 54
55 55 class MercurialRepository(BaseRepository):
56 56 """
57 57 Mercurial repository backend
58 58 """
59 59 DEFAULT_BRANCH_NAME = 'default'
60 60
61 61 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 62 do_workspace_checkout=False, with_wire=None, bare=False):
63 63 """
64 64 Raises RepositoryError if repository could not be found at the given
65 65 ``repo_path``.
66 66
67 67 :param repo_path: local path of the repository
68 68 :param config: config object containing the repo configuration
69 69 :param create=False: if set to True, would try to create repository if
70 70 it does not exist rather than raising exception
71 71 :param src_url=None: would try to clone repository from given location
72 72 :param do_workspace_checkout=False: sets update of working copy after
73 73 making a clone
74 74 :param bare: not used, compatible with other VCS
75 75 """
76 76
77 77 self.path = safe_str(os.path.abspath(repo_path))
78 78 # mercurial since 4.4.X requires certain configuration to be present;
79 79 # because we sometimes init repos with a custom config, we need to meet
80 80 # these special requirements
81 81 self.config = config if config else self.get_default_config(
82 82 default=[('extensions', 'largefiles', '')])
83 83
84 84 # NOTE(marcink): since python3 hgsubversion is deprecated.
85 85 # From old installations we might still have this set enabled
86 86 # we explicitly remove this here to make sure it won't propagate further
87 87 if config and config.get('extensions', 'hgsubversion') is not None:
88 88 config.drop_option('extensions', 'hgsubversion')
89 89
90 90 self.with_wire = with_wire or {"cache": False} # default should not use cache
91 91
92 92 self._init_repo(create, src_url, do_workspace_checkout)
93 93
94 94 # caches
95 95 self._commit_ids = {}
96 96
97 97 @LazyProperty
98 98 def _remote(self):
99 99 repo_id = self.path
100 100 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
101 101
102 102 @CachedProperty
103 103 def commit_ids(self):
104 104 """
105 105 Returns list of commit ids, in ascending order. Being a lazy
106 106 attribute allows external tools to inject shas from cache.
107 107 """
108 108 commit_ids = self._get_all_commit_ids()
109 109 self._rebuild_cache(commit_ids)
110 110 return commit_ids
111 111
112 112 def _rebuild_cache(self, commit_ids):
113 113 self._commit_ids = {commit_id: index
114 114 for index, commit_id in enumerate(commit_ids)}
115 115
116 116 @CachedProperty
117 117 def branches(self):
118 118 return self._get_branches()
119 119
120 120 @CachedProperty
121 121 def branches_closed(self):
122 122 return self._get_branches(active=False, closed=True)
123 123
124 124 @CachedProperty
125 125 def branches_all(self):
126 126 all_branches = {}
127 127 all_branches.update(self.branches)
128 128 all_branches.update(self.branches_closed)
129 129 return all_branches
130 130
131 131 def _get_branches(self, active=True, closed=False):
132 132 """
133 133 Gets branches for this repository
134 134 By default, returns only active branches that are not closed
135 135
136 136 :param active: return also active branches
137 137 :param closed: return also closed branches
138 138
139 139 """
140 140 if self.is_empty():
141 141 return {}
142 142
143 143 def get_name(ctx):
144 144 return ctx[0]
145 145
146 146 _branches = [(n, h,) for n, h in
147 147 self._remote.branches(active, closed).items()]
148 148
149 149 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
150 150
151 151 @CachedProperty
152 152 def tags(self):
153 153 """
154 154 Gets tags for this repository
155 155 """
156 156 return self._get_tags()
157 157
158 158 def _get_tags(self):
159 159 if self.is_empty():
160 160 return {}
161 161
162 162 def get_name(ctx):
163 163 return ctx[0]
164 164
165 165 _tags = [(n, h,) for n, h in
166 166 self._remote.tags().items()]
167 167
168 168 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
169 169
170 170 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
171 171 """
172 172 Creates and returns a tag for the given ``commit_id``.
173 173
174 174 :param name: name for new tag
175 175 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
176 176 :param commit_id: commit id for which new tag would be created
177 177 :param message: message of the tag's commit
178 178 :param date: date of tag's commit
179 179
180 180 :raises TagAlreadyExistError: if tag with same name already exists
181 181 """
182 182 if name in self.tags:
183 183 raise TagAlreadyExistError("Tag %s already exists" % name)
184 184
185 185 commit = self.get_commit(commit_id=commit_id)
186 186 local = kwargs.setdefault('local', False)
187 187
188 188 if message is None:
189 189 message = f"Added tag {name} for commit {commit.short_id}"
190 190
191 191 date, tz = date_to_timestamp_plus_offset(date)
192 192
193 193 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
194 194 self._remote.invalidate_vcs_cache()
195 195
196 196 # Reinitialize tags
197 197 self._invalidate_prop_cache('tags')
198 198 tag_id = self.tags[name]
199 199
200 200 return self.get_commit(commit_id=tag_id)
201 201
202 202 def remove_tag(self, name, user, message=None, date=None):
203 203 """
204 204 Removes tag with the given `name`.
205 205
206 206 :param name: name of the tag to be removed
207 207 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
208 208 :param message: message of the tag's removal commit
209 209 :param date: date of tag's removal commit
210 210
211 211 :raises TagDoesNotExistError: if tag with given name does not exist
212 212 """
213 213 if name not in self.tags:
214 214 raise TagDoesNotExistError("Tag %s does not exist" % name)
215 215
216 216 if message is None:
217 217 message = "Removed tag %s" % name
218 218 local = False
219 219
220 220 date, tz = date_to_timestamp_plus_offset(date)
221 221
222 222 self._remote.tag(name, nullid, message, local, user, date, tz)
223 223 self._remote.invalidate_vcs_cache()
224 224 self._invalidate_prop_cache('tags')
225 225
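
A brief usage sketch of the tag API above (not part of the commit; it assumes a MercurialRepository instance `repo` with at least one commit, obtained inside a configured RhodeCode/VCSServer environment):

tip = repo.get_commit()
# create a tag on the tip commit; returns the commit the new tag points to
tagged_commit = repo.tag(
    'v1.0.0', user='Joe Doe <joe.doe@example.com>', commit_id=tip.raw_id)
assert 'v1.0.0' in repo.tags
# remove it again; raises TagDoesNotExistError for unknown tag names
repo.remove_tag('v1.0.0', user='Joe Doe <joe.doe@example.com>')
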
226 226 @LazyProperty
227 227 def bookmarks(self):
228 228 """
229 229 Gets bookmarks for this repository
230 230 """
231 231 return self._get_bookmarks()
232 232
233 233 def _get_bookmarks(self):
234 234 if self.is_empty():
235 235 return {}
236 236
237 237 def get_name(ctx):
238 238 return ctx[0]
239 239
240 240 _bookmarks = [
241 241 (n, h) for n, h in
242 242 self._remote.bookmarks().items()]
243 243
244 244 return OrderedDict(sorted(_bookmarks, key=get_name))
245 245
246 246 def _get_all_commit_ids(self):
247 247 return self._remote.get_all_commit_ids('visible')
248 248
249 249 def get_diff(
250 250 self, commit1, commit2, path='', ignore_whitespace=False,
251 251 context=3, path1=None):
252 252 """
253 253 Returns (git like) *diff*, as plain text. Shows changes introduced by
254 254 `commit2` since `commit1`.
255 255
256 256 :param commit1: Entry point from which diff is shown. Can be
257 257 ``self.EMPTY_COMMIT`` - in this case, patch showing all
258 258 the changes since empty state of the repository until `commit2`
259 259 :param commit2: Until which commit changes should be shown.
260 260 :param ignore_whitespace: If set to ``True``, would not show whitespace
261 261 changes. Defaults to ``False``.
262 262 :param context: How many lines before/after changed lines should be
263 263 shown. Defaults to ``3``.
264 264 """
265 265 self._validate_diff_commits(commit1, commit2)
266 266 if path1 is not None and path1 != path:
267 267 raise ValueError("Diff of two different paths not supported.")
268 268
269 269 if path:
270 270 file_filter = [self.path, path]
271 271 else:
272 272 file_filter = None
273 273
274 274 diff = self._remote.diff(
275 275 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
276 276 opt_git=True, opt_ignorews=ignore_whitespace,
277 277 context=context)
278 278 return MercurialDiff(diff)
279 279
280 280 def strip(self, commit_id, branch=None):
281 281 self._remote.strip(commit_id, update=False, backup=False)
282 282
283 283 self._remote.invalidate_vcs_cache()
284 284 # clear cache
285 285 self._invalidate_prop_cache('commit_ids')
286 286
287 287 return len(self.commit_ids)
288 288
289 289 def verify(self):
290 290 verify = self._remote.verify()
291 291
292 292 self._remote.invalidate_vcs_cache()
293 293 return verify
294 294
295 295 def hg_update_cache(self):
296 296 update_cache = self._remote.hg_update_cache()
297 297
298 298 self._remote.invalidate_vcs_cache()
299 299 return update_cache
300 300
301 301 def hg_rebuild_fn_cache(self):
302 302 update_cache = self._remote.hg_rebuild_fn_cache()
303 303
304 304 self._remote.invalidate_vcs_cache()
305 305 return update_cache
306 306
307 307 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
308 308 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
309 309 self, commit_id1, repo2, commit_id2)
310 310
311 311 if commit_id1 == commit_id2:
312 312 return commit_id1
313 313
314 314 ancestors = self._remote.revs_from_revspec(
315 315 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
316 316 other_path=repo2.path)
317 317
318 318 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
319 319
320 320 log.debug('Found common ancestor with sha: %s', ancestor_id)
321 321 return ancestor_id
322 322
323 323 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
324 324 if commit_id1 == commit_id2:
325 325 commits = []
326 326 else:
327 327 if merge:
328 328 indexes = self._remote.revs_from_revspec(
329 329 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
330 330 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
331 331 else:
332 332 indexes = self._remote.revs_from_revspec(
333 333 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
334 334 commit_id1, other_path=repo2.path)
335 335
336 336 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
337 337 for idx in indexes]
338 338
339 339 return commits
340 340
341 341 @staticmethod
342 342 def check_url(url, config):
343 343 """
344 344 Function will check the given url and try to verify if it's a valid
345 345 link. Sometimes it may happen that Mercurial will issue a basic
346 346 auth request that can cause the whole API to hang when used from python
347 347 or other external calls.
348 348
349 349 On failures it'll raise urllib.error.HTTPError; the exception is also
350 350 raised when the return code is not 200
351 351 """
352 352 # check first if it's not a local url
353 353 if os.path.isdir(url) or url.startswith('file:'):
354 354 return True
355 355
356 356 # Request the _remote to verify the url
357 357 return connection.Hg.check_url(url, config.serialize())
358 358
359 359 @staticmethod
360 360 def is_valid_repository(path):
361 361 return os.path.isdir(os.path.join(path, '.hg'))
362 362
363 363 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
364 364 """
365 365 Function will check for a Mercurial repository in the given path. If
366 366 there is no repository in that path it will raise an exception, unless
367 367 the `create` parameter is set to True - in that case the repository
368 368 would be created.
369 369
370 370 If `src_url` is given, would try to clone repository from the
371 location at given clone_point. Additionally it'll make update to
371 location at given clone_point. Additionally, it'll make update to
372 372 working copy accordingly to `do_workspace_checkout` flag.
373 373 """
374 374 if create and os.path.exists(self.path):
375 375 raise RepositoryError(
376 376 f"Cannot create repository at {self.path}, location already exist")
377 377
378 if src_url:
379 url = str(self._get_url(src_url))
380 MercurialRepository.check_url(url, self.config)
378 if create:
379 if src_url:
380 url = str(self._get_url(src_url))
381 MercurialRepository.check_url(url, self.config)
381 382
382 self._remote.clone(url, self.path, do_workspace_checkout)
383 self._remote.clone(url, self.path, do_workspace_checkout)
383 384
384 # Don't try to create if we've already cloned repo
385 create = False
385 # Don't try to create if we've already cloned repo
386 create = False
387 self._remote.localrepository(create)
388 else:
389 os.makedirs(self.path, mode=0o755)
390 create = True
391 self._remote.localrepository(create)
386 392
387 if create:
388 os.makedirs(self.path, mode=0o755)
389
390 self._remote.localrepository(create)
393 else:
394 if not self._remote.assert_correct_path():
395 raise RepositoryError(
396 f'Path "{self.path}" does not contain a Mercurial repository')
391 397
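
To illustrate the effect of the change to _init_repo() above, here is a minimal usage sketch. The import path and repository paths are assumptions for the example, and constructing the object needs a configured RhodeCode/VCSServer environment; the point is the branching this commit introduces: with create=True the repository is cloned (when src_url is given) or initialized, while opening an existing path is now validated via assert_correct_path() instead of always initializing a repo object.

from rhodecode.lib.vcs.backends.hg import MercurialRepository  # assumed import path
from rhodecode.lib.vcs.exceptions import RepositoryError

# create=True: makedirs + localrepository(create=True), or a clone from
# src_url followed by localrepository(create=False)
repo = MercurialRepository('/srv/repos/new-repo', create=True)

# default create=False: the path must already contain a Mercurial repo,
# otherwise assert_correct_path() fails and RepositoryError is raised
try:
    MercurialRepository('/srv/repos/plain-directory')
except RepositoryError as exc:
    print(exc)  # Path "/srv/repos/plain-directory" does not contain a Mercurial repository
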
392 398 @LazyProperty
393 399 def in_memory_commit(self):
394 400 return MercurialInMemoryCommit(self)
395 401
396 402 @LazyProperty
397 403 def description(self):
398 404 description = self._remote.get_config_value(
399 405 'web', 'description', untrusted=True)
400 406 return safe_str(description or self.DEFAULT_DESCRIPTION)
401 407
402 408 @LazyProperty
403 409 def contact(self):
404 410 contact = (
405 411 self._remote.get_config_value("web", "contact") or
406 412 self._remote.get_config_value("ui", "username"))
407 413 return safe_str(contact or self.DEFAULT_CONTACT)
408 414
409 415 @LazyProperty
410 416 def last_change(self):
411 417 """
412 418 Returns the last change made on this repository as a
413 419 `datetime.datetime` object.
414 420 """
415 421 try:
416 422 return self.get_commit().date
417 423 except RepositoryError:
418 424 tzoffset = makedate()[1]
419 425 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
420 426
421 427 def _get_fs_mtime(self):
422 428 # fallback to filesystem
423 429 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
424 430 st_path = os.path.join(self.path, '.hg', "store")
425 431 if os.path.exists(cl_path):
426 432 return os.stat(cl_path).st_mtime
427 433 else:
428 434 return os.stat(st_path).st_mtime
429 435
430 436 def _get_url(self, url):
431 437 """
432 438 Returns normalized url. If schema is not given, falls back
433 439 to the filesystem
434 440 (``file:///``) schema.
435 441 """
436 442 if url != 'default' and '://' not in url:
437 443 url = "file:" + urllib.request.pathname2url(url)
438 444 return url
439 445
440 446 def get_hook_location(self):
441 447 """
442 448 returns absolute path to location where hooks are stored
443 449 """
444 450 return os.path.join(self.path, '.hg', '.hgrc')
445 451
446 452 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
447 453 translate_tag=None, maybe_unreachable=False, reference_obj=None):
448 454 """
449 455 Returns ``MercurialCommit`` object representing repository's
450 456 commit at the given `commit_id` or `commit_idx`.
451 457 """
452 458 if self.is_empty():
453 459 raise EmptyRepositoryError("There are no commits yet")
454 460
455 461 if commit_id is not None:
456 462 self._validate_commit_id(commit_id)
457 463 try:
458 464 # we have cached idx, use it without contacting the remote
459 465 idx = self._commit_ids[commit_id]
460 466 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
461 467 except KeyError:
462 468 pass
463 469
464 470 elif commit_idx is not None:
465 471 self._validate_commit_idx(commit_idx)
466 472 try:
467 473 _commit_id = self.commit_ids[commit_idx]
468 474 if commit_idx < 0:
469 475 commit_idx = self.commit_ids.index(_commit_id)
470 476
471 477 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
472 478 except IndexError:
473 479 commit_id = commit_idx
474 480 else:
475 481 commit_id = "tip"
476 482
477 483 # case here is no cached version, do an actual lookup instead
478 484 try:
479 485 raw_id, idx = self._remote.lookup(commit_id, both=True)
480 486 except CommitDoesNotExistError:
481 487 msg = "Commit {} does not exist for `{}`".format(
482 488 *map(safe_str, [commit_id, self.name]))
483 489 raise CommitDoesNotExistError(msg)
484 490
485 491 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
486 492
487 493 def get_commits(
488 494 self, start_id=None, end_id=None, start_date=None, end_date=None,
489 495 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
490 496 """
491 497 Returns generator of ``MercurialCommit`` objects from start to end
492 498 (both are inclusive)
493 499
494 500 :param start_id: None, str(commit_id)
495 501 :param end_id: None, str(commit_id)
496 502 :param start_date: if specified, commits with commit date less than
497 503 ``start_date`` would be filtered out from returned set
498 504 :param end_date: if specified, commits with commit date greater than
499 505 ``end_date`` would be filtered out from returned set
500 506 :param branch_name: if specified, commits not reachable from given
501 507 branch would be filtered out from returned set
502 508 :param show_hidden: Show hidden commits such as obsolete or hidden from
503 509 Mercurial evolve
504 510 :raise BranchDoesNotExistError: If given ``branch_name`` does not
505 511 exist.
506 512 :raise CommitDoesNotExistError: If commit for given ``start`` or
507 513 ``end`` could not be found.
508 514 """
509 515 # actually we should check now if it's not an empty repo
510 516 if self.is_empty():
511 517 raise EmptyRepositoryError("There are no commits yet")
512 518 self._validate_branch_name(branch_name)
513 519
514 520 branch_ancestors = False
515 521 if start_id is not None:
516 522 self._validate_commit_id(start_id)
517 523 c_start = self.get_commit(commit_id=start_id)
518 524 start_pos = self._commit_ids[c_start.raw_id]
519 525 else:
520 526 start_pos = None
521 527
522 528 if end_id is not None:
523 529 self._validate_commit_id(end_id)
524 530 c_end = self.get_commit(commit_id=end_id)
525 531 end_pos = max(0, self._commit_ids[c_end.raw_id])
526 532 else:
527 533 end_pos = None
528 534
529 535 if None not in [start_id, end_id] and start_pos > end_pos:
530 536 raise RepositoryError(
531 537 "Start commit '%s' cannot be after end commit '%s'" %
532 538 (start_id, end_id))
533 539
534 540 if end_pos is not None:
535 541 end_pos += 1
536 542
537 543 commit_filter = []
538 544
539 545 if branch_name and not branch_ancestors:
540 546 commit_filter.append(f'branch("{branch_name}")')
541 547 elif branch_name and branch_ancestors:
542 548 commit_filter.append(f'ancestors(branch("{branch_name}"))')
543 549
544 550 if start_date and not end_date:
545 551 commit_filter.append(f'date(">{start_date}")')
546 552 if end_date and not start_date:
547 553 commit_filter.append(f'date("<{end_date}")')
548 554 if start_date and end_date:
549 555 commit_filter.append(
550 556 f'date(">{start_date}") and date("<{end_date}")')
551 557
552 558 if not show_hidden:
553 559 commit_filter.append('not obsolete()')
554 560 commit_filter.append('not hidden()')
555 561
556 562 # TODO: johbo: Figure out a simpler way for this solution
557 563 collection_generator = CollectionGenerator
558 564 if commit_filter:
559 565 commit_filter = ' and '.join(map(safe_str, commit_filter))
560 566 revisions = self._remote.rev_range([commit_filter])
561 567 collection_generator = MercurialIndexBasedCollectionGenerator
562 568 else:
563 569 revisions = self.commit_ids
564 570
565 571 if start_pos or end_pos:
566 572 revisions = revisions[start_pos:end_pos]
567 573
568 574 return collection_generator(self, revisions, pre_load=pre_load)
569 575
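
As a concrete example of the revset filter that get_commits() assembles, the self-contained snippet below rebuilds the same string for a branch plus date-range query with hidden and obsolete commits excluded (branch name and dates are illustrative, not from the commit):

commit_filter = [
    'branch("default")',                            # branch_name given
    'date(">2023-01-01") and date("<2023-12-31")',  # start_date and end_date
    'not obsolete()',                               # show_hidden=False
    'not hidden()',
]
revspec = ' and '.join(commit_filter)
print(revspec)
# branch("default") and date(">2023-01-01") and date("<2023-12-31")
# and not obsolete() and not hidden()
# get_commits() passes this string to self._remote.rev_range([revspec])
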
570 576 def pull(self, url, commit_ids=None):
571 577 """
572 578 Pull changes from external location.
573 579
574 580 :param commit_ids: Optional. Can be set to a list of commit ids
575 581 which shall be pulled from the other repository.
576 582 """
577 583 url = self._get_url(url)
578 584 self._remote.pull(url, commit_ids=commit_ids)
579 585 self._remote.invalidate_vcs_cache()
580 586
581 587 def fetch(self, url, commit_ids=None, **kwargs):
582 588 """
583 589 Backward compatibility with GIT fetch==pull
584 590 """
585 591 return self.pull(url, commit_ids=commit_ids)
586 592
587 593 def push(self, url, **kwargs):
588 594 url = self._get_url(url)
589 595 self._remote.sync_push(url)
590 596
591 597 def _local_clone(self, clone_path):
592 598 """
593 599 Create a local clone of the current repo.
594 600 """
595 601 self._remote.clone(self.path, clone_path, update_after_clone=True,
596 602 hooks=False)
597 603
598 604 def _update(self, revision, clean=False):
599 605 """
600 606 Update the working copy to the specified revision.
601 607 """
602 608 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
603 609 self._remote.update(revision, clean=clean)
604 610
605 611 def _identify(self):
606 612 """
607 613 Return the current state of the working directory.
608 614 """
609 615 return self._remote.identify().strip().rstrip('+')
610 616
611 617 def _heads(self, branch=None):
612 618 """
613 619 Return the commit ids of the repository heads.
614 620 """
615 621 return self._remote.heads(branch=branch).strip().split(' ')
616 622
617 623 def _ancestor(self, revision1, revision2):
618 624 """
619 625 Return the common ancestor of the two revisions.
620 626 """
621 627 return self._remote.ancestor(revision1, revision2)
622 628
623 629 def _local_push(
624 630 self, revision, repository_path, push_branches=False,
625 631 enable_hooks=False):
626 632 """
627 633 Push the given revision to the specified repository.
628 634
629 635 :param push_branches: allow creating branches in the target repo.
630 636 """
631 637 self._remote.push(
632 638 [revision], repository_path, hooks=enable_hooks,
633 639 push_branches=push_branches)
634 640
635 641 def _local_merge(self, target_ref, merge_message, user_name, user_email,
636 642 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
637 643 """
638 644 Merge the given source_revision into the checked out revision.
639 645
640 646 Returns the commit id of the merge and a boolean indicating if the
641 647 commit needs to be pushed.
642 648 """
643 649
644 650 source_ref_commit_id = source_ref.commit_id
645 651 target_ref_commit_id = target_ref.commit_id
646 652
647 653 # update our workdir to target ref, for proper merge
648 654 self._update(target_ref_commit_id, clean=True)
649 655
650 656 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
651 657 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
652 658
653 659 if close_commit_id:
654 660 # NOTE(marcink): if we get the close commit, this is our new source
655 661 # which will include the close commit itself.
656 662 source_ref_commit_id = close_commit_id
657 663
658 664 if ancestor == source_ref_commit_id:
659 665 # Nothing to do, the changes were already integrated
660 666 return target_ref_commit_id, False
661 667
662 668 elif ancestor == target_ref_commit_id and is_the_same_branch:
663 669 # In this case we should force a commit message
664 670 return source_ref_commit_id, True
665 671
666 672 unresolved = None
667 673 if use_rebase:
668 674 try:
669 675 bookmark_name = f'rcbook{source_ref_commit_id}{target_ref_commit_id}'
670 676 self.bookmark(bookmark_name, revision=source_ref.commit_id)
671 677 self._remote.rebase(
672 678 source=source_ref_commit_id, dest=target_ref_commit_id)
673 679 self._remote.invalidate_vcs_cache()
674 680 self._update(bookmark_name, clean=True)
675 681 return self._identify(), True
676 682 except RepositoryError as e:
677 683 # The rebase-abort may raise another exception which 'hides'
678 684 # the original one, therefore we log it here.
679 685 log.exception('Error while rebasing shadow repo during merge.')
680 686 if 'unresolved conflicts' in safe_str(e):
681 687 unresolved = self._remote.get_unresolved_files()
682 688 log.debug('unresolved files: %s', unresolved)
683 689
684 690 # Cleanup any rebase leftovers
685 691 self._remote.invalidate_vcs_cache()
686 692 self._remote.rebase(abort=True)
687 693 self._remote.invalidate_vcs_cache()
688 694 self._remote.update(clean=True)
689 695 if unresolved:
690 696 raise UnresolvedFilesInRepo(unresolved)
691 697 else:
692 698 raise
693 699 else:
694 700 try:
695 701 self._remote.merge(source_ref_commit_id)
696 702 self._remote.invalidate_vcs_cache()
697 703 self._remote.commit(
698 704 message=safe_str(merge_message),
699 705 username=safe_str(f'{user_name} <{user_email}>'))
700 706 self._remote.invalidate_vcs_cache()
701 707 return self._identify(), True
702 708 except RepositoryError as e:
703 709 # The merge-abort may raise another exception which 'hides'
704 710 # the original one, therefore we log it here.
705 711 log.exception('Error while merging shadow repo during merge.')
706 712 if 'unresolved merge conflicts' in safe_str(e):
707 713 unresolved = self._remote.get_unresolved_files()
708 714 log.debug('unresolved files: %s', unresolved)
709 715
710 716 # Cleanup any merge leftovers
711 717 self._remote.update(clean=True)
712 718 if unresolved:
713 719 raise UnresolvedFilesInRepo(unresolved)
714 720 else:
715 721 raise
716 722
717 723 def _local_close(self, target_ref, user_name, user_email,
718 724 source_ref, close_message=''):
719 725 """
720 726 Close the branch of the given source_revision
721 727
722 728 Returns the commit id of the close and a boolean indicating if the
723 729 commit needs to be pushed.
724 730 """
725 731 self._update(source_ref.commit_id)
726 732 message = (close_message or vcs_settings.HG_CLOSE_BRANCH_MESSAGE_TMPL).format(
727 733 user_name=user_name,
728 734 user_email=user_email,
729 735 target_ref_name=target_ref.name,
730 736 source_ref_name=source_ref.name
731 737 )
732 738 try:
733 739 self._remote.commit(
734 740 message=safe_str(message),
735 741 username=safe_str(f'{user_name} <{user_email}>'),
736 742 close_branch=True)
737 743 self._remote.invalidate_vcs_cache()
738 744 return self._identify(), True
739 745 except RepositoryError:
740 746 # Cleanup any commit leftovers
741 747 self._remote.update(clean=True)
742 748 raise
743 749
744 750 def _is_the_same_branch(self, target_ref, source_ref):
745 751 return (
746 752 self._get_branch_name(target_ref) ==
747 753 self._get_branch_name(source_ref))
748 754
749 755 def _get_branch_name(self, ref):
750 756 if ref.type == 'branch':
751 757 return ref.name
752 758 return self._remote.ctx_branch(ref.commit_id)
753 759
754 760 def _maybe_prepare_merge_workspace(
755 761 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
756 762 shadow_repository_path = self._get_shadow_repository_path(
757 763 self.path, repo_id, workspace_id)
758 764 if not os.path.exists(shadow_repository_path):
759 765 self._local_clone(shadow_repository_path)
760 766 log.debug(
761 767 'Prepared shadow repository in %s', shadow_repository_path)
762 768
763 769 return shadow_repository_path
764 770
765 771 def _merge_repo(self, repo_id, workspace_id, target_ref,
766 772 source_repo, source_ref, merge_message,
767 773 merger_name, merger_email, dry_run=False,
768 774 use_rebase=False, close_branch=False):
769 775
770 776 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
771 777 'rebase' if use_rebase else 'merge', dry_run)
772 778
773 779 if target_ref.commit_id not in self._heads():
774 780 return MergeResponse(
775 781 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
776 782 metadata={'target_ref': target_ref})
777 783
778 784 try:
779 785 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
780 786 heads_all = self._heads(target_ref.name)
781 787 max_heads = 10
782 788 if len(heads_all) > max_heads:
783 789 heads = '\n,'.join(
784 790 heads_all[:max_heads] +
785 791 [f'and {len(heads_all)-max_heads} more.'])
786 792 else:
787 793 heads = '\n,'.join(heads_all)
788 794 metadata = {
789 795 'target_ref': target_ref,
790 796 'source_ref': source_ref,
791 797 'heads': heads
792 798 }
793 799 return MergeResponse(
794 800 False, False, None,
795 801 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
796 802 metadata=metadata)
797 803 except CommitDoesNotExistError:
798 804 log.exception('Failure when looking up branch heads on hg target')
799 805 return MergeResponse(
800 806 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
801 807 metadata={'target_ref': target_ref})
802 808
803 809 shadow_repository_path = self._maybe_prepare_merge_workspace(
804 810 repo_id, workspace_id, target_ref, source_ref)
805 811 shadow_repo = self.get_shadow_instance(shadow_repository_path)
806 812
807 813 log.debug('Pulling in target reference %s', target_ref)
808 814 self._validate_pull_reference(target_ref)
809 815 shadow_repo._local_pull(self.path, target_ref)
810 816
811 817 try:
812 818 log.debug('Pulling in source reference %s', source_ref)
813 819 source_repo._validate_pull_reference(source_ref)
814 820 shadow_repo._local_pull(source_repo.path, source_ref)
815 821 except CommitDoesNotExistError:
816 822 log.exception('Failure when doing local pull on hg shadow repo')
817 823 return MergeResponse(
818 824 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
819 825 metadata={'source_ref': source_ref})
820 826
821 827 merge_ref = None
822 828 merge_commit_id = None
823 829 close_commit_id = None
824 830 merge_failure_reason = MergeFailureReason.NONE
825 831 metadata = {}
826 832
827 833 # enforce that close branch should be used only in case we source from
828 834 # an actual Branch
829 835 close_branch = close_branch and source_ref.type == 'branch'
830 836
831 837 # don't allow to close branch if source and target are the same
832 838 close_branch = close_branch and source_ref.name != target_ref.name
833 839
834 840 needs_push_on_close = False
835 841 if close_branch and not use_rebase and not dry_run:
836 842 try:
837 843 close_commit_id, needs_push_on_close = shadow_repo._local_close(
838 844 target_ref, merger_name, merger_email, source_ref)
839 845 merge_possible = True
840 846 except RepositoryError:
841 847 log.exception('Failure when doing close branch on '
842 848 'shadow repo: %s', shadow_repo)
843 849 merge_possible = False
844 850 merge_failure_reason = MergeFailureReason.MERGE_FAILED
845 851 else:
846 852 merge_possible = True
847 853
848 854 needs_push = False
849 855 if merge_possible:
850 856
851 857 try:
852 858 merge_commit_id, needs_push = shadow_repo._local_merge(
853 859 target_ref, merge_message, merger_name, merger_email,
854 860 source_ref, use_rebase=use_rebase,
855 861 close_commit_id=close_commit_id, dry_run=dry_run)
856 862 merge_possible = True
857 863
858 864 # read the state of the close action, as it
859 865 # may have required a push
860 866 needs_push = needs_push or needs_push_on_close
861 867
862 868 # Set a bookmark pointing to the merge commit. This bookmark
863 869 # may be used to easily identify the last successful merge
864 870 # commit in the shadow repository.
865 871 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
866 872 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
867 873 except SubrepoMergeError:
868 874 log.exception(
869 875 'Subrepo merge error during local merge on hg shadow repo.')
870 876 merge_possible = False
871 877 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
872 878 needs_push = False
873 879 except RepositoryError as e:
874 880 log.exception('Failure when doing local merge on hg shadow repo')
875 881 metadata['unresolved_files'] = 'no unresolved files found'
876 882
877 883 if isinstance(e, UnresolvedFilesInRepo):
878 884 all_conflicts = list(e.args[0])
879 885 max_conflicts = 20
880 886 if len(all_conflicts) > max_conflicts:
881 887 conflicts = all_conflicts[:max_conflicts] \
882 888 + [f'and {len(all_conflicts)-max_conflicts} more.']
883 889 else:
884 890 conflicts = all_conflicts
885 891 metadata['unresolved_files'] = \
886 892 '\n* conflict: ' + \
887 893 ('\n * conflict: '.join(conflicts))
888 894
889 895 merge_possible = False
890 896 merge_failure_reason = MergeFailureReason.MERGE_FAILED
891 897 needs_push = False
892 898
893 899 if merge_possible and not dry_run:
894 900 if needs_push:
895 901 # In case the target is a bookmark, update it, so after pushing
896 902 # the bookmark is also updated in the target.
897 903 if target_ref.type == 'book':
898 904 shadow_repo.bookmark(
899 905 target_ref.name, revision=merge_commit_id)
900 906 try:
901 907 shadow_repo_with_hooks = self.get_shadow_instance(
902 908 shadow_repository_path,
903 909 enable_hooks=True)
904 910 # This is the actual merge action, we push from shadow
905 911 # into origin.
906 912 # Note: the push_branches option will push any new branch
907 913 # defined in the source repository to the target. This may
908 914 # be dangerous as branches are permanent in Mercurial.
909 915 # This feature was requested in issue #441.
910 916 shadow_repo_with_hooks._local_push(
911 917 merge_commit_id, self.path, push_branches=True,
912 918 enable_hooks=True)
913 919
914 920 # maybe we also need to push the close_commit_id
915 921 if close_commit_id:
916 922 shadow_repo_with_hooks._local_push(
917 923 close_commit_id, self.path, push_branches=True,
918 924 enable_hooks=True)
919 925 merge_succeeded = True
920 926 except RepositoryError:
921 927 log.exception(
922 928 'Failure when doing local push from the shadow '
923 929 'repository to the target repository at %s.', self.path)
924 930 merge_succeeded = False
925 931 merge_failure_reason = MergeFailureReason.PUSH_FAILED
926 932 metadata['target'] = 'hg shadow repo'
927 933 metadata['merge_commit'] = merge_commit_id
928 934 else:
929 935 merge_succeeded = True
930 936 else:
931 937 merge_succeeded = False
932 938
933 939 return MergeResponse(
934 940 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
935 941 metadata=metadata)
936 942
937 943 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
938 944 config = self.config.copy()
939 945 if not enable_hooks:
940 946 config.clear_section('hooks')
941 947 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
942 948
943 949 def _validate_pull_reference(self, reference):
944 950 if not (reference.name in self.bookmarks or
945 951 reference.name in self.branches or
946 952 self.get_commit(reference.commit_id)):
947 953 raise CommitDoesNotExistError(
948 954 'Unknown branch, bookmark or commit id')
949 955
950 956 def _local_pull(self, repository_path, reference):
951 957 """
952 958 Fetch a branch, bookmark or commit from a local repository.
953 959 """
954 960 repository_path = os.path.abspath(repository_path)
955 961 if repository_path == self.path:
956 962 raise ValueError('Cannot pull from the same repository')
957 963
958 964 reference_type_to_option_name = {
959 965 'book': 'bookmark',
960 966 'branch': 'branch',
961 967 }
962 968 option_name = reference_type_to_option_name.get(
963 969 reference.type, 'revision')
964 970
965 971 if option_name == 'revision':
966 972 ref = reference.commit_id
967 973 else:
968 974 ref = reference.name
969 975
970 976 options = {option_name: [ref]}
971 977 self._remote.pull_cmd(repository_path, hooks=False, **options)
972 978 self._remote.invalidate_vcs_cache()
973 979
974 980 def bookmark(self, bookmark, revision=None):
975 981 if isinstance(bookmark, str):
976 982 bookmark = safe_str(bookmark)
977 983 self._remote.bookmark(bookmark, revision=revision)
978 984 self._remote.invalidate_vcs_cache()
979 985
980 986 def get_path_permissions(self, username):
981 987 hgacl_file = os.path.join(self.path, '.hg/hgacl')
982 988
983 989 def read_patterns(suffix):
984 990 svalue = None
985 991 for section, option in [
986 992 ('narrowacl', username + suffix),
987 993 ('narrowacl', 'default' + suffix),
988 994 ('narrowhgacl', username + suffix),
989 995 ('narrowhgacl', 'default' + suffix)
990 996 ]:
991 997 try:
992 998 svalue = hgacl.get(section, option)
993 999 break # stop at the first value we find
994 1000 except configparser.NoOptionError:
995 1001 pass
996 1002 if not svalue:
997 1003 return None
998 1004 result = ['/']
999 1005 for pattern in svalue.split():
1000 1006 result.append(pattern)
1001 1007 if '*' not in pattern and '?' not in pattern:
1002 1008 result.append(pattern + '/*')
1003 1009 return result
1004 1010
1005 1011 if os.path.exists(hgacl_file):
1006 1012 try:
1007 1013 hgacl = configparser.RawConfigParser()
1008 1014 hgacl.read(hgacl_file)
1009 1015
1010 1016 includes = read_patterns('.includes')
1011 1017 excludes = read_patterns('.excludes')
1012 1018 return BasePathPermissionChecker.create_from_patterns(
1013 1019 includes, excludes)
1014 1020 except BaseException as e:
1015 1021 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
1016 1022 hgacl_file, self.name, e)
1017 1023 raise exceptions.RepositoryRequirementError(msg)
1018 1024 else:
1019 1025 return None
1020 1026
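
For context on the ACL lookup in get_path_permissions() above, the self-contained sketch below uses a made-up .hg/hgacl file (the section and option names match what the method queries) and replays the pattern expansion done by read_patterns():

import configparser

# Hypothetical hgacl contents: [narrowacl]/[narrowhgacl] sections with
# "<username>.includes"/".excludes" options and "default.*" fallbacks.
HGACL_EXAMPLE = """
[narrowacl]
default.includes = docs libs/core
john.excludes = libs/secret/*
"""

hgacl = configparser.RawConfigParser()
hgacl.read_string(HGACL_EXAMPLE)

svalue = hgacl.get('narrowacl', 'default.includes')  # 'docs libs/core'

# read_patterns() seeds the list with '/' and, for patterns without
# '*' or '?', also adds '<pattern>/*'
result = ['/']
for pattern in svalue.split():
    result.append(pattern)
    if '*' not in pattern and '?' not in pattern:
        result.append(pattern + '/*')

print(result)  # ['/', 'docs', 'docs/*', 'libs/core', 'libs/core/*']
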
1021 1027
1022 1028 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1023 1029
1024 1030 def _commit_factory(self, commit_id):
1025 1031 if isinstance(commit_id, int):
1026 1032 return self.repo.get_commit(
1027 1033 commit_idx=commit_id, pre_load=self.pre_load)
1028 1034 else:
1029 1035 return self.repo.get_commit(
1030 1036 commit_id=commit_id, pre_load=self.pre_load)