strip: fix backup command args
super-admin - r5158:79ec5964 default
@@ -1,1013 +1,1013 b''
1 1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 HG repository module
21 21 """
22 22 import os
23 23 import logging
24 24 import binascii
25 25 import configparser
26 26 import urllib.request
27 27 import urllib.parse
28 28 import urllib.error
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from collections import OrderedDict
33 33 from rhodecode.lib.datelib import (
34 34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 35 from rhodecode.lib.str_utils import safe_str
36 36 from rhodecode.lib.utils2 import CachedProperty
37 37 from rhodecode.lib.vcs import connection, exceptions
38 38 from rhodecode.lib.vcs.backends.base import (
39 39 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 40 MergeFailureReason, Reference, BasePathPermissionChecker)
41 41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 44 from rhodecode.lib.vcs.exceptions import (
45 45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
47 47
48 48 hexlify = binascii.hexlify
49 49 nullid = "\0" * 20
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 class MercurialRepository(BaseRepository):
55 55 """
56 56 Mercurial repository backend
57 57 """
58 58 DEFAULT_BRANCH_NAME = 'default'
59 59
60 60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 61 do_workspace_checkout=False, with_wire=None, bare=False):
62 62 """
63 63 Raises RepositoryError if repository could not be found at the given
64 64 ``repo_path``.
65 65
66 66 :param repo_path: local path of the repository
67 67 :param config: config object containing the repo configuration
68 68 :param create=False: if set to True, try to create the repository if
69 69 it does not exist, rather than raising an exception
70 70 :param src_url=None: if given, clone the repository from this location
71 71 :param do_workspace_checkout=False: update the working copy after
72 72 making a clone
73 73 :param bare: not used, compatible with other VCS
74 74 """
75 75
76 76 self.path = safe_str(os.path.abspath(repo_path))
77 77 # mercurial since 4.4.X requires certain configuration to be present;
78 78 # because we sometimes init repos with a custom config, we need to meet
79 79 # these special requirements
80 80 self.config = config if config else self.get_default_config(
81 81 default=[('extensions', 'largefiles', '1')])
82 82 self.with_wire = with_wire or {"cache": False} # default should not use cache
83 83
84 84 self._init_repo(create, src_url, do_workspace_checkout)
85 85
86 86 # caches
87 87 self._commit_ids = {}
88 88
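# Illustrative sketch (not part of the diff hunk): driving the constructor
# documented above. The repository paths and clone source are hypothetical
# placeholders.
#
#     repo = MercurialRepository('/srv/repos/example-hg', create=True)
#     clone = MercurialRepository(
#         '/srv/repos/example-clone', create=True,
#         src_url='/srv/repos/example-hg', do_workspace_checkout=False)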
89 89 @LazyProperty
90 90 def _remote(self):
91 91 repo_id = self.path
92 92 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
93 93
94 94 @CachedProperty
95 95 def commit_ids(self):
96 96 """
97 97 Returns list of commit ids, in ascending order. Being a lazy
98 98 attribute allows external tools to inject shas from cache.
99 99 """
100 100 commit_ids = self._get_all_commit_ids()
101 101 self._rebuild_cache(commit_ids)
102 102 return commit_ids
103 103
104 104 def _rebuild_cache(self, commit_ids):
105 105 self._commit_ids = {commit_id: index
106 106 for index, commit_id in enumerate(commit_ids)}
107 107
108 108 @CachedProperty
109 109 def branches(self):
110 110 return self._get_branches()
111 111
112 112 @CachedProperty
113 113 def branches_closed(self):
114 114 return self._get_branches(active=False, closed=True)
115 115
116 116 @CachedProperty
117 117 def branches_all(self):
118 118 all_branches = {}
119 119 all_branches.update(self.branches)
120 120 all_branches.update(self.branches_closed)
121 121 return all_branches
122 122
123 123 def _get_branches(self, active=True, closed=False):
124 124 """
125 125 Gets branches for this repository.
126 126 Returns only open (active, not closed) branches by default.
127 127
128 128 :param active: also return active branches
129 129 :param closed: also return closed branches
130 130
131 131 """
132 132 if self.is_empty():
133 133 return {}
134 134
135 135 def get_name(ctx):
136 136 return ctx[0]
137 137
138 138 _branches = [(n, h,) for n, h in
139 139 self._remote.branches(active, closed).items()]
140 140
141 141 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
142 142
143 143 @CachedProperty
144 144 def tags(self):
145 145 """
146 146 Gets tags for this repository
147 147 """
148 148 return self._get_tags()
149 149
150 150 def _get_tags(self):
151 151 if self.is_empty():
152 152 return {}
153 153
154 154 def get_name(ctx):
155 155 return ctx[0]
156 156
157 157 _tags = [(n, h,) for n, h in
158 158 self._remote.tags().items()]
159 159
160 160 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
161 161
162 162 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
163 163 """
164 164 Creates and returns a tag for the given ``commit_id``.
165 165
166 166 :param name: name for new tag
167 167 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
168 168 :param commit_id: commit id for which new tag would be created
169 169 :param message: message of the tag's commit
170 170 :param date: date of tag's commit
171 171
172 172 :raises TagAlreadyExistError: if tag with same name already exists
173 173 """
174 174 if name in self.tags:
175 175 raise TagAlreadyExistError("Tag %s already exists" % name)
176 176
177 177 commit = self.get_commit(commit_id=commit_id)
178 178 local = kwargs.setdefault('local', False)
179 179
180 180 if message is None:
181 181 message = f"Added tag {name} for commit {commit.short_id}"
182 182
183 183 date, tz = date_to_timestamp_plus_offset(date)
184 184
185 185 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
186 186 self._remote.invalidate_vcs_cache()
187 187
188 188 # Reinitialize tags
189 189 self._invalidate_prop_cache('tags')
190 190 tag_id = self.tags[name]
191 191
192 192 return self.get_commit(commit_id=tag_id)
193 193
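# Illustrative sketch: creating and resolving a tag through the API documented
# above. The repo instance, tag name and user string are hypothetical.
#
#     commit = repo.tag('v1.0.0', 'Joe Doe <joe.doe@example.com>',
#                       commit_id=repo.commit_ids[-1],
#                       message='Release v1.0.0')
#     assert repo.tags['v1.0.0'] == commit.raw_id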
194 194 def remove_tag(self, name, user, message=None, date=None):
195 195 """
196 196 Removes tag with the given `name`.
197 197
198 198 :param name: name of the tag to be removed
199 199 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
200 200 :param message: message of the tag's removal commit
201 201 :param date: date of tag's removal commit
202 202
203 203 :raises TagDoesNotExistError: if tag with given name does not exist
204 204 """
205 205 if name not in self.tags:
206 206 raise TagDoesNotExistError("Tag %s does not exist" % name)
207 207
208 208 if message is None:
209 209 message = "Removed tag %s" % name
210 210 local = False
211 211
212 212 date, tz = date_to_timestamp_plus_offset(date)
213 213
214 214 self._remote.tag(name, nullid, message, local, user, date, tz)
215 215 self._remote.invalidate_vcs_cache()
216 216 self._invalidate_prop_cache('tags')
217 217
218 218 @LazyProperty
219 219 def bookmarks(self):
220 220 """
221 221 Gets bookmarks for this repository
222 222 """
223 223 return self._get_bookmarks()
224 224
225 225 def _get_bookmarks(self):
226 226 if self.is_empty():
227 227 return {}
228 228
229 229 def get_name(ctx):
230 230 return ctx[0]
231 231
232 232 _bookmarks = [
233 233 (n, h) for n, h in
234 234 self._remote.bookmarks().items()]
235 235
236 236 return OrderedDict(sorted(_bookmarks, key=get_name))
237 237
238 238 def _get_all_commit_ids(self):
239 239 return self._remote.get_all_commit_ids('visible')
240 240
241 241 def get_diff(
242 242 self, commit1, commit2, path='', ignore_whitespace=False,
243 243 context=3, path1=None):
244 244 """
245 245 Returns (git like) *diff*, as plain text. Shows changes introduced by
246 246 `commit2` since `commit1`.
247 247
248 248 :param commit1: Entry point from which diff is shown. Can be
249 249 ``self.EMPTY_COMMIT`` - in this case, patch showing all
250 250 the changes since empty state of the repository until `commit2`
251 251 :param commit2: Until which commit changes should be shown.
252 252 :param ignore_whitespace: If set to ``True``, would not show whitespace
253 253 changes. Defaults to ``False``.
254 254 :param context: How many lines before/after changed lines should be
255 255 shown. Defaults to ``3``.
256 256 """
257 257 self._validate_diff_commits(commit1, commit2)
258 258 if path1 is not None and path1 != path:
259 259 raise ValueError("Diff of two different paths not supported.")
260 260
261 261 if path:
262 262 file_filter = [self.path, path]
263 263 else:
264 264 file_filter = None
265 265
266 266 diff = self._remote.diff(
267 267 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
268 268 opt_git=True, opt_ignorews=ignore_whitespace,
269 269 context=context)
270 270 return MercurialDiff(diff)
271 271
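# Illustrative sketch: producing a plain-text diff between two commits with
# get_diff() above. The commit indexes and path are hypothetical.
#
#     old = repo.get_commit(commit_idx=0)
#     new = repo.get_commit(commit_idx=1)
#     diff = repo.get_diff(old, new, path='setup.py', context=5)  # MercurialDiff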
272 272 def strip(self, commit_id, branch=None):
273 self._remote.strip(commit_id, update=False, backup="none")
273 self._remote.strip(commit_id, update=False, backup=False)
274 274
275 275 self._remote.invalidate_vcs_cache()
276 276 # clear cache
277 277 self._invalidate_prop_cache('commit_ids')
278 278
279 279 return len(self.commit_ids)
280 280
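# Note on the fix above (hedged): strip() now passes a boolean ``backup`` flag
# to the remote call instead of the string "none", on the assumption that the
# backing implementation expects True/False (mirroring ``hg strip --no-backup``)
# rather than a backup-mode string. Typical usage sketch, commit id hypothetical:
#
#     remaining = repo.strip(repo.commit_ids[-1])  # strip tip, write no backup bundle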
281 281 def verify(self):
282 282 verify = self._remote.verify()
283 283
284 284 self._remote.invalidate_vcs_cache()
285 285 return verify
286 286
287 287 def hg_update_cache(self):
288 288 update_cache = self._remote.hg_update_cache()
289 289
290 290 self._remote.invalidate_vcs_cache()
291 291 return update_cache
292 292
293 293 def hg_rebuild_fn_cache(self):
294 294 update_cache = self._remote.hg_rebuild_fn_cache()
295 295
296 296 self._remote.invalidate_vcs_cache()
297 297 return update_cache
298 298
299 299 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
300 300 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
301 301 self, commit_id1, repo2, commit_id2)
302 302
303 303 if commit_id1 == commit_id2:
304 304 return commit_id1
305 305
306 306 ancestors = self._remote.revs_from_revspec(
307 307 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
308 308 other_path=repo2.path)
309 309
310 310 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
311 311
312 312 log.debug('Found common ancestor with sha: %s', ancestor_id)
313 313 return ancestor_id
314 314
315 315 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
316 316 if commit_id1 == commit_id2:
317 317 commits = []
318 318 else:
319 319 if merge:
320 320 indexes = self._remote.revs_from_revspec(
321 321 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
322 322 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
323 323 else:
324 324 indexes = self._remote.revs_from_revspec(
325 325 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
326 326 commit_id1, other_path=repo2.path)
327 327
328 328 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
329 329 for idx in indexes]
330 330
331 331 return commits
332 332
333 333 @staticmethod
334 334 def check_url(url, config):
335 335 """
336 336 Function will check the given url and try to verify that it's a valid
337 337 link. Sometimes it may happen that mercurial issues a basic
338 338 auth request, which can cause the whole API to hang when used from python
339 339 or other external callers.
340 340
341 341 On failure it raises urllib.error.HTTPError; the exception is also raised
342 342 when the return code is not 200
343 343 """
344 344 # check first if it's not a local url
345 345 if os.path.isdir(url) or url.startswith('file:'):
346 346 return True
347 347
348 348 # Request the _remote to verify the url
349 349 return connection.Hg.check_url(url, config.serialize())
350 350
351 351 @staticmethod
352 352 def is_valid_repository(path):
353 353 return os.path.isdir(os.path.join(path, '.hg'))
354 354
355 355 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
356 356 """
357 357 Function will check for mercurial repository in given path. If there
358 358 is no repository in that path it will raise an exception unless
359 359 `create` parameter is set to True - in that case repository would
360 360 be created.
361 361
362 362 If `src_url` is given, the repository is cloned from that
363 363 location. Additionally, the working copy is updated
364 364 according to the `do_workspace_checkout` flag.
365 365 """
366 366 if create and os.path.exists(self.path):
367 367 raise RepositoryError(
368 368 f"Cannot create repository at {self.path}, location already exist")
369 369
370 370 if src_url:
371 371 url = str(self._get_url(src_url))
372 372 MercurialRepository.check_url(url, self.config)
373 373
374 374 self._remote.clone(url, self.path, do_workspace_checkout)
375 375
376 376 # Don't try to create if we've already cloned repo
377 377 create = False
378 378
379 379 if create:
380 380 os.makedirs(self.path, mode=0o755)
381 381
382 382 self._remote.localrepository(create)
383 383
384 384 @LazyProperty
385 385 def in_memory_commit(self):
386 386 return MercurialInMemoryCommit(self)
387 387
388 388 @LazyProperty
389 389 def description(self):
390 390 description = self._remote.get_config_value(
391 391 'web', 'description', untrusted=True)
392 392 return safe_str(description or self.DEFAULT_DESCRIPTION)
393 393
394 394 @LazyProperty
395 395 def contact(self):
396 396 contact = (
397 397 self._remote.get_config_value("web", "contact") or
398 398 self._remote.get_config_value("ui", "username"))
399 399 return safe_str(contact or self.DEFAULT_CONTACT)
400 400
401 401 @LazyProperty
402 402 def last_change(self):
403 403 """
404 404 Returns last change made on this repository as
405 405 `datetime.datetime` object.
406 406 """
407 407 try:
408 408 return self.get_commit().date
409 409 except RepositoryError:
410 410 tzoffset = makedate()[1]
411 411 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
412 412
413 413 def _get_fs_mtime(self):
414 414 # fallback to filesystem
415 415 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
416 416 st_path = os.path.join(self.path, '.hg', "store")
417 417 if os.path.exists(cl_path):
418 418 return os.stat(cl_path).st_mtime
419 419 else:
420 420 return os.stat(st_path).st_mtime
421 421
422 422 def _get_url(self, url):
423 423 """
424 424 Returns normalized url. If no scheme is given, falls back
425 425 to the filesystem
426 426 (``file:///``) scheme.
427 427 """
428 428 if url != 'default' and '://' not in url:
429 429 url = "file:" + urllib.request.pathname2url(url)
430 430 return url
431 431
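# Illustrative sketch: local filesystem paths receive a ``file:`` prefix, while
# URLs that already carry a scheme and the special name 'default' pass through
# unchanged. The paths are hypothetical.
#
#     repo._get_url('/srv/repos/example-hg')        # 'file:/srv/repos/example-hg'
#     repo._get_url('https://hg.example.com/repo')  # returned as-is
#     repo._get_url('default')                      # returned as-is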
432 432 def get_hook_location(self):
433 433 """
434 434 returns absolute path to location where hooks are stored
435 435 """
436 436 return os.path.join(self.path, '.hg', '.hgrc')
437 437
438 438 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
439 439 translate_tag=None, maybe_unreachable=False, reference_obj=None):
440 440 """
441 441 Returns ``MercurialCommit`` object representing repository's
442 442 commit at the given `commit_id` or `commit_idx`.
443 443 """
444 444 if self.is_empty():
445 445 raise EmptyRepositoryError("There are no commits yet")
446 446
447 447 if commit_id is not None:
448 448 self._validate_commit_id(commit_id)
449 449 try:
450 450 # we have cached idx, use it without contacting the remote
451 451 idx = self._commit_ids[commit_id]
452 452 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
453 453 except KeyError:
454 454 pass
455 455
456 456 elif commit_idx is not None:
457 457 self._validate_commit_idx(commit_idx)
458 458 try:
459 459 _commit_id = self.commit_ids[commit_idx]
460 460 if commit_idx < 0:
461 461 commit_idx = self.commit_ids.index(_commit_id)
462 462
463 463 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
464 464 except IndexError:
465 465 commit_id = commit_idx
466 466 else:
467 467 commit_id = "tip"
468 468
469 469 # case here is no cached version, do an actual lookup instead
470 470 try:
471 471 raw_id, idx = self._remote.lookup(commit_id, both=True)
472 472 except CommitDoesNotExistError:
473 473 msg = "Commit {} does not exist for `{}`".format(
474 474 *map(safe_str, [commit_id, self.name]))
475 475 raise CommitDoesNotExistError(msg)
476 476
477 477 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
478 478
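# Illustrative sketch: commits can be resolved by id or by index; negative
# indexes are normalised against ``commit_ids``. The calls below assume a
# non-empty repo instance.
#
#     tip = repo.get_commit()                    # defaults to 'tip'
#     first = repo.get_commit(commit_idx=0)
#     last = repo.get_commit(commit_idx=-1)
#     again = repo.get_commit(commit_id=tip.raw_id)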
479 479 def get_commits(
480 480 self, start_id=None, end_id=None, start_date=None, end_date=None,
481 481 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
482 482 """
483 483 Returns generator of ``MercurialCommit`` objects from start to end
484 484 (both are inclusive)
485 485
486 486 :param start_id: None, str(commit_id)
487 487 :param end_id: None, str(commit_id)
488 488 :param start_date: if specified, commits with commit date less than
489 489 ``start_date`` would be filtered out from returned set
490 490 :param end_date: if specified, commits with commit date greater than
491 491 ``end_date`` would be filtered out from returned set
492 492 :param branch_name: if specified, commits not reachable from given
493 493 branch would be filtered out from returned set
494 494 :param show_hidden: Show hidden commits such as obsolete or hidden from
495 495 Mercurial evolve
496 496 :raise BranchDoesNotExistError: If given ``branch_name`` does not
497 497 exist.
498 498 :raise CommitDoesNotExistError: If commit for given ``start`` or
499 499 ``end`` could not be found.
500 500 """
501 501 # actually we should check now if it's not an empty repo
502 502 if self.is_empty():
503 503 raise EmptyRepositoryError("There are no commits yet")
504 504 self._validate_branch_name(branch_name)
505 505
506 506 branch_ancestors = False
507 507 if start_id is not None:
508 508 self._validate_commit_id(start_id)
509 509 c_start = self.get_commit(commit_id=start_id)
510 510 start_pos = self._commit_ids[c_start.raw_id]
511 511 else:
512 512 start_pos = None
513 513
514 514 if end_id is not None:
515 515 self._validate_commit_id(end_id)
516 516 c_end = self.get_commit(commit_id=end_id)
517 517 end_pos = max(0, self._commit_ids[c_end.raw_id])
518 518 else:
519 519 end_pos = None
520 520
521 521 if None not in [start_id, end_id] and start_pos > end_pos:
522 522 raise RepositoryError(
523 523 "Start commit '%s' cannot be after end commit '%s'" %
524 524 (start_id, end_id))
525 525
526 526 if end_pos is not None:
527 527 end_pos += 1
528 528
529 529 commit_filter = []
530 530
531 531 if branch_name and not branch_ancestors:
532 532 commit_filter.append(f'branch("{branch_name}")')
533 533 elif branch_name and branch_ancestors:
534 534 commit_filter.append(f'ancestors(branch("{branch_name}"))')
535 535
536 536 if start_date and not end_date:
537 537 commit_filter.append(f'date(">{start_date}")')
538 538 if end_date and not start_date:
539 539 commit_filter.append(f'date("<{end_date}")')
540 540 if start_date and end_date:
541 541 commit_filter.append(
542 542 f'date(">{start_date}") and date("<{end_date}")')
543 543
544 544 if not show_hidden:
545 545 commit_filter.append('not obsolete()')
546 546 commit_filter.append('not hidden()')
547 547
548 548 # TODO: johbo: Figure out a simpler way for this solution
549 549 collection_generator = CollectionGenerator
550 550 if commit_filter:
551 551 commit_filter = ' and '.join(map(safe_str, commit_filter))
552 552 revisions = self._remote.rev_range([commit_filter])
553 553 collection_generator = MercurialIndexBasedCollectionGenerator
554 554 else:
555 555 revisions = self.commit_ids
556 556
557 557 if start_pos or end_pos:
558 558 revisions = revisions[start_pos:end_pos]
559 559
560 560 return collection_generator(self, revisions, pre_load=pre_load)
561 561
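# Illustrative sketch: iterating over a filtered slice of history via the
# generator above. The branch name and date bounds are hypothetical; they are
# interpolated into a Mercurial revset as shown in the filters above.
#
#     for commit in repo.get_commits(branch_name='default',
#                                    start_date='2023-01-01',
#                                    end_date='2023-12-31'):
#         print(commit.raw_id, commit.message)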
562 562 def pull(self, url, commit_ids=None):
563 563 """
564 564 Pull changes from external location.
565 565
566 566 :param commit_ids: Optional. Can be set to a list of commit ids
567 567 which shall be pulled from the other repository.
568 568 """
569 569 url = self._get_url(url)
570 570 self._remote.pull(url, commit_ids=commit_ids)
571 571 self._remote.invalidate_vcs_cache()
572 572
573 573 def fetch(self, url, commit_ids=None):
574 574 """
575 575 Backward compatibility with GIT fetch==pull
576 576 """
577 577 return self.pull(url, commit_ids=commit_ids)
578 578
579 579 def push(self, url):
580 580 url = self._get_url(url)
581 581 self._remote.sync_push(url)
582 582
583 583 def _local_clone(self, clone_path):
584 584 """
585 585 Create a local clone of the current repo.
586 586 """
587 587 self._remote.clone(self.path, clone_path, update_after_clone=True,
588 588 hooks=False)
589 589
590 590 def _update(self, revision, clean=False):
591 591 """
592 592 Update the working copy to the specified revision.
593 593 """
594 594 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
595 595 self._remote.update(revision, clean=clean)
596 596
597 597 def _identify(self):
598 598 """
599 599 Return the current state of the working directory.
600 600 """
601 601 return self._remote.identify().strip().rstrip('+')
602 602
603 603 def _heads(self, branch=None):
604 604 """
605 605 Return the commit ids of the repository heads.
606 606 """
607 607 return self._remote.heads(branch=branch).strip().split(' ')
608 608
609 609 def _ancestor(self, revision1, revision2):
610 610 """
611 611 Return the common ancestor of the two revisions.
612 612 """
613 613 return self._remote.ancestor(revision1, revision2)
614 614
615 615 def _local_push(
616 616 self, revision, repository_path, push_branches=False,
617 617 enable_hooks=False):
618 618 """
619 619 Push the given revision to the specified repository.
620 620
621 621 :param push_branches: allow creating branches in the target repo.
622 622 """
623 623 self._remote.push(
624 624 [revision], repository_path, hooks=enable_hooks,
625 625 push_branches=push_branches)
626 626
627 627 def _local_merge(self, target_ref, merge_message, user_name, user_email,
628 628 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
629 629 """
630 630 Merge the given source_revision into the checked out revision.
631 631
632 632 Returns the commit id of the merge and a boolean indicating if the
633 633 commit needs to be pushed.
634 634 """
635 635 source_ref_commit_id = source_ref.commit_id
636 636 target_ref_commit_id = target_ref.commit_id
637 637
638 638 # update our workdir to target ref, for proper merge
639 639 self._update(target_ref_commit_id, clean=True)
640 640
641 641 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
642 642 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
643 643
644 644 if close_commit_id:
645 645 # NOTE(marcink): if we get the close commit, this is our new source
646 646 # which will include the close commit itself.
647 647 source_ref_commit_id = close_commit_id
648 648
649 649 if ancestor == source_ref_commit_id:
650 650 # Nothing to do, the changes were already integrated
651 651 return target_ref_commit_id, False
652 652
653 653 elif ancestor == target_ref_commit_id and is_the_same_branch:
654 654 # In this case we should force a commit message
655 655 return source_ref_commit_id, True
656 656
657 657 unresolved = None
658 658 if use_rebase:
659 659 try:
660 660 bookmark_name = f'rcbook{source_ref_commit_id}{target_ref_commit_id}'
661 661 self.bookmark(bookmark_name, revision=source_ref.commit_id)
662 662 self._remote.rebase(
663 663 source=source_ref_commit_id, dest=target_ref_commit_id)
664 664 self._remote.invalidate_vcs_cache()
665 665 self._update(bookmark_name, clean=True)
666 666 return self._identify(), True
667 667 except RepositoryError as e:
668 668 # The rebase-abort may raise another exception which 'hides'
669 669 # the original one, therefore we log it here.
670 670 log.exception('Error while rebasing shadow repo during merge.')
671 671 if 'unresolved conflicts' in safe_str(e):
672 672 unresolved = self._remote.get_unresolved_files()
673 673 log.debug('unresolved files: %s', unresolved)
674 674
675 675 # Cleanup any rebase leftovers
676 676 self._remote.invalidate_vcs_cache()
677 677 self._remote.rebase(abort=True)
678 678 self._remote.invalidate_vcs_cache()
679 679 self._remote.update(clean=True)
680 680 if unresolved:
681 681 raise UnresolvedFilesInRepo(unresolved)
682 682 else:
683 683 raise
684 684 else:
685 685 try:
686 686 self._remote.merge(source_ref_commit_id)
687 687 self._remote.invalidate_vcs_cache()
688 688 self._remote.commit(
689 689 message=safe_str(merge_message),
690 690 username=safe_str(f'{user_name} <{user_email}>'))
691 691 self._remote.invalidate_vcs_cache()
692 692 return self._identify(), True
693 693 except RepositoryError as e:
694 694 # The merge-abort may raise another exception which 'hides'
695 695 # the original one, therefore we log it here.
696 696 log.exception('Error while merging shadow repo during merge.')
697 697 if 'unresolved merge conflicts' in safe_str(e):
698 698 unresolved = self._remote.get_unresolved_files()
699 699 log.debug('unresolved files: %s', unresolved)
700 700
701 701 # Cleanup any merge leftovers
702 702 self._remote.update(clean=True)
703 703 if unresolved:
704 704 raise UnresolvedFilesInRepo(unresolved)
705 705 else:
706 706 raise
707 707
708 708 def _local_close(self, target_ref, user_name, user_email,
709 709 source_ref, close_message=''):
710 710 """
711 711 Close the branch of the given source_revision
712 712
713 713 Returns the commit id of the close and a boolean indicating if the
714 714 commit needs to be pushed.
715 715 """
716 716 self._update(source_ref.commit_id)
717 717 message = close_message or f"Closing branch: `{source_ref.name}`"
718 718 try:
719 719 self._remote.commit(
720 720 message=safe_str(message),
721 721 username=safe_str(f'{user_name} <{user_email}>'),
722 722 close_branch=True)
723 723 self._remote.invalidate_vcs_cache()
724 724 return self._identify(), True
725 725 except RepositoryError:
726 726 # Cleanup any commit leftovers
727 727 self._remote.update(clean=True)
728 728 raise
729 729
730 730 def _is_the_same_branch(self, target_ref, source_ref):
731 731 return (
732 732 self._get_branch_name(target_ref) ==
733 733 self._get_branch_name(source_ref))
734 734
735 735 def _get_branch_name(self, ref):
736 736 if ref.type == 'branch':
737 737 return ref.name
738 738 return self._remote.ctx_branch(ref.commit_id)
739 739
740 740 def _maybe_prepare_merge_workspace(
741 741 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
742 742 shadow_repository_path = self._get_shadow_repository_path(
743 743 self.path, repo_id, workspace_id)
744 744 if not os.path.exists(shadow_repository_path):
745 745 self._local_clone(shadow_repository_path)
746 746 log.debug(
747 747 'Prepared shadow repository in %s', shadow_repository_path)
748 748
749 749 return shadow_repository_path
750 750
751 751 def _merge_repo(self, repo_id, workspace_id, target_ref,
752 752 source_repo, source_ref, merge_message,
753 753 merger_name, merger_email, dry_run=False,
754 754 use_rebase=False, close_branch=False):
755 755
756 756 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
757 757 'rebase' if use_rebase else 'merge', dry_run)
758 758 if target_ref.commit_id not in self._heads():
759 759 return MergeResponse(
760 760 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
761 761 metadata={'target_ref': target_ref})
762 762
763 763 try:
764 764 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
765 765 heads_all = self._heads(target_ref.name)
766 766 max_heads = 10
767 767 if len(heads_all) > max_heads:
768 768 heads = '\n,'.join(
769 769 heads_all[:max_heads] +
770 770 [f'and {len(heads_all)-max_heads} more.'])
771 771 else:
772 772 heads = '\n,'.join(heads_all)
773 773 metadata = {
774 774 'target_ref': target_ref,
775 775 'source_ref': source_ref,
776 776 'heads': heads
777 777 }
778 778 return MergeResponse(
779 779 False, False, None,
780 780 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
781 781 metadata=metadata)
782 782 except CommitDoesNotExistError:
783 783 log.exception('Failure when looking up branch heads on hg target')
784 784 return MergeResponse(
785 785 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
786 786 metadata={'target_ref': target_ref})
787 787
788 788 shadow_repository_path = self._maybe_prepare_merge_workspace(
789 789 repo_id, workspace_id, target_ref, source_ref)
790 790 shadow_repo = self.get_shadow_instance(shadow_repository_path)
791 791
792 792 log.debug('Pulling in target reference %s', target_ref)
793 793 self._validate_pull_reference(target_ref)
794 794 shadow_repo._local_pull(self.path, target_ref)
795 795
796 796 try:
797 797 log.debug('Pulling in source reference %s', source_ref)
798 798 source_repo._validate_pull_reference(source_ref)
799 799 shadow_repo._local_pull(source_repo.path, source_ref)
800 800 except CommitDoesNotExistError:
801 801 log.exception('Failure when doing local pull on hg shadow repo')
802 802 return MergeResponse(
803 803 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
804 804 metadata={'source_ref': source_ref})
805 805
806 806 merge_ref = None
807 807 merge_commit_id = None
808 808 close_commit_id = None
809 809 merge_failure_reason = MergeFailureReason.NONE
810 810 metadata = {}
811 811
812 812 # enforce that close branch should be used only in case we source from
813 813 # an actual Branch
814 814 close_branch = close_branch and source_ref.type == 'branch'
815 815
816 816 # don't allow to close branch if source and target are the same
817 817 close_branch = close_branch and source_ref.name != target_ref.name
818 818
819 819 needs_push_on_close = False
820 820 if close_branch and not use_rebase and not dry_run:
821 821 try:
822 822 close_commit_id, needs_push_on_close = shadow_repo._local_close(
823 823 target_ref, merger_name, merger_email, source_ref)
824 824 merge_possible = True
825 825 except RepositoryError:
826 826 log.exception('Failure when doing close branch on '
827 827 'shadow repo: %s', shadow_repo)
828 828 merge_possible = False
829 829 merge_failure_reason = MergeFailureReason.MERGE_FAILED
830 830 else:
831 831 merge_possible = True
832 832
833 833 needs_push = False
834 834 if merge_possible:
835 835
836 836 try:
837 837 merge_commit_id, needs_push = shadow_repo._local_merge(
838 838 target_ref, merge_message, merger_name, merger_email,
839 839 source_ref, use_rebase=use_rebase,
840 840 close_commit_id=close_commit_id, dry_run=dry_run)
841 841 merge_possible = True
842 842
843 843 # read the state of the close action; it
844 844 # may have required a push
845 845 needs_push = needs_push or needs_push_on_close
846 846
847 847 # Set a bookmark pointing to the merge commit. This bookmark
848 848 # may be used to easily identify the last successful merge
849 849 # commit in the shadow repository.
850 850 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
851 851 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
852 852 except SubrepoMergeError:
853 853 log.exception(
854 854 'Subrepo merge error during local merge on hg shadow repo.')
855 855 merge_possible = False
856 856 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
857 857 needs_push = False
858 858 except RepositoryError as e:
859 859 log.exception('Failure when doing local merge on hg shadow repo')
860 860 if isinstance(e, UnresolvedFilesInRepo):
861 861 all_conflicts = list(e.args[0])
862 862 max_conflicts = 20
863 863 if len(all_conflicts) > max_conflicts:
864 864 conflicts = all_conflicts[:max_conflicts] \
865 865 + [f'and {len(all_conflicts)-max_conflicts} more.']
866 866 else:
867 867 conflicts = all_conflicts
868 868 metadata['unresolved_files'] = \
869 869 '\n* conflict: ' + \
870 870 ('\n * conflict: '.join(conflicts))
871 871
872 872 merge_possible = False
873 873 merge_failure_reason = MergeFailureReason.MERGE_FAILED
874 874 needs_push = False
875 875
876 876 if merge_possible and not dry_run:
877 877 if needs_push:
878 878 # In case the target is a bookmark, update it, so after pushing
879 879 # the bookmark is also updated in the target.
880 880 if target_ref.type == 'book':
881 881 shadow_repo.bookmark(
882 882 target_ref.name, revision=merge_commit_id)
883 883 try:
884 884 shadow_repo_with_hooks = self.get_shadow_instance(
885 885 shadow_repository_path,
886 886 enable_hooks=True)
887 887 # This is the actual merge action, we push from shadow
888 888 # into origin.
889 889 # Note: the push_branches option will push any new branch
890 890 # defined in the source repository to the target. This may
891 891 # be dangerous as branches are permanent in Mercurial.
892 892 # This feature was requested in issue #441.
893 893 shadow_repo_with_hooks._local_push(
894 894 merge_commit_id, self.path, push_branches=True,
895 895 enable_hooks=True)
896 896
897 897 # maybe we also need to push the close_commit_id
898 898 if close_commit_id:
899 899 shadow_repo_with_hooks._local_push(
900 900 close_commit_id, self.path, push_branches=True,
901 901 enable_hooks=True)
902 902 merge_succeeded = True
903 903 except RepositoryError:
904 904 log.exception(
905 905 'Failure when doing local push from the shadow '
906 906 'repository to the target repository at %s.', self.path)
907 907 merge_succeeded = False
908 908 merge_failure_reason = MergeFailureReason.PUSH_FAILED
909 909 metadata['target'] = 'hg shadow repo'
910 910 metadata['merge_commit'] = merge_commit_id
911 911 else:
912 912 merge_succeeded = True
913 913 else:
914 914 merge_succeeded = False
915 915
916 916 return MergeResponse(
917 917 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
918 918 metadata=metadata)
919 919
920 920 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
921 921 config = self.config.copy()
922 922 if not enable_hooks:
923 923 config.clear_section('hooks')
924 924 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
925 925
926 926 def _validate_pull_reference(self, reference):
927 927 if not (reference.name in self.bookmarks or
928 928 reference.name in self.branches or
929 929 self.get_commit(reference.commit_id)):
930 930 raise CommitDoesNotExistError(
931 931 'Unknown branch, bookmark or commit id')
932 932
933 933 def _local_pull(self, repository_path, reference):
934 934 """
935 935 Fetch a branch, bookmark or commit from a local repository.
936 936 """
937 937 repository_path = os.path.abspath(repository_path)
938 938 if repository_path == self.path:
939 939 raise ValueError('Cannot pull from the same repository')
940 940
941 941 reference_type_to_option_name = {
942 942 'book': 'bookmark',
943 943 'branch': 'branch',
944 944 }
945 945 option_name = reference_type_to_option_name.get(
946 946 reference.type, 'revision')
947 947
948 948 if option_name == 'revision':
949 949 ref = reference.commit_id
950 950 else:
951 951 ref = reference.name
952 952
953 953 options = {option_name: [ref]}
954 954 self._remote.pull_cmd(repository_path, hooks=False, **options)
955 955 self._remote.invalidate_vcs_cache()
956 956
957 957 def bookmark(self, bookmark, revision=None):
958 958 if isinstance(bookmark, str):
959 959 bookmark = safe_str(bookmark)
960 960 self._remote.bookmark(bookmark, revision=revision)
961 961 self._remote.invalidate_vcs_cache()
962 962
963 963 def get_path_permissions(self, username):
964 964 hgacl_file = os.path.join(self.path, '.hg/hgacl')
965 965
966 966 def read_patterns(suffix):
967 967 svalue = None
968 968 for section, option in [
969 969 ('narrowacl', username + suffix),
970 970 ('narrowacl', 'default' + suffix),
971 971 ('narrowhgacl', username + suffix),
972 972 ('narrowhgacl', 'default' + suffix)
973 973 ]:
974 974 try:
975 975 svalue = hgacl.get(section, option)
976 976 break # stop at the first value we find
977 977 except configparser.NoOptionError:
978 978 pass
979 979 if not svalue:
980 980 return None
981 981 result = ['/']
982 982 for pattern in svalue.split():
983 983 result.append(pattern)
984 984 if '*' not in pattern and '?' not in pattern:
985 985 result.append(pattern + '/*')
986 986 return result
987 987
988 988 if os.path.exists(hgacl_file):
989 989 try:
990 990 hgacl = configparser.RawConfigParser()
991 991 hgacl.read(hgacl_file)
992 992
993 993 includes = read_patterns('.includes')
994 994 excludes = read_patterns('.excludes')
995 995 return BasePathPermissionChecker.create_from_patterns(
996 996 includes, excludes)
997 997 except BaseException as e:
998 998 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
999 999 hgacl_file, self.name, e)
1000 1000 raise exceptions.RepositoryRequirementError(msg)
1001 1001 else:
1002 1002 return None
1003 1003
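# Illustrative sketch: the kind of ``.hg/hgacl`` file read_patterns() above
# understands; the section/option layout is inferred from the code and the
# usernames and glob patterns are hypothetical.
#
#     [narrowacl]
#     default.includes = docs/* src/*
#     default.excludes = src/secret/*
#     joe.includes = docs/* tools/*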
1004 1004
1005 1005 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1006 1006
1007 1007 def _commit_factory(self, commit_id):
1008 1008 if isinstance(commit_id, int):
1009 1009 return self.repo.get_commit(
1010 1010 commit_idx=commit_id, pre_load=self.pre_load)
1011 1011 else:
1012 1012 return self.repo.get_commit(
1013 1013 commit_id=commit_id, pre_load=self.pre_load)