fix(repo): fixed empty config case
super-admin -
r5251:711a4878 default
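The guard in the hunk below matters because the backend can be constructed with `config=None`. A minimal sketch of the failing case, assuming the import path and repository location shown here (both illustrative):

    from rhodecode.lib.vcs.backends.hg import MercurialRepository

    # Before this change, a None config reached
    # config.get('extensions', 'hgsubversion') on the argument itself and
    # raised AttributeError, even though self.config had already been
    # populated with the default largefiles configuration.
    repo = MercurialRepository('/srv/repos/example-hg', config=None)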
@@ -1,1024 +1,1024 b''
1 1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 2 #
3 3 # This program is free software: you can redistribute it and/or modify
4 4 # it under the terms of the GNU Affero General Public License, version 3
5 5 # (only), as published by the Free Software Foundation.
6 6 #
7 7 # This program is distributed in the hope that it will be useful,
8 8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 10 # GNU General Public License for more details.
11 11 #
12 12 # You should have received a copy of the GNU Affero General Public License
13 13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 14 #
15 15 # This program is dual-licensed. If you wish to learn more about the
16 16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 18
19 19 """
20 20 HG repository module
21 21 """
22 22 import os
23 23 import logging
24 24 import binascii
25 25 import configparser
26 26 import urllib.request
27 27 import urllib.parse
28 28 import urllib.error
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from collections import OrderedDict
33 33 from rhodecode.lib.datelib import (
34 34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 35 from rhodecode.lib.str_utils import safe_str
36 36 from rhodecode.lib.utils2 import CachedProperty
37 37 from rhodecode.lib.vcs import connection, exceptions
38 38 from rhodecode.lib.vcs.backends.base import (
39 39 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 40 MergeFailureReason, Reference, BasePathPermissionChecker)
41 41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 44 from rhodecode.lib.vcs.exceptions import (
45 45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
47 47
48 48 hexlify = binascii.hexlify
49 49 nullid = "\0" * 20
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 class MercurialRepository(BaseRepository):
55 55 """
56 56 Mercurial repository backend
57 57 """
58 58 DEFAULT_BRANCH_NAME = 'default'
59 59
60 60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 61 do_workspace_checkout=False, with_wire=None, bare=False):
62 62 """
63 63 Raises RepositoryError if repository could not be found at the given
64 64 ``repo_path``.
65 65
66 66 :param repo_path: local path of the repository
67 67 :param config: config object containing the repo configuration
68 68 :param create=False: if set to True, the repository will be created if
69 69 it does not exist, rather than raising an exception
70 70 :param src_url=None: if given, the repository is cloned from this location
71 71 :param do_workspace_checkout=False: update the working copy after
72 72 making a clone
73 73 :param bare: not used, kept for compatibility with other VCS backends
74 74 """
75 75
76 76 self.path = safe_str(os.path.abspath(repo_path))
77 77 # mercurial since 4.4.X requires certain configuration to be present;
78 78 # because we sometimes init repos with a custom config, we need to meet
79 79 # these special requirements
80 80 self.config = config if config else self.get_default_config(
81 81 default=[('extensions', 'largefiles', '')])
82 82
83 83 # NOTE(marcink): since python3, hgsubversion is deprecated.
84 84 # Old installations might still have this extension enabled;
85 85 # we explicitly remove it here to make sure it won't propagate further
86 if config.get('extensions', 'hgsubversion') is not None:
86 if config and config.get('extensions', 'hgsubversion') is not None:
87 87 config.drop_option('extensions', 'hgsubversion')
88 88
89 89 self.with_wire = with_wire or {"cache": False} # default should not use cache
90 90
91 91 self._init_repo(create, src_url, do_workspace_checkout)
92 92
93 93 # caches
94 94 self._commit_ids = {}
95 95
96 96 @LazyProperty
97 97 def _remote(self):
98 98 repo_id = self.path
99 99 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
100 100
101 101 @CachedProperty
102 102 def commit_ids(self):
103 103 """
104 104 Returns list of commit ids, in ascending order. Being a lazy
105 105 attribute allows external tools to inject shas from cache.
106 106 """
107 107 commit_ids = self._get_all_commit_ids()
108 108 self._rebuild_cache(commit_ids)
109 109 return commit_ids
110 110
111 111 def _rebuild_cache(self, commit_ids):
112 112 self._commit_ids = {commit_id: index
113 113 for index, commit_id in enumerate(commit_ids)}
114 114
115 115 @CachedProperty
116 116 def branches(self):
117 117 return self._get_branches()
118 118
119 119 @CachedProperty
120 120 def branches_closed(self):
121 121 return self._get_branches(active=False, closed=True)
122 122
123 123 @CachedProperty
124 124 def branches_all(self):
125 125 all_branches = {}
126 126 all_branches.update(self.branches)
127 127 all_branches.update(self.branches_closed)
128 128 return all_branches
129 129
130 130 def _get_branches(self, active=True, closed=False):
131 131 """
132 132 Gets branches for this repository
133 133 Returns only active, not closed branches by default
134 134
135 135 :param active: if True, include active branches
136 136 :param closed: if True, include closed branches
137 137
138 138 """
139 139 if self.is_empty():
140 140 return {}
141 141
142 142 def get_name(ctx):
143 143 return ctx[0]
144 144
145 145 _branches = [(n, h,) for n, h in
146 146 self._remote.branches(active, closed).items()]
147 147
148 148 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
149 149
150 150 @CachedProperty
151 151 def tags(self):
152 152 """
153 153 Gets tags for this repository
154 154 """
155 155 return self._get_tags()
156 156
157 157 def _get_tags(self):
158 158 if self.is_empty():
159 159 return {}
160 160
161 161 def get_name(ctx):
162 162 return ctx[0]
163 163
164 164 _tags = [(n, h,) for n, h in
165 165 self._remote.tags().items()]
166 166
167 167 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
168 168
169 169 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
170 170 """
171 171 Creates and returns a tag for the given ``commit_id``.
172 172
173 173 :param name: name for new tag
174 174 :param user: full username, e.g.: "Joe Doe <joe.doe@example.com>"
175 175 :param commit_id: commit id for which new tag would be created
176 176 :param message: message of the tag's commit
177 177 :param date: date of tag's commit
178 178
179 179 :raises TagAlreadyExistError: if tag with same name already exists
180 180 """
181 181 if name in self.tags:
182 182 raise TagAlreadyExistError("Tag %s already exists" % name)
183 183
184 184 commit = self.get_commit(commit_id=commit_id)
185 185 local = kwargs.setdefault('local', False)
186 186
187 187 if message is None:
188 188 message = f"Added tag {name} for commit {commit.short_id}"
189 189
190 190 date, tz = date_to_timestamp_plus_offset(date)
191 191
192 192 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
193 193 self._remote.invalidate_vcs_cache()
194 194
195 195 # Reinitialize tags
196 196 self._invalidate_prop_cache('tags')
197 197 tag_id = self.tags[name]
198 198
199 199 return self.get_commit(commit_id=tag_id)
200 200
201 201 def remove_tag(self, name, user, message=None, date=None):
202 202 """
203 203 Removes tag with the given `name`.
204 204
205 205 :param name: name of the tag to be removed
206 206 :param user: full username, e.g.: "Joe Doe <joe.doe@example.com>"
207 207 :param message: message of the tag's removal commit
208 208 :param date: date of tag's removal commit
209 209
210 210 :raises TagDoesNotExistError: if tag with given name does not exist
211 211 """
212 212 if name not in self.tags:
213 213 raise TagDoesNotExistError("Tag %s does not exist" % name)
214 214
215 215 if message is None:
216 216 message = "Removed tag %s" % name
217 217 local = False
218 218
219 219 date, tz = date_to_timestamp_plus_offset(date)
220 220
221 221 self._remote.tag(name, nullid, message, local, user, date, tz)
222 222 self._remote.invalidate_vcs_cache()
223 223 self._invalidate_prop_cache('tags')
224 224
225 225 @LazyProperty
226 226 def bookmarks(self):
227 227 """
228 228 Gets bookmarks for this repository
229 229 """
230 230 return self._get_bookmarks()
231 231
232 232 def _get_bookmarks(self):
233 233 if self.is_empty():
234 234 return {}
235 235
236 236 def get_name(ctx):
237 237 return ctx[0]
238 238
239 239 _bookmarks = [
240 240 (n, h) for n, h in
241 241 self._remote.bookmarks().items()]
242 242
243 243 return OrderedDict(sorted(_bookmarks, key=get_name))
244 244
245 245 def _get_all_commit_ids(self):
246 246 return self._remote.get_all_commit_ids('visible')
247 247
248 248 def get_diff(
249 249 self, commit1, commit2, path='', ignore_whitespace=False,
250 250 context=3, path1=None):
251 251 """
252 252 Returns (git-like) *diff*, as plain text. Shows changes introduced by
253 253 `commit2` since `commit1`.
254 254
255 255 :param commit1: Entry point from which diff is shown. Can be
256 256 ``self.EMPTY_COMMIT`` - in this case the patch shows all
257 257 the changes from the empty state of the repository up to `commit2`
258 258 :param commit2: The commit up to which changes should be shown.
259 259 :param ignore_whitespace: If set to ``True``, would not show whitespace
260 260 changes. Defaults to ``False``.
261 261 :param context: How many lines before/after changed lines should be
262 262 shown. Defaults to ``3``.
263 263 """
264 264 self._validate_diff_commits(commit1, commit2)
265 265 if path1 is not None and path1 != path:
266 266 raise ValueError("Diff of two different paths not supported.")
267 267
268 268 if path:
269 269 file_filter = [self.path, path]
270 270 else:
271 271 file_filter = None
272 272
273 273 diff = self._remote.diff(
274 274 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
275 275 opt_git=True, opt_ignorews=ignore_whitespace,
276 276 context=context)
277 277 return MercurialDiff(diff)
278 278
279 279 def strip(self, commit_id, branch=None):
280 280 self._remote.strip(commit_id, update=False, backup=False)
281 281
282 282 self._remote.invalidate_vcs_cache()
283 283 # clear cache
284 284 self._invalidate_prop_cache('commit_ids')
285 285
286 286 return len(self.commit_ids)
287 287
288 288 def verify(self):
289 289 verify = self._remote.verify()
290 290
291 291 self._remote.invalidate_vcs_cache()
292 292 return verify
293 293
294 294 def hg_update_cache(self):
295 295 update_cache = self._remote.hg_update_cache()
296 296
297 297 self._remote.invalidate_vcs_cache()
298 298 return update_cache
299 299
300 300 def hg_rebuild_fn_cache(self):
301 301 update_cache = self._remote.hg_rebuild_fn_cache()
302 302
303 303 self._remote.invalidate_vcs_cache()
304 304 return update_cache
305 305
306 306 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
307 307 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
308 308 self, commit_id1, repo2, commit_id2)
309 309
310 310 if commit_id1 == commit_id2:
311 311 return commit_id1
312 312
313 313 ancestors = self._remote.revs_from_revspec(
314 314 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
315 315 other_path=repo2.path)
316 316
317 317 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
318 318
319 319 log.debug('Found common ancestor with sha: %s', ancestor_id)
320 320 return ancestor_id
321 321
322 322 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
323 323 if commit_id1 == commit_id2:
324 324 commits = []
325 325 else:
326 326 if merge:
327 327 indexes = self._remote.revs_from_revspec(
328 328 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
329 329 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
330 330 else:
331 331 indexes = self._remote.revs_from_revspec(
332 332 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
333 333 commit_id1, other_path=repo2.path)
334 334
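# A reading of the revsets above: in Mercurial, ancestors(x) includes x
# itself, so the merge variant selects every commit reachable from
# commit_id2 that is not reachable from commit_id1, while the non-merge
# variant takes the DAG range id(c1)..id(c2) and then drops commit_id1.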
335 335 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
336 336 for idx in indexes]
337 337
338 338 return commits
339 339
340 340 @staticmethod
341 341 def check_url(url, config):
342 342 """
343 343 Function will check the given url and try to verify that it's a valid
344 344 link. Sometimes it may happen that mercurial issues a basic
345 345 auth request, which can cause the whole API to hang when used from
346 346 python or other external calls.
347 347
348 348 On failures it'll raise urllib.error.HTTPError; the exception is also
349 349 raised when the return code is not 200
350 350 """
351 351 # check first if it's not a local url
352 352 if os.path.isdir(url) or url.startswith('file:'):
353 353 return True
354 354
355 355 # Request the _remote to verify the url
356 356 return connection.Hg.check_url(url, config.serialize())
357 357
358 358 @staticmethod
359 359 def is_valid_repository(path):
360 360 return os.path.isdir(os.path.join(path, '.hg'))
361 361
362 362 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
363 363 """
364 364 Function will check for a mercurial repository in the given path. If
365 365 there is no repository in that path it will raise an exception unless
366 366 the `create` parameter is set to True - in that case the repository
367 367 will be created.
368 368
369 369 If `src_url` is given, the repository will be cloned from that
370 370 location. Additionally, the working copy is updated according to the
371 371 `do_workspace_checkout` flag.
372 372 """
373 373 if create and os.path.exists(self.path):
374 374 raise RepositoryError(
375 375 f"Cannot create repository at {self.path}, location already exist")
376 376
377 377 if src_url:
378 378 url = str(self._get_url(src_url))
379 379 MercurialRepository.check_url(url, self.config)
380 380
381 381 self._remote.clone(url, self.path, do_workspace_checkout)
382 382
383 383 # Don't try to create if we've already cloned repo
384 384 create = False
385 385
386 386 if create:
387 387 os.makedirs(self.path, mode=0o755)
388 388
389 389 self._remote.localrepository(create)
390 390
391 391 @LazyProperty
392 392 def in_memory_commit(self):
393 393 return MercurialInMemoryCommit(self)
394 394
395 395 @LazyProperty
396 396 def description(self):
397 397 description = self._remote.get_config_value(
398 398 'web', 'description', untrusted=True)
399 399 return safe_str(description or self.DEFAULT_DESCRIPTION)
400 400
401 401 @LazyProperty
402 402 def contact(self):
403 403 contact = (
404 404 self._remote.get_config_value("web", "contact") or
405 405 self._remote.get_config_value("ui", "username"))
406 406 return safe_str(contact or self.DEFAULT_CONTACT)
407 407
408 408 @LazyProperty
409 409 def last_change(self):
410 410 """
411 411 Returns last change made on this repository as
412 412 `datetime.datetime` object.
413 413 """
414 414 try:
415 415 return self.get_commit().date
416 416 except RepositoryError:
417 417 tzoffset = makedate()[1]
418 418 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
419 419
420 420 def _get_fs_mtime(self):
421 421 # fallback to filesystem
422 422 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
423 423 st_path = os.path.join(self.path, '.hg', "store")
424 424 if os.path.exists(cl_path):
425 425 return os.stat(cl_path).st_mtime
426 426 else:
427 427 return os.stat(st_path).st_mtime
428 428
429 429 def _get_url(self, url):
430 430 """
431 431 Returns normalized url. If no scheme is given, falls back
432 432 to the filesystem
433 433 (``file:///``) scheme.
434 434 """
435 435 if url != 'default' and '://' not in url:
436 436 url = "file:" + urllib.request.pathname2url(url)
437 437 return url
438 438
439 439 def get_hook_location(self):
440 440 """
441 441 returns absolute path to location where hooks are stored
442 442 """
443 443 return os.path.join(self.path, '.hg', '.hgrc')
444 444
445 445 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
446 446 translate_tag=None, maybe_unreachable=False, reference_obj=None):
447 447 """
448 448 Returns ``MercurialCommit`` object representing repository's
449 449 commit at the given `commit_id` or `commit_idx`.
450 450 """
451 451 if self.is_empty():
452 452 raise EmptyRepositoryError("There are no commits yet")
453 453
454 454 if commit_id is not None:
455 455 self._validate_commit_id(commit_id)
456 456 try:
457 457 # we have cached idx, use it without contacting the remote
458 458 idx = self._commit_ids[commit_id]
459 459 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
460 460 except KeyError:
461 461 pass
462 462
463 463 elif commit_idx is not None:
464 464 self._validate_commit_idx(commit_idx)
465 465 try:
466 466 _commit_id = self.commit_ids[commit_idx]
467 467 if commit_idx < 0:
468 468 commit_idx = self.commit_ids.index(_commit_id)
469 469
470 470 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
471 471 except IndexError:
472 472 commit_id = commit_idx
473 473 else:
474 474 commit_id = "tip"
475 475
476 476 # case here is no cached version, do an actual lookup instead
477 477 try:
478 478 raw_id, idx = self._remote.lookup(commit_id, both=True)
479 479 except CommitDoesNotExistError:
480 480 msg = "Commit {} does not exist for `{}`".format(
481 481 *map(safe_str, [commit_id, self.name]))
482 482 raise CommitDoesNotExistError(msg)
483 483
484 484 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
485 485
486 486 def get_commits(
487 487 self, start_id=None, end_id=None, start_date=None, end_date=None,
488 488 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
489 489 """
490 490 Returns generator of ``MercurialCommit`` objects from start to end
491 491 (both are inclusive)
492 492
493 493 :param start_id: None, str(commit_id)
494 494 :param end_id: None, str(commit_id)
495 495 :param start_date: if specified, commits with commit date less than
496 496 ``start_date`` would be filtered out from returned set
497 497 :param end_date: if specified, commits with commit date greater than
498 498 ``end_date`` would be filtered out from returned set
499 499 :param branch_name: if specified, commits not reachable from given
500 500 branch would be filtered out from returned set
501 501 :param show_hidden: Show hidden commits, such as obsolete or hidden
502 502 commits from Mercurial evolve
503 503 :raise BranchDoesNotExistError: If given ``branch_name`` does not
504 504 exist.
505 505 :raise CommitDoesNotExistError: If commit for given ``start`` or
506 506 ``end`` could not be found.
507 507 """
508 508 # actually we should check now if it's not an empty repo
509 509 if self.is_empty():
510 510 raise EmptyRepositoryError("There are no commits yet")
511 511 self._validate_branch_name(branch_name)
512 512
513 513 branch_ancestors = False
514 514 if start_id is not None:
515 515 self._validate_commit_id(start_id)
516 516 c_start = self.get_commit(commit_id=start_id)
517 517 start_pos = self._commit_ids[c_start.raw_id]
518 518 else:
519 519 start_pos = None
520 520
521 521 if end_id is not None:
522 522 self._validate_commit_id(end_id)
523 523 c_end = self.get_commit(commit_id=end_id)
524 524 end_pos = max(0, self._commit_ids[c_end.raw_id])
525 525 else:
526 526 end_pos = None
527 527
528 528 if None not in [start_id, end_id] and start_pos > end_pos:
529 529 raise RepositoryError(
530 530 "Start commit '%s' cannot be after end commit '%s'" %
531 531 (start_id, end_id))
532 532
533 533 if end_pos is not None:
534 534 end_pos += 1
535 535
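# Example of the filter built below: with e.g. branch_name='default',
# start_date='2023-01-01 00:00:00' and show_hidden=False, the pieces join
# into a revset such as
#   branch("default") and date(">2023-01-01 00:00:00") and not obsolete() and not hidden()
# which is then evaluated via self._remote.rev_range().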
536 536 commit_filter = []
537 537
538 538 if branch_name and not branch_ancestors:
539 539 commit_filter.append(f'branch("{branch_name}")')
540 540 elif branch_name and branch_ancestors:
541 541 commit_filter.append(f'ancestors(branch("{branch_name}"))')
542 542
543 543 if start_date and not end_date:
544 544 commit_filter.append(f'date(">{start_date}")')
545 545 if end_date and not start_date:
546 546 commit_filter.append(f'date("<{end_date}")')
547 547 if start_date and end_date:
548 548 commit_filter.append(
549 549 f'date(">{start_date}") and date("<{end_date}")')
550 550
551 551 if not show_hidden:
552 552 commit_filter.append('not obsolete()')
553 553 commit_filter.append('not hidden()')
554 554
555 555 # TODO: johbo: Figure out a simpler way for this solution
556 556 collection_generator = CollectionGenerator
557 557 if commit_filter:
558 558 commit_filter = ' and '.join(map(safe_str, commit_filter))
559 559 revisions = self._remote.rev_range([commit_filter])
560 560 collection_generator = MercurialIndexBasedCollectionGenerator
561 561 else:
562 562 revisions = self.commit_ids
563 563
564 564 if start_pos or end_pos:
565 565 revisions = revisions[start_pos:end_pos]
566 566
567 567 return collection_generator(self, revisions, pre_load=pre_load)
568 568
569 569 def pull(self, url, commit_ids=None):
570 570 """
571 571 Pull changes from external location.
572 572
573 573 :param commit_ids: Optional. Can be set to a list of commit ids
574 574 which shall be pulled from the other repository.
575 575 """
576 576 url = self._get_url(url)
577 577 self._remote.pull(url, commit_ids=commit_ids)
578 578 self._remote.invalidate_vcs_cache()
579 579
580 580 def fetch(self, url, commit_ids=None):
581 581 """
582 582 Backward compatibility with GIT fetch==pull
583 583 """
584 584 return self.pull(url, commit_ids=commit_ids)
585 585
586 586 def push(self, url):
587 587 url = self._get_url(url)
588 588 self._remote.sync_push(url)
589 589
590 590 def _local_clone(self, clone_path):
591 591 """
592 592 Create a local clone of the current repo.
593 593 """
594 594 self._remote.clone(self.path, clone_path, update_after_clone=True,
595 595 hooks=False)
596 596
597 597 def _update(self, revision, clean=False):
598 598 """
599 599 Update the working copy to the specified revision.
600 600 """
601 601 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
602 602 self._remote.update(revision, clean=clean)
603 603
604 604 def _identify(self):
605 605 """
606 606 Return the current state of the working directory.
607 607 """
608 608 return self._remote.identify().strip().rstrip('+')
609 609
610 610 def _heads(self, branch=None):
611 611 """
612 612 Return the commit ids of the repository heads.
613 613 """
614 614 return self._remote.heads(branch=branch).strip().split(' ')
615 615
616 616 def _ancestor(self, revision1, revision2):
617 617 """
618 618 Return the common ancestor of the two revisions.
619 619 """
620 620 return self._remote.ancestor(revision1, revision2)
621 621
622 622 def _local_push(
623 623 self, revision, repository_path, push_branches=False,
624 624 enable_hooks=False):
625 625 """
626 626 Push the given revision to the specified repository.
627 627
628 628 :param push_branches: allow creating branches in the target repo.
629 629 """
630 630 self._remote.push(
631 631 [revision], repository_path, hooks=enable_hooks,
632 632 push_branches=push_branches)
633 633
634 634 def _local_merge(self, target_ref, merge_message, user_name, user_email,
635 635 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
636 636 """
637 637 Merge the given source_revision into the checked out revision.
638 638
639 639 Returns the commit id of the merge and a boolean indicating if the
640 640 commit needs to be pushed.
641 641 """
642 642
643 643 source_ref_commit_id = source_ref.commit_id
644 644 target_ref_commit_id = target_ref.commit_id
645 645
646 646 # update our workdir to target ref, for proper merge
647 647 self._update(target_ref_commit_id, clean=True)
648 648
649 649 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
650 650 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
651 651
652 652 if close_commit_id:
653 653 # NOTE(marcink): if we get the close commit, this is our new source
654 654 # which will include the close commit itself.
655 655 source_ref_commit_id = close_commit_id
656 656
657 657 if ancestor == source_ref_commit_id:
658 658 # Nothing to do, the changes were already integrated
659 659 return target_ref_commit_id, False
660 660
661 661 elif ancestor == target_ref_commit_id and is_the_same_branch:
662 662 # In this case we should force a commit message
663 663 return source_ref_commit_id, True
664 664
665 665 unresolved = None
666 666 if use_rebase:
667 667 try:
668 668 bookmark_name = f'rcbook{source_ref_commit_id}{target_ref_commit_id}'
669 669 self.bookmark(bookmark_name, revision=source_ref.commit_id)
670 670 self._remote.rebase(
671 671 source=source_ref_commit_id, dest=target_ref_commit_id)
672 672 self._remote.invalidate_vcs_cache()
673 673 self._update(bookmark_name, clean=True)
674 674 return self._identify(), True
675 675 except RepositoryError as e:
676 676 # The rebase-abort may raise another exception which 'hides'
677 677 # the original one, therefore we log it here.
678 678 log.exception('Error while rebasing shadow repo during merge.')
679 679 if 'unresolved conflicts' in safe_str(e):
680 680 unresolved = self._remote.get_unresolved_files()
681 681 log.debug('unresolved files: %s', unresolved)
682 682
683 683 # Cleanup any rebase leftovers
684 684 self._remote.invalidate_vcs_cache()
685 685 self._remote.rebase(abort=True)
686 686 self._remote.invalidate_vcs_cache()
687 687 self._remote.update(clean=True)
688 688 if unresolved:
689 689 raise UnresolvedFilesInRepo(unresolved)
690 690 else:
691 691 raise
692 692 else:
693 693 try:
694 694 self._remote.merge(source_ref_commit_id)
695 695 self._remote.invalidate_vcs_cache()
696 696 self._remote.commit(
697 697 message=safe_str(merge_message),
698 698 username=safe_str(f'{user_name} <{user_email}>'))
699 699 self._remote.invalidate_vcs_cache()
700 700 return self._identify(), True
701 701 except RepositoryError as e:
702 702 # The merge-abort may raise another exception which 'hides'
703 703 # the original one, therefore we log it here.
704 704 log.exception('Error while merging shadow repo during merge.')
705 705 if 'unresolved merge conflicts' in safe_str(e):
706 706 unresolved = self._remote.get_unresolved_files()
707 707 log.debug('unresolved files: %s', unresolved)
708 708
709 709 # Cleanup any merge leftovers
710 710 self._remote.update(clean=True)
711 711 if unresolved:
712 712 raise UnresolvedFilesInRepo(unresolved)
713 713 else:
714 714 raise
715 715
716 716 def _local_close(self, target_ref, user_name, user_email,
717 717 source_ref, close_message=''):
718 718 """
719 719 Close the branch of the given source_revision
720 720
721 721 Returns the commit id of the close and a boolean indicating if the
722 722 commit needs to be pushed.
723 723 """
724 724 self._update(source_ref.commit_id)
725 725 message = close_message or f"Closing branch: `{source_ref.name}`"
726 726 try:
727 727 self._remote.commit(
728 728 message=safe_str(message),
729 729 username=safe_str(f'{user_name} <{user_email}>'),
730 730 close_branch=True)
731 731 self._remote.invalidate_vcs_cache()
732 732 return self._identify(), True
733 733 except RepositoryError:
734 734 # Cleanup any commit leftovers
735 735 self._remote.update(clean=True)
736 736 raise
737 737
738 738 def _is_the_same_branch(self, target_ref, source_ref):
739 739 return (
740 740 self._get_branch_name(target_ref) ==
741 741 self._get_branch_name(source_ref))
742 742
743 743 def _get_branch_name(self, ref):
744 744 if ref.type == 'branch':
745 745 return ref.name
746 746 return self._remote.ctx_branch(ref.commit_id)
747 747
748 748 def _maybe_prepare_merge_workspace(
749 749 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
750 750 shadow_repository_path = self._get_shadow_repository_path(
751 751 self.path, repo_id, workspace_id)
752 752 if not os.path.exists(shadow_repository_path):
753 753 self._local_clone(shadow_repository_path)
754 754 log.debug(
755 755 'Prepared shadow repository in %s', shadow_repository_path)
756 756
757 757 return shadow_repository_path
758 758
759 759 def _merge_repo(self, repo_id, workspace_id, target_ref,
760 760 source_repo, source_ref, merge_message,
761 761 merger_name, merger_email, dry_run=False,
762 762 use_rebase=False, close_branch=False):
763 763
764 764 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
765 765 'rebase' if use_rebase else 'merge', dry_run)
766 766
767 767 if target_ref.commit_id not in self._heads():
768 768 return MergeResponse(
769 769 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
770 770 metadata={'target_ref': target_ref})
771 771
772 772 try:
773 773 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
774 774 heads_all = self._heads(target_ref.name)
775 775 max_heads = 10
776 776 if len(heads_all) > max_heads:
777 777 heads = '\n,'.join(
778 778 heads_all[:max_heads] +
779 779 [f'and {len(heads_all)-max_heads} more.'])
780 780 else:
781 781 heads = '\n,'.join(heads_all)
782 782 metadata = {
783 783 'target_ref': target_ref,
784 784 'source_ref': source_ref,
785 785 'heads': heads
786 786 }
787 787 return MergeResponse(
788 788 False, False, None,
789 789 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
790 790 metadata=metadata)
791 791 except CommitDoesNotExistError:
792 792 log.exception('Failure when looking up branch heads on hg target')
793 793 return MergeResponse(
794 794 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
795 795 metadata={'target_ref': target_ref})
796 796
797 797 shadow_repository_path = self._maybe_prepare_merge_workspace(
798 798 repo_id, workspace_id, target_ref, source_ref)
799 799 shadow_repo = self.get_shadow_instance(shadow_repository_path)
800 800
801 801 log.debug('Pulling in target reference %s', target_ref)
802 802 self._validate_pull_reference(target_ref)
803 803 shadow_repo._local_pull(self.path, target_ref)
804 804
805 805 try:
806 806 log.debug('Pulling in source reference %s', source_ref)
807 807 source_repo._validate_pull_reference(source_ref)
808 808 shadow_repo._local_pull(source_repo.path, source_ref)
809 809 except CommitDoesNotExistError:
810 810 log.exception('Failure when doing local pull on hg shadow repo')
811 811 return MergeResponse(
812 812 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
813 813 metadata={'source_ref': source_ref})
814 814
815 815 merge_ref = None
816 816 merge_commit_id = None
817 817 close_commit_id = None
818 818 merge_failure_reason = MergeFailureReason.NONE
819 819 metadata = {}
820 820
821 821 # enforce that close branch should be used only in case we source from
822 822 # an actual Branch
823 823 close_branch = close_branch and source_ref.type == 'branch'
824 824
825 825 # don't allow to close branch if source and target are the same
826 826 close_branch = close_branch and source_ref.name != target_ref.name
827 827
828 828 needs_push_on_close = False
829 829 if close_branch and not use_rebase and not dry_run:
830 830 try:
831 831 close_commit_id, needs_push_on_close = shadow_repo._local_close(
832 832 target_ref, merger_name, merger_email, source_ref)
833 833 merge_possible = True
834 834 except RepositoryError:
835 835 log.exception('Failure when doing close branch on '
836 836 'shadow repo: %s', shadow_repo)
837 837 merge_possible = False
838 838 merge_failure_reason = MergeFailureReason.MERGE_FAILED
839 839 else:
840 840 merge_possible = True
841 841
842 842 needs_push = False
843 843 if merge_possible:
844 844
845 845 try:
846 846 merge_commit_id, needs_push = shadow_repo._local_merge(
847 847 target_ref, merge_message, merger_name, merger_email,
848 848 source_ref, use_rebase=use_rebase,
849 849 close_commit_id=close_commit_id, dry_run=dry_run)
850 850 merge_possible = True
851 851
852 852 # read the state of the close action, as it
853 853 # may have required a push
854 854 needs_push = needs_push or needs_push_on_close
855 855
856 856 # Set a bookmark pointing to the merge commit. This bookmark
857 857 # may be used to easily identify the last successful merge
858 858 # commit in the shadow repository.
859 859 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
860 860 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
861 861 except SubrepoMergeError:
862 862 log.exception(
863 863 'Subrepo merge error during local merge on hg shadow repo.')
864 864 merge_possible = False
865 865 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
866 866 needs_push = False
867 867 except RepositoryError as e:
868 868 log.exception('Failure when doing local merge on hg shadow repo')
869 869 metadata['unresolved_files'] = 'no unresolved files found'
870 870
871 871 if isinstance(e, UnresolvedFilesInRepo):
872 872 all_conflicts = list(e.args[0])
873 873 max_conflicts = 20
874 874 if len(all_conflicts) > max_conflicts:
875 875 conflicts = all_conflicts[:max_conflicts] \
876 876 + [f'and {len(all_conflicts)-max_conflicts} more.']
877 877 else:
878 878 conflicts = all_conflicts
879 879 metadata['unresolved_files'] = \
880 880 '\n* conflict: ' + \
881 881 ('\n * conflict: '.join(conflicts))
882 882
883 883 merge_possible = False
884 884 merge_failure_reason = MergeFailureReason.MERGE_FAILED
885 885 needs_push = False
886 886
887 887 if merge_possible and not dry_run:
888 888 if needs_push:
889 889 # In case the target is a bookmark, update it, so that after pushing
890 890 # the bookmark is also updated in the target.
891 891 if target_ref.type == 'book':
892 892 shadow_repo.bookmark(
893 893 target_ref.name, revision=merge_commit_id)
894 894 try:
895 895 shadow_repo_with_hooks = self.get_shadow_instance(
896 896 shadow_repository_path,
897 897 enable_hooks=True)
898 898 # This is the actual merge action, we push from shadow
899 899 # into origin.
900 900 # Note: the push_branches option will push any new branch
901 901 # defined in the source repository to the target. This may
902 902 # be dangerous as branches are permanent in Mercurial.
903 903 # This feature was requested in issue #441.
904 904 shadow_repo_with_hooks._local_push(
905 905 merge_commit_id, self.path, push_branches=True,
906 906 enable_hooks=True)
907 907
908 908 # maybe we also need to push the close_commit_id
909 909 if close_commit_id:
910 910 shadow_repo_with_hooks._local_push(
911 911 close_commit_id, self.path, push_branches=True,
912 912 enable_hooks=True)
913 913 merge_succeeded = True
914 914 except RepositoryError:
915 915 log.exception(
916 916 'Failure when doing local push from the shadow '
917 917 'repository to the target repository at %s.', self.path)
918 918 merge_succeeded = False
919 919 merge_failure_reason = MergeFailureReason.PUSH_FAILED
920 920 metadata['target'] = 'hg shadow repo'
921 921 metadata['merge_commit'] = merge_commit_id
922 922 else:
923 923 merge_succeeded = True
924 924 else:
925 925 merge_succeeded = False
926 926
927 927 return MergeResponse(
928 928 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
929 929 metadata=metadata)
930 930
931 931 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
932 932 config = self.config.copy()
933 933 if not enable_hooks:
934 934 config.clear_section('hooks')
935 935 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
936 936
937 937 def _validate_pull_reference(self, reference):
938 938 if not (reference.name in self.bookmarks or
939 939 reference.name in self.branches or
940 940 self.get_commit(reference.commit_id)):
941 941 raise CommitDoesNotExistError(
942 942 'Unknown branch, bookmark or commit id')
943 943
944 944 def _local_pull(self, repository_path, reference):
945 945 """
946 946 Fetch a branch, bookmark or commit from a local repository.
947 947 """
948 948 repository_path = os.path.abspath(repository_path)
949 949 if repository_path == self.path:
950 950 raise ValueError('Cannot pull from the same repository')
951 951
952 952 reference_type_to_option_name = {
953 953 'book': 'bookmark',
954 954 'branch': 'branch',
955 955 }
956 956 option_name = reference_type_to_option_name.get(
957 957 reference.type, 'revision')
958 958
959 959 if option_name == 'revision':
960 960 ref = reference.commit_id
961 961 else:
962 962 ref = reference.name
963 963
964 964 options = {option_name: [ref]}
965 965 self._remote.pull_cmd(repository_path, hooks=False, **options)
966 966 self._remote.invalidate_vcs_cache()
967 967
968 968 def bookmark(self, bookmark, revision=None):
969 969 if isinstance(bookmark, str):
970 970 bookmark = safe_str(bookmark)
971 971 self._remote.bookmark(bookmark, revision=revision)
972 972 self._remote.invalidate_vcs_cache()
973 973
974 974 def get_path_permissions(self, username):
975 975 hgacl_file = os.path.join(self.path, '.hg/hgacl')
976 976
977 977 def read_patterns(suffix):
978 978 svalue = None
979 979 for section, option in [
980 980 ('narrowacl', username + suffix),
981 981 ('narrowacl', 'default' + suffix),
982 982 ('narrowhgacl', username + suffix),
983 983 ('narrowhgacl', 'default' + suffix)
984 984 ]:
985 985 try:
986 986 svalue = hgacl.get(section, option)
987 987 break # stop at the first value we find
988 988 except configparser.NoOptionError:
989 989 pass
990 990 if not svalue:
991 991 return None
992 992 result = ['/']
993 993 for pattern in svalue.split():
994 994 result.append(pattern)
995 995 if '*' not in pattern and '?' not in pattern:
996 996 result.append(pattern + '/*')
997 997 return result
998 998
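# Example: given a hypothetical .hg/hgacl such as
#   [narrowacl]
#   default.includes = docs libs/*
# read_patterns('.includes') returns ['/', 'docs', 'docs/*', 'libs/*'];
# patterns without wildcards are expanded to also cover everything below
# that directory, while glob patterns are passed through unchanged.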
999 999 if os.path.exists(hgacl_file):
1000 1000 try:
1001 1001 hgacl = configparser.RawConfigParser()
1002 1002 hgacl.read(hgacl_file)
1003 1003
1004 1004 includes = read_patterns('.includes')
1005 1005 excludes = read_patterns('.excludes')
1006 1006 return BasePathPermissionChecker.create_from_patterns(
1007 1007 includes, excludes)
1008 1008 except BaseException as e:
1009 1009 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
1010 1010 hgacl_file, self.name, e)
1011 1011 raise exceptions.RepositoryRequirementError(msg)
1012 1012 else:
1013 1013 return None
1014 1014
1015 1015
1016 1016 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1017 1017
1018 1018 def _commit_factory(self, commit_id):
1019 1019 if isinstance(commit_id, int):
1020 1020 return self.repo.get_commit(
1021 1021 commit_idx=commit_id, pre_load=self.pre_load)
1022 1022 else:
1023 1023 return self.repo.get_commit(
1024 1024 commit_id=commit_id, pre_load=self.pre_load)