@@ -309,9 +309,15 @@ def _changesetforwardcopies(a, b, match)
     iterrevs.update(roots)
     iterrevs.remove(b.rev())
     revs = sorted(iterrevs)
-    return _combine_changeset_copies(
-        revs, children, b.rev(), revinfo, match, isancestor
-    )
+
+    if repo.filecopiesmode == b'changeset-sidedata':
+        return _combine_changeset_copies(
+            revs, children, b.rev(), revinfo, match, isancestor
+        )
+    else:
+        return _combine_changeset_copies_extra(
+            revs, children, b.rev(), revinfo, match, isancestor
+        )
 
 
 def _combine_changeset_copies(
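Both branches above call a combiner with the same arguments, `(revs, children, b.rev(), revinfo, match, isancestor)`, and both are expected to return the `{dst: src}` mapping described by the `_forwardcopies` docstring further down; only the copy-storage format consulted by `revinfo` differs. A minimal sketch of that dispatch, using hypothetical stand-ins (the `fakerepo` namespace, the stub combiners and the dummy arguments are illustration only, not part of this patch):

from types import SimpleNamespace


def combine_sidedata(revs, children, targetrev, revinfo, match, isancestor):
    # stand-in for _combine_changeset_copies: returns a {dst: src} mapping
    return {b'renamed.txt': b'original.txt'}


def combine_extra(revs, children, targetrev, revinfo, match, isancestor):
    # stand-in for _combine_changeset_copies_extra: same signature, same shape
    return {b'renamed.txt': b'original.txt'}


def changesetforwardcopies(
    repo, revs, children, targetrev, revinfo, match, isancestor
):
    # mirrors the branch added above: the copy-storage format recorded on the
    # repository object selects the implementation
    if repo.filecopiesmode == b'changeset-sidedata':
        return combine_sidedata(
            revs, children, targetrev, revinfo, match, isancestor
        )
    return combine_extra(revs, children, targetrev, revinfo, match, isancestor)


fakerepo = SimpleNamespace(filecopiesmode=b'extra')
print(changesetforwardcopies(fakerepo, [], {}, 0, None, None, None))
# -> {b'renamed.txt': b'original.txt'}, via the extras-based combiner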
@@ -422,6 +428,98 @@ def _merge_copies_dict(minor, major, isa
                 minor[dest] = value
 
 
+def _combine_changeset_copies_extra(
+    revs, children, targetrev, revinfo, match, isancestor
+):
+    """version of `_combine_changeset_copies` that works with the Google
+    specific "extra" based storage for copy information"""
+    all_copies = {}
+    alwaysmatch = match.always()
+    for r in revs:
+        copies = all_copies.pop(r, None)
+        if copies is None:
+            # this is a root
+            copies = {}
+        for i, c in enumerate(children[r]):
+            p1, p2, p1copies, p2copies, removed, ismerged = revinfo(c)
+            if r == p1:
+                parent = 1
+                childcopies = p1copies
+            else:
+                assert r == p2
+                parent = 2
+                childcopies = p2copies
+            if not alwaysmatch:
+                childcopies = {
+                    dst: src for dst, src in childcopies.items() if match(dst)
+                }
+            newcopies = copies
+            if childcopies:
+                newcopies = copies.copy()
+                for dest, source in pycompat.iteritems(childcopies):
+                    prev = copies.get(source)
+                    if prev is not None and prev[1] is not None:
+                        source = prev[1]
+                    newcopies[dest] = (c, source)
+                assert newcopies is not copies
+            for f in removed:
+                if f in newcopies:
+                    if newcopies is copies:
+                        # copy on write to avoid affecting potential other
+                        # branches. when there are no other branches, this
+                        # could be avoided.
+                        newcopies = copies.copy()
+                    newcopies[f] = (c, None)
+            othercopies = all_copies.get(c)
+            if othercopies is None:
+                all_copies[c] = newcopies
+            else:
+                # we are the second parent to work on c, we need to merge our
+                # work with the other.
+                #
+                # In case of conflict, parent 1 take precedence over parent 2.
+                # This is an arbitrary choice made anew when implementing
+                # changeset based copies. It was made without regards with
+                # potential filelog related behavior.
+                if parent == 1:
+                    _merge_copies_dict_extra(
+                        othercopies, newcopies, isancestor, ismerged
+                    )
+                else:
+                    _merge_copies_dict_extra(
+                        newcopies, othercopies, isancestor, ismerged
+                    )
+                    all_copies[c] = newcopies
+
+    final_copies = {}
+    for dest, (tt, source) in all_copies[targetrev].items():
+        if source is not None:
+            final_copies[dest] = source
+    return final_copies
+
+
+def _merge_copies_dict_extra(minor, major, isancestor, ismerged):
+    """version of `_merge_copies_dict` that works with the Google
+    specific "extra" based storage for copy information"""
+    for dest, value in major.items():
+        other = minor.get(dest)
+        if other is None:
+            minor[dest] = value
+        else:
+            new_tt = value[0]
+            other_tt = other[0]
+            if value[1] == other[1]:
+                continue
+            # content from "major" wins, unless it is older
+            # than the branch point or there is a merge
+            if (
+                new_tt == other_tt
+                or not isancestor(new_tt, other_tt)
+                or ismerged(dest)
+            ):
+                minor[dest] = value
+
+
 def _forwardcopies(a, b, base=None, match=None):
     """find {dst@b: src@a} copy mapping where a is an ancestor of b"""
 
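The comments inside `_combine_changeset_copies_extra` describe the bookkeeping it performs while walking toward the target revision: every pending entry maps a destination file to a `(changeset, source)` pair, a new copy is chained through an earlier one, a removal is recorded as a `None` source, and the dict is duplicated only when a child actually changes it (the copy-on-write noted above). The toy replay below applies a simplified version of that per-child step to an invented two-rename history; the helper and the file names are illustrative, not code from this patch.

def apply_child(copies, child_rev, childcopies, removed):
    # simplified per-child step, in the spirit of _combine_changeset_copies_extra
    newcopies = copies
    if childcopies:
        newcopies = copies.copy()  # copy-on-write before the first change
        for dest, source in childcopies.items():
            prev = copies.get(source)
            if prev is not None and prev[1] is not None:
                source = prev[1]  # chain the new copy through the older one
            newcopies[dest] = (child_rev, source)
    for f in removed:
        if f in newcopies:
            if newcopies is copies:
                newcopies = copies.copy()
            newcopies[f] = (child_rev, None)  # mark as removed downstream
    return newcopies


copies = {}                                            # state at the root
copies = apply_child(copies, 1, {b'b': b'a'}, [])      # rev 1 renames a -> b
copies = apply_child(copies, 2, {b'c': b'b'}, [b'b'])  # rev 2 renames b -> c

# Mirror of the final_copies loop: drop entries whose source is None.
final_copies = {
    dest: src for dest, (tt, src) in copies.items() if src is not None
}
print(final_copies)  # {b'c': b'a'}: the two renames collapse into one copy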
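`_merge_copies_dict_extra` resolves the case where both parents of a merge propagated an entry for the same destination: the `major` side keeps precedence unless its entry is strictly older than the branch point (its changeset is an ancestor of the `minor` one) and the destination was never touched by a merge. The following is a self-contained restatement of that rule with stub `isancestor`/`ismerged` callables over an invented linear history, so the precedence can be exercised on its own; it is a sketch, not the patched function.

def merge_copies(minor, major, isancestor, ismerged):
    # same precedence rule as _merge_copies_dict_extra, on plain dicts
    for dest, value in major.items():
        other = minor.get(dest)
        if other is None:
            minor[dest] = value
        elif value[1] != other[1]:
            new_tt, other_tt = value[0], other[0]
            if (
                new_tt == other_tt
                or not isancestor(new_tt, other_tt)
                or ismerged(dest)
            ):
                minor[dest] = value


def isancestor(anc, desc):
    # toy linear history 1 -> 2 -> 3: lower revisions are ancestors
    return anc < desc


def never_merged(dest):
    return False


minor = {b'f': (3, b'new')}
major = {b'f': (2, b'old')}
merge_copies(minor, major, isancestor, never_merged)
print(minor)  # {b'f': (3, b'new')}: major's rev 2 is an ancestor of 3, minor wins

minor = {b'f': (2, b'old')}
major = {b'f': (3, b'new')}
merge_copies(minor, major, isancestor, never_merged)
print(minor)  # {b'f': (3, b'new')}: rev 3 is not an ancestor of 2, major wins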