# copies.py - copy detection for Mercurial
#
# Copyright 2008 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import collections
import heapq
import os

from .i18n import _

from .revlogutils.flagutil import REVIDX_SIDEDATA

from . import (
    error,
    match as matchmod,
    node,
    pathutil,
    pycompat,
    util,
)

from .revlogutils import sidedata as sidedatamod

from .utils import stringutil


def _findlimit(repo, ctxa, ctxb):
    """
    Find the last revision that needs to be checked to ensure that a full
    transitive closure for file copies can be properly calculated.
    Generally, this means finding the earliest revision number that's an
    ancestor of a or b but not both, except when a or b is a direct descendant
    of the other, in which case we can return the minimum revnum of a and b.
    """

    # basic idea:
    # - mark a and b with different sides
    # - if a parent's children are all on the same side, the parent is
    #   on that side, otherwise it is on no side
    # - walk the graph in topological order with the help of a heap;
    #   - add unseen parents to side map
    #   - clear side of any parent that has children on different sides
    #   - track number of interesting revs that might still be on a side
    #   - track the lowest interesting rev seen
    #   - quit when interesting revs is zero

    cl = repo.changelog
    wdirparents = None
    a = ctxa.rev()
    b = ctxb.rev()
    if a is None:
        wdirparents = (ctxa.p1(), ctxa.p2())
        a = node.wdirrev
    if b is None:
        assert not wdirparents
        wdirparents = (ctxb.p1(), ctxb.p2())
        b = node.wdirrev

    side = {a: -1, b: 1}
    visit = [-a, -b]
    heapq.heapify(visit)
    interesting = len(visit)
    limit = node.wdirrev

    while interesting:
        r = -(heapq.heappop(visit))
        if r == node.wdirrev:
            parents = [pctx.rev() for pctx in wdirparents]
        else:
            parents = cl.parentrevs(r)
        if parents[1] == node.nullrev:
            parents = parents[:1]
        for p in parents:
            if p not in side:
                # first time we see p; add it to visit
                side[p] = side[r]
                if side[p]:
                    interesting += 1
                heapq.heappush(visit, -p)
            elif side[p] and side[p] != side[r]:
                # p was interesting but now we know better
                side[p] = 0
                interesting -= 1
        if side[r]:
            limit = r  # lowest rev visited
            interesting -= 1

    # Consider the following flow (see test-commit-amend.t under issue4405):
    # 1/ File 'a0' committed
    # 2/ File renamed from 'a0' to 'a1' in a new commit (call it 'a1')
    # 3/ Move back to first commit
    # 4/ Create a new commit via revert to contents of 'a1' (call it 'a1-amend')
    # 5/ Rename file from 'a1' to 'a2' and commit --amend 'a1-msg'
    #
    # During the amend in step five, we will be in this state:
    #
    # @  3 temporary amend commit for a1-amend
    # |
    # o  2 a1-amend
    # |
    # | o  1 a1
    # |/
    # o  0 a0
    #
    # When _findlimit is called, a and b are revs 3 and 0, so limit will be 2,
    # yet the filelog has the copy information in rev 1 and we will not look
    # back far enough unless we also look at the a and b as candidates.
    # This only occurs when a is a descendant of b or vice versa.
    return min(limit, a, b)


def _filter(src, dst, t):
    """filters out invalid copies after chaining"""

    # When _chain()'ing copies in 'a' (from 'src' via some other commit 'mid')
    # with copies in 'b' (from 'mid' to 'dst'), we can get the different cases
    # in the following table (not including trivial cases). For example, case 2
    # is where a file existed in 'src' and remained under that name in 'mid' and
    # then was renamed between 'mid' and 'dst'.
    #
    # case  src  mid  dst  result
    #   1    x    y    -     -
    #   2    x    y    y    x->y
    #   3    x    y    x     -
    #   4    x    y    z    x->z
    #   5    -    x    y     -
    #   6    x    x    y    x->y
    #
    # _chain() takes care of chaining the copies in 'a' and 'b', but it
    # cannot tell the difference between cases 1 and 2, between 3 and 4, or
    # between 5 and 6, so it includes all cases in its result.
    # Cases 1, 3, and 5 are then removed by _filter().

    for k, v in list(t.items()):
        # remove copies from files that didn't exist
        if v not in src:
            del t[k]
        # remove criss-crossed copies
        elif k in src and v in dst:
            del t[k]
        # remove copies to files that were then removed
        elif k not in dst:
            del t[k]
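
# An illustrative sketch (not part of the upstream module): with 'src'
# containing only 'x', 'mid' renaming 'x' to 'y', and 'dst' renaming 'y' to
# 'z' (cases 1 and 4 in the table above), _chain() yields both entries and
# _filter() prunes the one whose destination no longer exists in 'dst':
#
#   t = _chain({b'y': b'x'}, {b'z': b'y'})   # {b'y': b'x', b'z': b'x'}
#   _filter(src, dst, t)                     # t becomes {b'z': b'x'}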


def _chain(a, b):
    """chain two sets of copies 'a' and 'b'"""
    t = a.copy()
    for k, v in pycompat.iteritems(b):
        if v in t:
            t[k] = t[v]
        else:
            t[k] = v
    return t


def _tracefile(fctx, am, basemf, limit):
    """return the path of the ancestor of fctx present in ancestor
    manifest am, stopping after the first ancestor lower than limit"""

    for f in fctx.ancestors():
        path = f.path()
        if am.get(path, None) == f.filenode():
            return path
        if basemf and basemf.get(path, None) == f.filenode():
            return path
        if not f.isintroducedafter(limit):
            return None


def _dirstatecopies(repo, match=None):
    ds = repo.dirstate
    c = ds.copies().copy()
    for k in list(c):
        # drop copies whose destination is no longer tracked as added,
        # normal, or merged, or that the matcher filters out
        if ds[k] not in b'anm' or (match and not match(k)):
            del c[k]
    return c


def _computeforwardmissing(a, b, match=None):
    """Computes which files are in b but not a.
    This is its own function so extensions can easily wrap this call to see what
    files _forwardcopies is about to process.
    """
    ma = a.manifest()
    mb = b.manifest()
    return mb.filesnotin(ma, match=match)


def usechangesetcentricalgo(repo):
    """Checks if we should use changeset-centric copy algorithms"""
    if repo.filecopiesmode == b'changeset-sidedata':
        return True
    readfrom = repo.ui.config(b'experimental', b'copies.read-from')
    changesetsource = (b'changeset-only', b'compatibility')
    return readfrom in changesetsource
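
# A minimal configuration sketch for the option consulted above (illustrative
# hgrc snippet; the shipped defaults live in configitems, not here):
#
#   [experimental]
#   copies.read-from = compatibility
#
# Both 'changeset-only' and 'compatibility' make copy metadata be read from
# the changesets themselves rather than from the filelogs.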


def _committedforwardcopies(a, b, base, match):
    """Like _forwardcopies(), but b.rev() cannot be None (working copy)"""
    # files might have to be traced back to the fctx parent of the last
    # one-side-only changeset, but not further back than that
    repo = a._repo

    if usechangesetcentricalgo(repo):
        return _changesetforwardcopies(a, b, match)

    debug = repo.ui.debugflag and repo.ui.configbool(b'devel', b'debug.copies')
    dbg = repo.ui.debug
    if debug:
        dbg(b'debug.copies: looking into rename from %s to %s\n' % (a, b))
    limit = _findlimit(repo, a, b)
    if debug:
        dbg(b'debug.copies: search limit: %d\n' % limit)
    am = a.manifest()
    basemf = None if base is None else base.manifest()

    # find where new files came from
    # we currently don't try to find where old files went, too expensive
    # this means we can miss a case like 'hg rm b; hg cp a b'
    cm = {}

    # Computing the forward missing is quite expensive on large manifests, since
    # it compares the entire manifests. We can optimize it in the common use
    # case of computing what copies are in a commit versus its parent (like
    # during a rebase or histedit). Note, we exclude merge commits from this
    # optimization, since the ctx.files() for a merge commit is not correct for
    # this comparison.
    forwardmissingmatch = match
    if b.p1() == a and b.p2().node() == node.nullid:
        filesmatcher = matchmod.exact(b.files())
        forwardmissingmatch = matchmod.intersectmatchers(match, filesmatcher)
    missing = _computeforwardmissing(a, b, match=forwardmissingmatch)

    ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True)

    if debug:
        dbg(b'debug.copies: missing files to search: %d\n' % len(missing))

    for f in sorted(missing):
        if debug:
            dbg(b'debug.copies: tracing file: %s\n' % f)
        fctx = b[f]
        fctx._ancestrycontext = ancestrycontext

        if debug:
            start = util.timer()
        opath = _tracefile(fctx, am, basemf, limit)
        if opath:
            if debug:
                dbg(b'debug.copies: rename of: %s\n' % opath)
            cm[f] = opath
        if debug:
            dbg(
                b'debug.copies: time: %f seconds\n'
                % (util.timer() - start)
            )
    return cm


def _changesetforwardcopies(a, b, match):
    if a.rev() in (node.nullrev, b.rev()):
        return {}
    repo = a.repo()
    children = {}
    cl = repo.changelog
    missingrevs = cl.findmissingrevs(common=[a.rev()], heads=[b.rev()])
    for r in missingrevs:
        for p in cl.parentrevs(r):
            if p == node.nullrev:
                continue
            if p not in children:
                children[p] = [r]
            else:
                children[p].append(r)

    roots = set(children) - set(missingrevs)
    # 'work' contains 3-tuples of a (revision number, parent number, copies).
    # The parent number is only used for knowing which parent the copies dict
    # came from.
    # NOTE: To reduce costly copying of the 'copies' dicts, we reuse the same
    # instance for *one* of the child nodes (the last one). Once an instance
    # has been put on the queue, it is thus no longer safe to modify it.
    # Conversely, it *is* safe to modify an instance popped off the queue.
    work = [(r, 1, {}) for r in roots]
    heapq.heapify(work)
    alwaysmatch = match.always()
    while work:
        r, i1, copies = heapq.heappop(work)
        if work and work[0][0] == r:
            # We are tracing copies from both parents
            r, i2, copies2 = heapq.heappop(work)
            for dst, src in copies2.items():
                # Unlike when copies are stored in the filelog, we consider
                # it a copy even if the destination already existed on the
                # other branch. It's simply too expensive to check if the
                # file existed in the manifest.
                if dst not in copies:
                    # If it was copied on the p1 side, leave it as copied from
                    # that side, even if it was also copied on the p2 side.
                    copies[dst] = copies2[dst]
        if r == b.rev():
            return copies
        for i, c in enumerate(children[r]):
            childctx = repo[c]
            if r == childctx.p1().rev():
                parent = 1
                childcopies = childctx.p1copies()
            else:
                assert r == childctx.p2().rev()
                parent = 2
                childcopies = childctx.p2copies()
            if not alwaysmatch:
                childcopies = {
                    dst: src for dst, src in childcopies.items() if match(dst)
                }
            # Copy the dict only if later iterations will also need it
            if i != len(children[r]) - 1:
                newcopies = copies.copy()
            else:
                newcopies = copies
            if childcopies:
                newcopies = _chain(newcopies, childcopies)
            for f in childctx.filesremoved():
                if f in newcopies:
                    del newcopies[f]
            heapq.heappush(work, (c, parent, newcopies))
    assert False
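
# Illustrative walk-through (a sketch, not upstream documentation): for a
# linear history a -> r1 -> r2 == b, where r1 renames 'x' to 'y' and r2
# renames 'y' to 'z', the heap starts with (a, 1, {}), pushes
# (r1, 1, {'y': 'x'}), then pushes (r2, 1, {'z': 'x'}) after chaining
# {'z': 'y'} and dropping the removed 'y', and finally returns {'z': 'x'}
# when r2 == b.rev() is popped.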


def _forwardcopies(a, b, base=None, match=None):
    """find {dst@b: src@a} copy mapping where a is an ancestor of b"""
    if base is None:
        base = a
    match = a.repo().narrowmatch(match)
    # check for working copy
    if b.rev() is None:
        cm = _committedforwardcopies(a, b.p1(), base, match)
        # combine copies from dirstate if necessary
        copies = _chain(cm, _dirstatecopies(b._repo, match))
    else:
        copies = _committedforwardcopies(a, b, base, match)
    return copies


def _backwardrenames(a, b, match):
    if a._repo.ui.config(b'experimental', b'copytrace') == b'off':
        return {}

    # Even though we're not taking copies into account, 1:n rename situations
    # can still exist (e.g. hg cp a b; hg mv a c). In those cases we
    # arbitrarily pick one of the renames.
    # We don't want to pass in "match" here, since that would filter
    # the destination by it. Since we're reversing the copies, we want
    # to filter the source instead.
    f = _forwardcopies(b, a)
    r = {}
    for k, v in sorted(pycompat.iteritems(f)):
        if match and not match(v):
            continue
        # remove copies
        if v in a:
            continue
        r[v] = k
    return r


def pathcopies(x, y, match=None):
    """find {dst@y: src@x} copy mapping for directed compare"""
    repo = x._repo
    debug = repo.ui.debugflag and repo.ui.configbool(b'devel', b'debug.copies')
    if debug:
        repo.ui.debug(
            b'debug.copies: searching copies from %s to %s\n' % (x, y)
        )
    if x == y or not x or not y:
        return {}
    a = y.ancestor(x)
    if a == x:
        if debug:
            repo.ui.debug(b'debug.copies: search mode: forward\n')
        if y.rev() is None and x == y.p1():
            # short-circuit to avoid issues with merge states
            return _dirstatecopies(repo, match)
        copies = _forwardcopies(x, y, match=match)
    elif a == y:
        if debug:
            repo.ui.debug(b'debug.copies: search mode: backward\n')
        copies = _backwardrenames(x, y, match=match)
    else:
        if debug:
            repo.ui.debug(b'debug.copies: search mode: combined\n')
        base = None
        if a.rev() != node.nullrev:
            base = x
        copies = _chain(
            _backwardrenames(x, a, match=match),
            _forwardcopies(a, y, base, match=match),
        )
    _filter(x, y, copies)
    return copies
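
# Usage sketch (hypothetical contexts, for illustration only):
#
#   pathcopies(repo[b'.'], repo[None])   # pending copies from the dirstate
#   pathcopies(old, new)                 # forward walk, old is an ancestor
#   pathcopies(new, old)                 # backward walk, renames are reversed
#
# Unrelated contexts combine a backward walk down to their common ancestor
# with a forward walk back up to the destination.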


def mergecopies(repo, c1, c2, base):
    """
    Finds moves and copies between context c1 and c2 that are relevant for
    merging. 'base' will be used as the merge base.

    Copytracing is used in commands like rebase, merge, unshelve, etc to merge
    files that were moved/copied in one merge parent and modified in another.
    For example:

    o          ---> 4 another commit
    |
    |   o      ---> 3 commit that modifies a.txt
    |  /
    o /        ---> 2 commit that moves a.txt to b.txt
    |/
    o          ---> 1 merge base

    If we try to rebase revision 3 onto revision 4, then since there is no
    a.txt in revision 4, and the user has copytrace disabled, we print the
    following message:

    ```other changed <file> which local deleted```

    Returns five dicts: "copy", "movewithdir", "diverge", "renamedelete" and
    "dirmove".

    "copy" is a mapping from destination name -> source name,
    where source is in c1 and destination is in c2 or vice-versa.

    "movewithdir" is a mapping from source name -> destination name,
    where the file at source present in one context but not the other
    needs to be moved to destination by the merge process, because the
    other context moved the directory it is in.

    "diverge" is a mapping of source name -> list of destination names
    for divergent renames.

    "renamedelete" is a mapping of source name -> list of destination
    names for files deleted in c1 that were renamed in c2 or vice-versa.

    "dirmove" is a mapping of detected source dir -> destination dir renames.
    This is needed for handling changes to new files previously grafted into
    renamed directories.

    This function calls different copytracing algorithms based on config.
    """
    # avoid silly behavior for update from empty dir
    if not c1 or not c2 or c1 == c2:
        return {}, {}, {}, {}, {}

    narrowmatch = c1.repo().narrowmatch()

    # avoid silly behavior for parent -> working dir
    if c2.node() is None and c1.node() == repo.dirstate.p1():
        return _dirstatecopies(repo, narrowmatch), {}, {}, {}, {}

    copytracing = repo.ui.config(b'experimental', b'copytrace')
    if stringutil.parsebool(copytracing) is False:
        # stringutil.parsebool() returns None when it is unable to parse the
        # value, so we keep copytracing enabled in such cases (only an
        # explicit false value disables it)
        return {}, {}, {}, {}, {}

    if usechangesetcentricalgo(repo):
        # The heuristics don't make sense when we need changeset-centric algos
        return _fullcopytracing(repo, c1, c2, base)

    # Copy trace disabling is explicitly below the node == p1 logic above
    # because the logic above is required for a simple copy to be kept across a
    # rebase.
    if copytracing == b'heuristics':
        # Do full copytracing if only non-public revisions are involved as
        # that will be fast enough and will also cover the copies which could
        # be missed by heuristics
        if _isfullcopytraceable(repo, c1, base):
            return _fullcopytracing(repo, c1, c2, base)
        return _heuristicscopytracing(repo, c1, c2, base)
    else:
        return _fullcopytracing(repo, c1, c2, base)
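
# Sketch of the result for the graph in the docstring above (an illustration,
# not a test): merging revision 3 with revision 4 using revision 1 as the base
# and full copytracing would typically give copy == {b'b.txt': b'a.txt'} with
# the other four dicts empty, letting the merge apply revision 3's changes to
# a.txt onto b.txt instead of reporting it as deleted.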


def _isfullcopytraceable(repo, c1, base):
    """ Checks whether base, source and destination are all non-public
    branches; if so, use the full copytrace algorithm for increased
    capabilities, since it will be fast enough.

    `experimental.copytrace.sourcecommitlimit` can be used to set a limit on
    the number of changesets from c1 to base; if there are more changesets
    than the limit, the full copytracing algorithm won't be used.
    """
    if c1.rev() is None:
        c1 = c1.p1()
    if c1.mutable() and base.mutable():
        sourcecommitlimit = repo.ui.configint(
            b'experimental', b'copytrace.sourcecommitlimit'
        )
        commits = len(repo.revs(b'%d::%d', base.rev(), c1.rev()))
        return commits < sourcecommitlimit
    return False
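
# Configuration sketch for the limit consulted above (illustrative hgrc
# snippet; the shipped default lives in configitems):
#
#   [experimental]
#   copytrace.sourcecommitlimit = 100
#
# With more changesets than this between base and c1, the heuristics are kept
# instead of switching to full copytracing.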


def _checksinglesidecopies(
    src, dsts1, m1, m2, mb, c2, base, copy, renamedelete
):
    if src not in m2:
        # deleted on side 2
        if src not in m1:
            # renamed on side 1, deleted on side 2
            renamedelete[src] = dsts1
    elif m2[src] != mb[src]:
        if not _related(c2[src], base[src]):
            return
        # modified on side 2
        for dst in dsts1:
            if dst not in m2:
                # dst not added on side 2 (handle as regular
                # "both created" case in manifestmerge otherwise)
                copy[dst] = src


def _fullcopytracing(repo, c1, c2, base):
    """ The full copytracing algorithm, which finds all the new files that
    were added from the merge base up to the top commit and, for each such
    file, checks whether it was copied from another file.

    This is pretty slow when a lot of changesets are involved but will track
    all the copies.
    """
    m1 = c1.manifest()
    m2 = c2.manifest()
    mb = base.manifest()

    copies1 = pathcopies(base, c1)
    copies2 = pathcopies(base, c2)
    inversecopies1 = {}
    inversecopies2 = {}
    for dst, src in copies1.items():
        inversecopies1.setdefault(src, []).append(dst)
    for dst, src in copies2.items():
        inversecopies2.setdefault(src, []).append(dst)

    copy = {}
    diverge = {}
    renamedelete = {}
    allsources = set(inversecopies1) | set(inversecopies2)
    for src in allsources:
        dsts1 = inversecopies1.get(src)
        dsts2 = inversecopies2.get(src)
        if dsts1 and dsts2:
            # copied/renamed on both sides
            if src not in m1 and src not in m2:
                # renamed on both sides
                dsts1 = set(dsts1)
                dsts2 = set(dsts2)
                # If there's some overlap in the rename destinations, we
                # consider it not divergent. For example, if side 1 copies 'a'
                # to 'b' and 'c' and deletes 'a', and side 2 copies 'a' to 'c'
                # and 'd' and deletes 'a'.
                if dsts1 & dsts2:
                    for dst in dsts1 & dsts2:
                        copy[dst] = src
                else:
                    diverge[src] = sorted(dsts1 | dsts2)
            elif src in m1 and src in m2:
                # copied on both sides
                dsts1 = set(dsts1)
                dsts2 = set(dsts2)
                for dst in dsts1 & dsts2:
                    copy[dst] = src
            # TODO: Handle cases where it was renamed on one side and copied
            # on the other side
        elif dsts1:
            # copied/renamed only on side 1
            _checksinglesidecopies(
                src, dsts1, m1, m2, mb, c2, base, copy, renamedelete
            )
        elif dsts2:
            # copied/renamed only on side 2
            _checksinglesidecopies(
                src, dsts2, m2, m1, mb, c1, base, copy, renamedelete
            )

    renamedeleteset = set()
    divergeset = set()
    for dsts in diverge.values():
        divergeset.update(dsts)
    for dsts in renamedelete.values():
        renamedeleteset.update(dsts)

    # find interesting file sets from manifests
    addedinm1 = m1.filesnotin(mb, repo.narrowmatch())
    addedinm2 = m2.filesnotin(mb, repo.narrowmatch())
    u1 = sorted(addedinm1 - addedinm2)
    u2 = sorted(addedinm2 - addedinm1)

    header = b"  unmatched files in %s"
    if u1:
        repo.ui.debug(b"%s:\n   %s\n" % (header % b'local', b"\n   ".join(u1)))
    if u2:
        repo.ui.debug(b"%s:\n   %s\n" % (header % b'other', b"\n   ".join(u2)))

    fullcopy = copies1.copy()
    fullcopy.update(copies2)
    if not fullcopy:
        return copy, {}, diverge, renamedelete, {}

    if repo.ui.debugflag:
        repo.ui.debug(
            b"  all copies found (* = to merge, ! = divergent, "
            b"% = renamed and deleted):\n"
        )
        for f in sorted(fullcopy):
            note = b""
            if f in copy:
                note += b"*"
            if f in divergeset:
                note += b"!"
            if f in renamedeleteset:
                note += b"%"
            repo.ui.debug(
                b"   src: '%s' -> dst: '%s' %s\n" % (fullcopy[f], f, note)
            )
    del divergeset

    repo.ui.debug(b"  checking for directory renames\n")

    # generate a directory move map
    d1, d2 = c1.dirs(), c2.dirs()
    invalid = set()
    dirmove = {}

    # examine each file copy for a potential directory move, which is
    # when all the files in a directory are moved to a new directory
    for dst, src in pycompat.iteritems(fullcopy):
        dsrc, ddst = pathutil.dirname(src), pathutil.dirname(dst)
        if dsrc in invalid:
            # already seen to be uninteresting
            continue
        elif dsrc in d1 and ddst in d1:
            # directory wasn't entirely moved locally
            invalid.add(dsrc)
        elif dsrc in d2 and ddst in d2:
            # directory wasn't entirely moved remotely
            invalid.add(dsrc)
        elif dsrc in dirmove and dirmove[dsrc] != ddst:
            # files from the same directory moved to two different places
            invalid.add(dsrc)
        else:
            # looks good so far
            dirmove[dsrc] = ddst

    for i in invalid:
        if i in dirmove:
            del dirmove[i]
    del d1, d2, invalid

    if not dirmove:
        return copy, {}, diverge, renamedelete, {}

    dirmove = {k + b"/": v + b"/" for k, v in pycompat.iteritems(dirmove)}

    for d in dirmove:
        repo.ui.debug(
            b"   discovered dir src: '%s' -> dst: '%s'\n" % (d, dirmove[d])
        )

    movewithdir = {}
    # check unaccounted nonoverlapping files against directory moves
    for f in u1 + u2:
        if f not in fullcopy:
            for d in dirmove:
                if f.startswith(d):
                    # new file added in a directory that was moved, move it
                    df = dirmove[d] + f[len(d) :]
                    if df not in copy:
                        movewithdir[f] = df
                        repo.ui.debug(
                            b"   pending file src: '%s' -> dst: '%s'\n"
                            % (f, df)
                        )
                    break

    return copy, movewithdir, diverge, renamedelete, dirmove
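
# Directory-move sketch (illustration only): if the local side renames every
# file under 'docs/' to 'doc/' while the other side leaves 'docs/' alone,
# dirmove ends up as {b'docs/': b'doc/'}, and a file 'docs/new.txt' added only
# on the other side is reported in movewithdir as
# {b'docs/new.txt': b'doc/new.txt'}.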


def _heuristicscopytracing(repo, c1, c2, base):
    """ Fast copytracing using filename heuristics

    Assumes that moves or renames are of the following two types:

    1) Inside a directory only (same directory name but different filenames)
    2) Move from one directory to another
       (same filenames but different directory names)

    Works only when there are no merge commits in the "source branch".
    Source branch is commits from base up to c2 not including base.

    If a merge is involved it falls back to _fullcopytracing().

    Can be used by setting the following config:

    [experimental]
    copytrace = heuristics

    In some cases the copy/move candidates found by heuristics can be very
    large in number and that will make the algorithm slow. The number of
    possible candidates to check can be limited by using the config
    `experimental.copytrace.movecandidateslimit` which defaults to 100.
    """

    if c1.rev() is None:
        c1 = c1.p1()
    if c2.rev() is None:
        c2 = c2.p1()

    copies = {}

    changedfiles = set()
    m1 = c1.manifest()
    if not repo.revs(b'%d::%d', base.rev(), c2.rev()):
        # If base is not in c2 branch, we switch to fullcopytracing
        repo.ui.debug(
            b"switching to full copytracing as base is not "
            b"an ancestor of c2\n"
        )
        return _fullcopytracing(repo, c1, c2, base)

    ctx = c2
    while ctx != base:
        if len(ctx.parents()) == 2:
            # To keep things simple let's not handle merges
            repo.ui.debug(b"switching to full copytracing because of merges\n")
            return _fullcopytracing(repo, c1, c2, base)
        changedfiles.update(ctx.files())
        ctx = ctx.p1()

    cp = _forwardcopies(base, c2)
    for dst, src in pycompat.iteritems(cp):
        if src in m1:
            copies[dst] = src

    # file is missing if it isn't present in the destination, but is present in
    # the base and present in the source.
    # Presence in the base is important to exclude added files, presence in the
    # source is important to exclude removed files.
    filt = lambda f: f not in m1 and f in base and f in c2
    missingfiles = [f for f in changedfiles if filt(f)]

    if missingfiles:
        basenametofilename = collections.defaultdict(list)
        dirnametofilename = collections.defaultdict(list)
        for f in m1.filesnotin(base.manifest()):
            basename = os.path.basename(f)
            dirname = os.path.dirname(f)
            basenametofilename[basename].append(f)
            dirnametofilename[dirname].append(f)

        for f in missingfiles:
            basename = os.path.basename(f)
            dirname = os.path.dirname(f)
            samebasename = basenametofilename[basename]
            samedirname = dirnametofilename[dirname]
            movecandidates = samebasename + samedirname
            # f is guaranteed to be present in c2, that's why
            # c2.filectx(f) won't fail
            f2 = c2.filectx(f)
            # we can have a lot of candidates which can slow down the heuristics
            # config value to limit the number of candidate moves to check
            maxcandidates = repo.ui.configint(
                b'experimental', b'copytrace.movecandidateslimit'
            )

            if len(movecandidates) > maxcandidates:
                repo.ui.status(
                    _(
                        b"skipping copytracing for '%s', more "
                        b"candidates than the limit: %d\n"
                    )
                    % (f, len(movecandidates))
                )
                continue

            for candidate in movecandidates:
                f1 = c1.filectx(candidate)
                if _related(f1, f2):
                    # if there are a few related copies then we'll merge
                    # changes into all of them. This matches the behaviour
                    # of upstream copytracing
                    copies[candidate] = f

    return copies, {}, {}, {}, {}
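
# Heuristic-match sketch (illustration only): if c1 renamed 'src/util.py' to
# 'lib/util.py' and c2 only modified 'src/util.py', the missing file shares
# its basename with 'lib/util.py', so that file becomes a move candidate; if
# _related() confirms a common ancestor, c2's changes get merged into it.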


def _related(f1, f2):
    """return True if f1 and f2 filectx have a common ancestor

    Walk back to common ancestor to see if the two files originate
    from the same file. Since workingfilectx's rev() is None it messes
    up the integer comparison logic, hence the pre-step check for
    None (f1 and f2 can only be workingfilectx's initially).
    """

    if f1 == f2:
        return True  # a match

    g1, g2 = f1.ancestors(), f2.ancestors()
    try:
        f1r, f2r = f1.linkrev(), f2.linkrev()

        if f1r is None:
            f1 = next(g1)
        if f2r is None:
            f2 = next(g2)

        while True:
            f1r, f2r = f1.linkrev(), f2.linkrev()
            if f1r > f2r:
                f1 = next(g1)
            elif f2r > f1r:
                f2 = next(g2)
            else:  # f1 and f2 point to files in the same linkrev
                return f1 == f2  # true if they point to the same file
    except StopIteration:
        return False


def duplicatecopies(repo, wctx, rev, fromrev, skiprev=None):
    """reproduce copies from fromrev to rev in the dirstate

    If skiprev is specified, it's a revision that should be used to
    filter copy records. Any copies that occur between fromrev and
    skiprev will not be duplicated, even if they appear in the set of
    copies between fromrev and rev.
    """
    exclude = {}
    ctraceconfig = repo.ui.config(b'experimental', b'copytrace')
    bctrace = stringutil.parsebool(ctraceconfig)
    if skiprev is not None and (
        ctraceconfig == b'heuristics' or bctrace or bctrace is None
    ):
        # copytrace='off' skips this line, but not the entire function because
        # the line below is O(size of the repo) during a rebase, while the rest
        # of the function is much faster (and is required for carrying copy
        # metadata across the rebase anyway).
        exclude = pathcopies(repo[fromrev], repo[skiprev])
    for dst, src in pycompat.iteritems(pathcopies(repo[fromrev], repo[rev])):
        if dst in exclude:
            continue
        if dst in wctx:
            wctx[dst].markcopied(src)


def computechangesetfilesadded(ctx):
    """return the list of files added in a changeset
    """
    added = []
    for f in ctx.files():
        if not any(f in p for p in ctx.parents()):
            added.append(f)
    return added


def computechangesetfilesremoved(ctx):
    """return the list of files removed in a changeset
    """
    removed = []
    for f in ctx.files():
        if f not in ctx:
            removed.append(f)
    return removed


def computechangesetcopies(ctx):
    """return the copies data for a changeset

    The copies data are returned as a pair of dictionaries (p1copies, p2copies).

    Each dictionary is in the form: `{newname: oldname}`
    """
    p1copies = {}
    p2copies = {}
    p1 = ctx.p1()
    p2 = ctx.p2()
    narrowmatch = ctx._repo.narrowmatch()
    for dst in ctx.files():
        if not narrowmatch(dst) or dst not in ctx:
            continue
        copied = ctx[dst].renamed()
        if not copied:
            continue
        src, srcnode = copied
        if src in p1 and p1[src].filenode() == srcnode:
            p1copies[dst] = src
        elif src in p2 and p2[src].filenode() == srcnode:
            p2copies[dst] = src
    return p1copies, p2copies
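
# Illustrative example (a sketch, not a doctest): for a changeset that renames
# 'a' to 'b' relative to its first parent, computechangesetfilesadded() returns
# [b'b'], computechangesetfilesremoved() returns [b'a'], and
# computechangesetcopies() returns ({b'b': b'a'}, {}).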


def encodecopies(files, copies):
    items = []
    for i, dst in enumerate(files):
        if dst in copies:
            items.append(b'%d\0%s' % (i, copies[dst]))
    if len(items) != len(copies):
        raise error.ProgrammingError(
            b'some copy targets missing from file list'
        )
    return b"\n".join(items)


def decodecopies(files, data):
    try:
        copies = {}
        if not data:
            return copies
        for l in data.split(b'\n'):
            strindex, src = l.split(b'\0')
            i = int(strindex)
            dst = files[i]
            copies[dst] = src
        return copies
    except (ValueError, IndexError):
        # Perhaps someone had chosen the same key name (e.g. "p1copies") and
        # used different syntax for the value.
        return None
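
# Format sketch (illustration only): each entry is '<index of destination>' +
# NUL + '<source path>', entries joined by newlines. For example, with
# files == [b'a', b'b'] and copies == {b'b': b'a'},
# encodecopies(files, copies) yields b'1\x00a', and
# decodecopies(files, b'1\x00a') gives back {b'b': b'a'}.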


def encodefileindices(files, subset):
    subset = set(subset)
    indices = []
    for i, f in enumerate(files):
        if f in subset:
            indices.append(b'%d' % i)
    return b'\n'.join(indices)


def decodefileindices(files, data):
    try:
        subset = []
        if not data:
            return subset
        for strindex in data.split(b'\n'):
            i = int(strindex)
            if i < 0 or i >= len(files):
                return None
            subset.append(files[i])
        return subset
    except (ValueError, IndexError):
        # Perhaps someone had chosen the same key name (e.g. "added") and
        # used different syntax for the value.
        return None
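
# Format sketch (illustration only): newline-separated indices into the sorted
# file list. For example, encodefileindices([b'a', b'b', b'c'], [b'a', b'c'])
# yields b'0\n2', and decodefileindices([b'a', b'b', b'c'], b'0\n2') gives back
# [b'a', b'c'].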


def _getsidedata(srcrepo, rev):
    ctx = srcrepo[rev]
    filescopies = computechangesetcopies(ctx)
    filesadded = computechangesetfilesadded(ctx)
    filesremoved = computechangesetfilesremoved(ctx)
    sidedata = {}
    if any([filescopies, filesadded, filesremoved]):
        sortedfiles = sorted(ctx.files())
        p1copies, p2copies = filescopies
        p1copies = encodecopies(sortedfiles, p1copies)
        p2copies = encodecopies(sortedfiles, p2copies)
        filesadded = encodefileindices(sortedfiles, filesadded)
        filesremoved = encodefileindices(sortedfiles, filesremoved)
        sidedata[sidedatamod.SD_P1COPIES] = p1copies
        sidedata[sidedatamod.SD_P2COPIES] = p2copies
        sidedata[sidedatamod.SD_FILESADDED] = filesadded
        sidedata[sidedatamod.SD_FILESREMOVED] = filesremoved
    return sidedata
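
# Illustrative sketch of the resulting dict (not a doctest): for a revision
# that only renames 'a' to 'b' relative to p1, sortedfiles == [b'a', b'b'] and
# the sidedata would be roughly
# {SD_P1COPIES: b'1\x00a', SD_P2COPIES: b'', SD_FILESADDED: b'1',
#  SD_FILESREMOVED: b'0'}.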


def getsidedataadder(srcrepo, destrepo):
    def sidedatacompanion(revlog, rev):
        sidedata = {}
        if util.safehasattr(revlog, 'filteredrevs'):  # this is a changelog
            sidedata = _getsidedata(srcrepo, rev)
        return False, (), sidedata

    return sidedatacompanion


def getsidedataremover(srcrepo, destrepo):
    def sidedatacompanion(revlog, rev):
        f = ()
        if util.safehasattr(revlog, 'filteredrevs'):  # this is a changelog
            if revlog.flags(rev) & REVIDX_SIDEDATA:
                f = (
                    sidedatamod.SD_P1COPIES,
                    sidedatamod.SD_P2COPIES,
                    sidedatamod.SD_FILESADDED,
                    sidedatamod.SD_FILESREMOVED,
                )
        return False, f, {}

    return sidedatacompanion