# HG changeset patch
# User Pierre-Yves David <pierre-yves.david@octobus.net>
# Date 2019-11-13 19:42:13
# Node ID 421ea5772039e70c94e81fefd084b24de7e0142b
# Parent  3b039e43a1e6446cb3d4d256e708d77057bac8b9

copies: split the combination of the copies mapping into its own function

In some cases, this part takes up to 95% of a copy tracing run that lasts
about a hundred seconds. This poor performance comes from the fact that we
keep duplicating and merging dictionaries that are mostly similar.

I want to experiment with smarter native code to do this, so I need to isolate
the function first.
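A rough, hypothetical sketch of how the extracted helper would be driven,
using toy data (the real callers build `children`, `revinfo` and `match` from
repository data; the stand-in matcher below only provides the `always()`
method the helper actually uses):

    # linear history 0 -> 1 -> 2; revision 1 copies 'a' to 'b'
    children = {0: [1], 1: [2]}
    revs = [0, 1]  # sorted; the target revision itself is excluded

    def revinfo(rev):
        # returns (p1, p2, p1copies, p2copies, removed)
        p1copies = {b'b': b'a'} if rev == 1 else {}
        return (rev - 1, -1, p1copies, {}, [])

    class alwaysmatcher(object):
        def always(self):
            return True

    copies = _combinechangesetcopies(revs, children, 2, revinfo,
                                     alwaysmatcher())
    # expected result: {b'b': b'a'}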

diff --git a/mercurial/copies.py b/mercurial/copies.py
--- a/mercurial/copies.py
+++ b/mercurial/copies.py
@@ -281,9 +281,24 @@ def _changesetforwardcopies(a, b, match)
     iterrevs &= mrset
     iterrevs.update(roots)
     iterrevs.remove(b.rev())
+    revs = sorted(iterrevs)
+    return _combinechangesetcopies(revs, children, b.rev(), revinfo, match)
+
+
+def _combinechangesetcopies(revs, children, targetrev, revinfo, match):
+    """combine the copies information for each item of iterrevs
+
+    revs: sorted iterable of revision to visit
+    children: a {parent: [children]} mapping.
+    targetrev: the final copies destination revision (not in iterrevs)
+    revinfo(rev): a function that return (p1, p2, p1copies, p2copies, removed)
+    match: a matcher
+
+    It returns the aggregated copies information for `targetrev`.
+    """
     all_copies = {}
     alwaysmatch = match.always()
-    for r in sorted(iterrevs):
+    for r in revs:
         copies = all_copies.pop(r, None)
         if copies is None:
             # this is a root
@@ -336,7 +351,7 @@ def _changesetforwardcopies(a, b, match)
                 else:
                     newcopies.update(othercopies)
                     all_copies[c] = newcopies
-    return all_copies[b.rev()]
+    return all_copies[targetrev]
 
 
 def _forwardcopies(a, b, base=None, match=None):