copies: don't include copies that are not in source in directory move...
Martin von Zweigbergk
r42338:31abb9d7 default
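A minimal sketch of the filtering step this revision adds to _fullcopytracing(), using plain dicts as stand-ins for the merge-base manifest (mb) and the accumulated copy map (fullcopy); the sample paths and node values are illustrative only, not part of the diff:

    # fullcopy maps destination -> source; mb lists the files in the merge base.
    mb = {'dir/a': b'0' * 20}
    fullcopy = {'newdir/a': 'dir/a',   # source exists in the merge base: kept
                'newdir/x': 'dir/x'}   # source missing from the merge base: dropped

    # Drop copies whose source is not in the merge base, so they can no longer
    # suggest a spurious directory move.
    for dst, src in list(fullcopy.items()):
        if src not in mb:
            del fullcopy[dst]

    assert fullcopy == {'newdir/a': 'dir/a'}

Only copies whose source really exists in the merge base are then considered when building the directory move map (dirmove), as the diff below shows.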
@@ -1,1012 +1,1017 @@
# copies.py - copy detection for Mercurial
#
# Copyright 2008 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import collections
import heapq
import os

from .i18n import _

from . import (
    match as matchmod,
    node,
    pathutil,
    util,
)
from .utils import (
    stringutil,
)

def _findlimit(repo, ctxa, ctxb):
    """
    Find the last revision that needs to be checked to ensure that a full
    transitive closure for file copies can be properly calculated.
    Generally, this means finding the earliest revision number that's an
    ancestor of a or b but not both, except when a or b is a direct descendant
    of the other, in which case we can return the minimum revnum of a and b.
    """

    # basic idea:
    # - mark a and b with different sides
    # - if a parent's children are all on the same side, the parent is
    #   on that side, otherwise it is on no side
    # - walk the graph in topological order with the help of a heap;
    #   - add unseen parents to side map
    #   - clear side of any parent that has children on different sides
    #   - track number of interesting revs that might still be on a side
    #   - track the lowest interesting rev seen
    #   - quit when interesting revs is zero

    cl = repo.changelog
    wdirparents = None
    a = ctxa.rev()
    b = ctxb.rev()
    if a is None:
        wdirparents = (ctxa.p1(), ctxa.p2())
        a = node.wdirrev
    if b is None:
        assert not wdirparents
        wdirparents = (ctxb.p1(), ctxb.p2())
        b = node.wdirrev

    side = {a: -1, b: 1}
    visit = [-a, -b]
    heapq.heapify(visit)
    interesting = len(visit)
    limit = node.wdirrev

    while interesting:
        r = -heapq.heappop(visit)
        if r == node.wdirrev:
            parents = [pctx.rev() for pctx in wdirparents]
        else:
            parents = cl.parentrevs(r)
            if parents[1] == node.nullrev:
                parents = parents[:1]
        for p in parents:
            if p not in side:
                # first time we see p; add it to visit
                side[p] = side[r]
                if side[p]:
                    interesting += 1
                heapq.heappush(visit, -p)
            elif side[p] and side[p] != side[r]:
                # p was interesting but now we know better
                side[p] = 0
                interesting -= 1
        if side[r]:
            limit = r # lowest rev visited
            interesting -= 1

    # Consider the following flow (see test-commit-amend.t under issue4405):
    # 1/ File 'a0' committed
    # 2/ File renamed from 'a0' to 'a1' in a new commit (call it 'a1')
    # 3/ Move back to first commit
    # 4/ Create a new commit via revert to contents of 'a1' (call it 'a1-amend')
    # 5/ Rename file from 'a1' to 'a2' and commit --amend 'a1-msg'
    #
    # During the amend in step five, we will be in this state:
    #
    # @  3 temporary amend commit for a1-amend
    # |
    # o  2 a1-amend
    # |
    # | o  1 a1
    # |/
    # o  0 a0
    #
    # When _findlimit is called, a and b are revs 3 and 0, so limit will be 2,
    # yet the filelog has the copy information in rev 1 and we will not look
    # back far enough unless we also look at the a and b as candidates.
    # This only occurs when a is a descendant of b or vice versa.
    return min(limit, a, b)

def _chain(src, dst, a, b):
    """chain two sets of copies a->b"""
    t = a.copy()
    for k, v in b.iteritems():
        if v in t:
            # found a chain
            if t[v] != k:
                # file wasn't renamed back to itself
                t[k] = t[v]
            if v not in dst:
                # chain was a rename, not a copy
                del t[v]
        if v in src:
            # file is a copy of an existing file
            t[k] = v

    for k, v in list(t.items()):
        # remove criss-crossed copies
        if k in src and v in dst:
            del t[k]
        # remove copies to files that were then removed
        elif k not in dst:
            del t[k]

    return t

def _tracefile(fctx, am, limit=node.nullrev):
    """return file context that is the ancestor of fctx present in ancestor
    manifest am, stopping after the first ancestor lower than limit"""

    for f in fctx.ancestors():
        if am.get(f.path(), None) == f.filenode():
            return f
        if limit >= 0 and not f.isintroducedafter(limit):
            return None

def _dirstatecopies(repo, match=None):
    ds = repo.dirstate
    c = ds.copies().copy()
    for k in list(c):
        if ds[k] not in 'anm' or (match and not match(k)):
            del c[k]
    return c

def _computeforwardmissing(a, b, match=None):
    """Computes which files are in b but not a.
    This is its own function so extensions can easily wrap this call to see what
    files _forwardcopies is about to process.
    """
    ma = a.manifest()
    mb = b.manifest()
    return mb.filesnotin(ma, match=match)

def usechangesetcentricalgo(repo):
    """Checks if we should use changeset-centric copy algorithms"""
    return (repo.ui.config('experimental', 'copies.read-from') in
            ('changeset-only', 'compatibility'))

def _committedforwardcopies(a, b, match):
    """Like _forwardcopies(), but b.rev() cannot be None (working copy)"""
    # files might have to be traced back to the fctx parent of the last
    # one-side-only changeset, but not further back than that
    repo = a._repo

    if usechangesetcentricalgo(repo):
        return _changesetforwardcopies(a, b, match)

    debug = repo.ui.debugflag and repo.ui.configbool('devel', 'debug.copies')
    dbg = repo.ui.debug
    if debug:
        dbg('debug.copies: looking into rename from %s to %s\n'
            % (a, b))
    limit = _findlimit(repo, a, b)
    if debug:
        dbg('debug.copies: search limit: %d\n' % limit)
    am = a.manifest()

    # find where new files came from
    # we currently don't try to find where old files went, too expensive
    # this means we can miss a case like 'hg rm b; hg cp a b'
    cm = {}

    # Computing the forward missing is quite expensive on large manifests, since
    # it compares the entire manifests. We can optimize it in the common use
    # case of computing what copies are in a commit versus its parent (like
    # during a rebase or histedit). Note, we exclude merge commits from this
    # optimization, since the ctx.files() for a merge commit is not correct for
    # this comparison.
    forwardmissingmatch = match
    if b.p1() == a and b.p2().node() == node.nullid:
        filesmatcher = matchmod.exact(b.files())
        forwardmissingmatch = matchmod.intersectmatchers(match, filesmatcher)
    missing = _computeforwardmissing(a, b, match=forwardmissingmatch)

    ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True)

    if debug:
        dbg('debug.copies: missing file to search: %d\n' % len(missing))

    for f in missing:
        if debug:
            dbg('debug.copies: tracing file: %s\n' % f)
        fctx = b[f]
        fctx._ancestrycontext = ancestrycontext

        if debug:
            start = util.timer()
        ofctx = _tracefile(fctx, am, limit)
        if ofctx:
            if debug:
                dbg('debug.copies: rename of: %s\n' % ofctx._path)
            cm[f] = ofctx.path()
        if debug:
            dbg('debug.copies: time: %f seconds\n'
                % (util.timer() - start))
    return cm

def _changesetforwardcopies(a, b, match):
    if a.rev() == node.nullrev:
        return {}

    repo = a.repo()
    children = {}
    cl = repo.changelog
    missingrevs = cl.findmissingrevs(common=[a.rev()], heads=[b.rev()])
    for r in missingrevs:
        for p in cl.parentrevs(r):
            if p == node.nullrev:
                continue
            if p not in children:
                children[p] = [r]
            else:
                children[p].append(r)

    roots = set(children) - set(missingrevs)
    # 'work' contains 3-tuples of a (revision number, parent number, copies).
    # The parent number is only used for knowing which parent the copies dict
    # came from.
    work = [(r, 1, {}) for r in roots]
    heapq.heapify(work)
    while work:
        r, i1, copies1 = heapq.heappop(work)
        if work and work[0][0] == r:
            # We are tracing copies from both parents
            r, i2, copies2 = heapq.heappop(work)
            copies = {}
            ctx = repo[r]
            p1man, p2man = ctx.p1().manifest(), ctx.p2().manifest()
            allcopies = set(copies1) | set(copies2)
            # TODO: perhaps this filtering should be done as long as ctx
            # is a merge, whether or not we're tracing from both parents.
            for dst in allcopies:
                if not match(dst):
                    continue
                if dst not in copies2:
                    # Copied on p1 side: mark as copy from p1 side if it didn't
                    # already exist on p2 side
                    if dst not in p2man:
                        copies[dst] = copies1[dst]
                elif dst not in copies1:
                    # Copied on p2 side: mark as copy from p2 side if it didn't
                    # already exist on p1 side
                    if dst not in p1man:
                        copies[dst] = copies2[dst]
                else:
                    # Copied on both sides: mark as copy from p1 side
                    copies[dst] = copies1[dst]
        else:
            copies = copies1
        if r == b.rev():
            return copies
        for c in children[r]:
            childctx = repo[c]
            if r == childctx.p1().rev():
                parent = 1
                childcopies = childctx.p1copies()
            else:
                assert r == childctx.p2().rev()
                parent = 2
                childcopies = childctx.p2copies()
            if not match.always():
                childcopies = {dst: src for dst, src in childcopies.items()
                               if match(dst)}
            childcopies = _chain(a, childctx, copies, childcopies)
            heapq.heappush(work, (c, parent, childcopies))
    assert False

def _forwardcopies(a, b, match=None):
    """find {dst@b: src@a} copy mapping where a is an ancestor of b"""

    match = a.repo().narrowmatch(match)
    # check for working copy
    if b.rev() is None:
        if a == b.p1():
            # short-circuit to avoid issues with merge states
            return _dirstatecopies(b._repo, match)

        cm = _committedforwardcopies(a, b.p1(), match)
        # combine copies from dirstate if necessary
        return _chain(a, b, cm, _dirstatecopies(b._repo, match))
    return _committedforwardcopies(a, b, match)

def _backwardrenames(a, b, match):
    if a._repo.ui.config('experimental', 'copytrace') == 'off':
        return {}

    # Even though we're not taking copies into account, 1:n rename situations
    # can still exist (e.g. hg cp a b; hg mv a c). In those cases we
    # arbitrarily pick one of the renames.
    # We don't want to pass in "match" here, since that would filter
    # the destination by it. Since we're reversing the copies, we want
    # to filter the source instead.
    f = _forwardcopies(b, a)
    r = {}
    for k, v in sorted(f.iteritems()):
        if match and not match(v):
            continue
        # remove copies
        if v in a:
            continue
        r[v] = k
    return r

def pathcopies(x, y, match=None):
    """find {dst@y: src@x} copy mapping for directed compare"""
    repo = x._repo
    debug = repo.ui.debugflag and repo.ui.configbool('devel', 'debug.copies')
    if debug:
        repo.ui.debug('debug.copies: searching copies from %s to %s\n'
                      % (x, y))
    if x == y or not x or not y:
        return {}
    a = y.ancestor(x)
    if a == x:
        if debug:
            repo.ui.debug('debug.copies: search mode: forward\n')
        return _forwardcopies(x, y, match=match)
    if a == y:
        if debug:
            repo.ui.debug('debug.copies: search mode: backward\n')
        return _backwardrenames(x, y, match=match)
    if debug:
        repo.ui.debug('debug.copies: search mode: combined\n')
    return _chain(x, y, _backwardrenames(x, a, match=match),
                  _forwardcopies(a, y, match=match))

def _computenonoverlap(repo, c1, c2, addedinm1, addedinm2, baselabel=''):
    """Computes, based on addedinm1 and addedinm2, the files exclusive to c1
    and c2. This is its own function so extensions can easily wrap this call
    to see what files mergecopies is about to process.

    Even though c1 and c2 are not used in this function, they are useful in
    other extensions for being able to read the file nodes of the changed files.

    "baselabel" can be passed to help distinguish the multiple computations
    done in the graft case.
    """
    u1 = sorted(addedinm1 - addedinm2)
    u2 = sorted(addedinm2 - addedinm1)

    header = "  unmatched files in %s"
    if baselabel:
        header += ' (from %s)' % baselabel
    if u1:
        repo.ui.debug("%s:\n   %s\n" % (header % 'local', "\n   ".join(u1)))
    if u2:
        repo.ui.debug("%s:\n   %s\n" % (header % 'other', "\n   ".join(u2)))

    return u1, u2

def _makegetfctx(ctx):
    """return a 'getfctx' function suitable for _checkcopies usage

    We have to re-setup the function building 'filectx' for each
    '_checkcopies' to ensure the linkrev adjustment is properly setup for
    each. Linkrev adjustment is important to avoid bugs in rename
    detection. Moreover, having a proper '_ancestrycontext' setup ensures
    the performance impact of this adjustment is kept limited. Without it,
    each file could do a full dag traversal making the time complexity of
    the operation explode (see issue4537).

    This function exists here mostly to limit the impact on stable. Feel
    free to refactor on default.
    """
    rev = ctx.rev()
    repo = ctx._repo
    ac = getattr(ctx, '_ancestrycontext', None)
    if ac is None:
        revs = [rev]
        if rev is None:
            revs = [p.rev() for p in ctx.parents()]
        ac = repo.changelog.ancestors(revs, inclusive=True)
        ctx._ancestrycontext = ac
    def makectx(f, n):
        if n in node.wdirfilenodeids:  # in a working context?
            if ctx.rev() is None:
                return ctx.filectx(f)
            return repo[None][f]
        fctx = repo.filectx(f, fileid=n)
        # setup only needed for filectx not created from a changectx
        fctx._ancestrycontext = ac
        fctx._descendantrev = rev
        return fctx
    return util.lrucachefunc(makectx)

def _combinecopies(copyfrom, copyto, finalcopy, diverge, incompletediverge):
    """combine partial copy paths"""
    remainder = {}
    for f in copyfrom:
        if f in copyto:
            finalcopy[copyto[f]] = copyfrom[f]
            del copyto[f]
    for f in incompletediverge:
        assert f not in diverge
        ic = incompletediverge[f]
        if ic[0] in copyto:
            diverge[f] = [copyto[ic[0]], ic[1]]
        else:
            remainder[f] = ic
    return remainder

def mergecopies(repo, c1, c2, base):
    """
    Finds moves and copies between context c1 and c2 that are relevant for
    merging. 'base' will be used as the merge base.

    Copytracing is used in commands like rebase, merge, unshelve, etc. to merge
    files that were moved/copied in one merge parent and modified in another.
    For example:

    o          ---> 4 another commit
    |
    |   o      ---> 3 commit that modifies a.txt
    |  /
    o /        ---> 2 commit that moves a.txt to b.txt
    |/
    o          ---> 1 merge base

    If we try to rebase revision 3 on revision 4, since there is no a.txt in
    revision 4, and if the user has copytrace disabled, we print the following
    message:

    ```other changed <file> which local deleted```

    Returns five dicts: "copy", "movewithdir", "diverge", "renamedelete" and
    "dirmove".

    "copy" is a mapping from destination name -> source name,
    where source is in c1 and destination is in c2 or vice-versa.

    "movewithdir" is a mapping from source name -> destination name,
    where the file at source present in one context but not the other
    needs to be moved to destination by the merge process, because the
    other context moved the directory it is in.

    "diverge" is a mapping of source name -> list of destination names
    for divergent renames.

    "renamedelete" is a mapping of source name -> list of destination
    names for files deleted in c1 that were renamed in c2 or vice-versa.

    "dirmove" is a mapping of detected source dir -> destination dir renames.
    This is needed for handling changes to new files previously grafted into
    renamed directories.

    This function calls different copytracing algorithms based on config.
    """
    # avoid silly behavior for update from empty dir
    if not c1 or not c2 or c1 == c2:
        return {}, {}, {}, {}, {}

    narrowmatch = c1.repo().narrowmatch()

    # avoid silly behavior for parent -> working dir
    if c2.node() is None and c1.node() == repo.dirstate.p1():
        return _dirstatecopies(repo, narrowmatch), {}, {}, {}, {}

    copytracing = repo.ui.config('experimental', 'copytrace')
    boolctrace = stringutil.parsebool(copytracing)

    # Copy trace disabling is explicitly below the node == p1 logic above
    # because the logic above is required for a simple copy to be kept across a
    # rebase.
    if copytracing == 'heuristics':
        # Do full copytracing if only non-public revisions are involved as
        # that will be fast enough and will also cover the copies which could
        # be missed by heuristics
        if _isfullcopytraceable(repo, c1, base):
            return _fullcopytracing(repo, c1, c2, base)
        return _heuristicscopytracing(repo, c1, c2, base)
    elif boolctrace is False:
        # stringutil.parsebool() returns None when it is unable to parse the
        # value, so we should rely on making sure copytracing is on in such
        # cases
        return {}, {}, {}, {}, {}
    else:
        return _fullcopytracing(repo, c1, c2, base)

def _isfullcopytraceable(repo, c1, base):
    """ Checks whether base, source and destination are all non-public
    branches; if yes, use the full copytrace algorithm for increased
    capabilities since it will be fast enough.

    `experimental.copytrace.sourcecommitlimit` can be used to set a limit for
    the number of changesets from c1 to base; if the number of changesets is
    more than the limit, the full copytracing algorithm won't be used.
    """
    if c1.rev() is None:
        c1 = c1.p1()
    if c1.mutable() and base.mutable():
        sourcecommitlimit = repo.ui.configint('experimental',
                                              'copytrace.sourcecommitlimit')
        commits = len(repo.revs('%d::%d', base.rev(), c1.rev()))
        return commits < sourcecommitlimit
    return False

def _fullcopytracing(repo, c1, c2, base):
    """ The full copytracing algorithm which finds all the new files that were
    added from merge base up to the top commit and for each file it checks if
    this file was copied from another file.

    This is pretty slow when a lot of changesets are involved but will track all
    the copies.
    """
    # In certain scenarios (e.g. graft, update or rebase), base can be
    # overridden. We still need to know a real common ancestor in this case.
    # We can't just compute _c1.ancestor(_c2) and compare it to ca, because
    # there can be multiple common ancestors, e.g. in case of bidmerge. Because
    # our caller may not know if the revision passed in lieu of the CA is a
    # genuine common ancestor or not without explicitly checking it, it's
    # better to determine that here.
    #
    # base.isancestorof(wc) is False, work around that
    _c1 = c1.p1() if c1.rev() is None else c1
    _c2 = c2.p1() if c2.rev() is None else c2
    # an endpoint is "dirty" if it isn't a descendant of the merge base
    # if we have a dirty endpoint, we need to trigger graft logic, and also
    # keep track of which endpoint is dirty
    dirtyc1 = not base.isancestorof(_c1)
    dirtyc2 = not base.isancestorof(_c2)
    graft = dirtyc1 or dirtyc2
    tca = base
    if graft:
        tca = _c1.ancestor(_c2)

    limit = _findlimit(repo, c1, c2)
    repo.ui.debug("  searching for copies back to rev %d\n" % limit)

    m1 = c1.manifest()
    m2 = c2.manifest()
    mb = base.manifest()

    # gather data from _checkcopies:
    # - diverge = record all diverges in this dict
    # - copy = record all non-divergent copies in this dict
    # - fullcopy = record all copies in this dict
    # - incomplete = record non-divergent partial copies here
    # - incompletediverge = record divergent partial copies here
    diverge = {} # divergence data is shared
    incompletediverge = {}
    data1 = {'copy': {},
             'fullcopy': {},
             'incomplete': {},
             'diverge': diverge,
             'incompletediverge': incompletediverge,
            }
    data2 = {'copy': {},
             'fullcopy': {},
             'incomplete': {},
             'diverge': diverge,
             'incompletediverge': incompletediverge,
            }

    # find interesting file sets from manifests
    addedinm1 = m1.filesnotin(mb, repo.narrowmatch())
    addedinm2 = m2.filesnotin(mb, repo.narrowmatch())
    bothnew = sorted(addedinm1 & addedinm2)
    if tca == base:
        # unmatched file from base
        u1r, u2r = _computenonoverlap(repo, c1, c2, addedinm1, addedinm2)
        u1u, u2u = u1r, u2r
    else:
        # unmatched file from base (DAG rotation in the graft case)
        u1r, u2r = _computenonoverlap(repo, c1, c2, addedinm1, addedinm2,
                                      baselabel='base')
        # unmatched file from topological common ancestors (no DAG rotation)
        # need to recompute this for directory move handling when grafting
        mta = tca.manifest()
        u1u, u2u = _computenonoverlap(repo, c1, c2,
                                      m1.filesnotin(mta, repo.narrowmatch()),
                                      m2.filesnotin(mta, repo.narrowmatch()),
                                      baselabel='topological common ancestor')

    for f in u1u:
        _checkcopies(c1, c2, f, base, tca, dirtyc1, limit, data1)

    for f in u2u:
        _checkcopies(c2, c1, f, base, tca, dirtyc2, limit, data2)

    copy = dict(data1['copy'])
    copy.update(data2['copy'])
    fullcopy = dict(data1['fullcopy'])
    fullcopy.update(data2['fullcopy'])

    if dirtyc1:
        _combinecopies(data2['incomplete'], data1['incomplete'], copy, diverge,
                       incompletediverge)
    if dirtyc2:
        _combinecopies(data1['incomplete'], data2['incomplete'], copy, diverge,
                       incompletediverge)

    renamedelete = {}
    renamedeleteset = set()
    divergeset = set()
    for of, fl in list(diverge.items()):
        if len(fl) == 1 or of in c1 or of in c2:
            del diverge[of] # not actually divergent, or not a rename
            if of not in c1 and of not in c2:
                # renamed on one side, deleted on the other side, but filter
                # out files that have been renamed and then deleted
                renamedelete[of] = [f for f in fl if f in c1 or f in c2]
                renamedeleteset.update(fl) # reverse map for below
        else:
            divergeset.update(fl) # reverse map for below

    if bothnew:
        repo.ui.debug("  unmatched files new in both:\n   %s\n"
                      % "\n   ".join(bothnew))
    bothdiverge = {}
    bothincompletediverge = {}
    remainder = {}
    both1 = {'copy': {},
             'fullcopy': {},
             'incomplete': {},
             'diverge': bothdiverge,
             'incompletediverge': bothincompletediverge
            }
    both2 = {'copy': {},
             'fullcopy': {},
             'incomplete': {},
             'diverge': bothdiverge,
             'incompletediverge': bothincompletediverge
            }
    for f in bothnew:
        _checkcopies(c1, c2, f, base, tca, dirtyc1, limit, both1)
        _checkcopies(c2, c1, f, base, tca, dirtyc2, limit, both2)
    if dirtyc1 and dirtyc2:
        remainder = _combinecopies(both2['incomplete'], both1['incomplete'],
                                   copy, bothdiverge, bothincompletediverge)
        remainder1 = _combinecopies(both1['incomplete'], both2['incomplete'],
                                    copy, bothdiverge, bothincompletediverge)
        remainder.update(remainder1)
    elif dirtyc1:
        # incomplete copies may only be found on the "dirty" side for bothnew
        assert not both2['incomplete']
        remainder = _combinecopies({}, both1['incomplete'], copy, bothdiverge,
                                   bothincompletediverge)
    elif dirtyc2:
        assert not both1['incomplete']
        remainder = _combinecopies({}, both2['incomplete'], copy, bothdiverge,
                                   bothincompletediverge)
    else:
        # incomplete copies and divergences can't happen outside grafts
        assert not both1['incomplete']
        assert not both2['incomplete']
        assert not bothincompletediverge
    for f in remainder:
        assert f not in bothdiverge
        ic = remainder[f]
        if ic[0] in (m1 if dirtyc1 else m2):
            # backed-out rename on one side, but watch out for deleted files
            bothdiverge[f] = ic
    for of, fl in bothdiverge.items():
        if len(fl) == 2 and fl[0] == fl[1]:
            copy[fl[0]] = of # not actually divergent, just matching renames

+    # Sometimes we get invalid copies here (the "and not remotebase" in
+    # _checkcopies() seems suspicious). Filter them out.
+    for dst, src in fullcopy.copy().items():
+        if src not in mb:
+            del fullcopy[dst]
    if fullcopy and repo.ui.debugflag:
        repo.ui.debug("  all copies found (* = to merge, ! = divergent, "
                      "% = renamed and deleted):\n")
        for f in sorted(fullcopy):
            note = ""
            if f in copy:
                note += "*"
            if f in divergeset:
                note += "!"
            if f in renamedeleteset:
                note += "%"
            repo.ui.debug("   src: '%s' -> dst: '%s' %s\n" % (fullcopy[f], f,
                                                              note))
    del divergeset

    if not fullcopy:
        return copy, {}, diverge, renamedelete, {}

    repo.ui.debug("  checking for directory renames\n")

    # generate a directory move map
    d1, d2 = c1.dirs(), c2.dirs()
    # Hack for adding '', which is not otherwise added, to d1 and d2
    d1.addpath('/')
    d2.addpath('/')
    invalid = set()
    dirmove = {}

    # examine each file copy for a potential directory move, which is
    # when all the files in a directory are moved to a new directory
    for dst, src in fullcopy.iteritems():
        dsrc, ddst = pathutil.dirname(src), pathutil.dirname(dst)
        if dsrc in invalid:
            # already seen to be uninteresting
            continue
        elif dsrc in d1 and ddst in d1:
            # directory wasn't entirely moved locally
            invalid.add(dsrc)
        elif dsrc in d2 and ddst in d2:
            # directory wasn't entirely moved remotely
            invalid.add(dsrc)
        elif dsrc in dirmove and dirmove[dsrc] != ddst:
            # files from the same directory moved to two different places
            invalid.add(dsrc)
        else:
            # looks good so far
            dirmove[dsrc] = ddst

    for i in invalid:
        if i in dirmove:
            del dirmove[i]
    del d1, d2, invalid

    if not dirmove:
        return copy, {}, diverge, renamedelete, {}

    dirmove = {k + "/": v + "/" for k, v in dirmove.iteritems()}

    for d in dirmove:
        repo.ui.debug("   discovered dir src: '%s' -> dst: '%s'\n" %
                      (d, dirmove[d]))

    movewithdir = {}
    # check unaccounted nonoverlapping files against directory moves
    for f in u1r + u2r:
        if f not in fullcopy:
            for d in dirmove:
                if f.startswith(d):
                    # new file added in a directory that was moved, move it
                    df = dirmove[d] + f[len(d):]
                    if df not in copy:
                        movewithdir[f] = df
                        repo.ui.debug(("   pending file src: '%s' -> "
                                       "dst: '%s'\n") % (f, df))
                    break

    return copy, movewithdir, diverge, renamedelete, dirmove

763 def _heuristicscopytracing(repo, c1, c2, base):
768 def _heuristicscopytracing(repo, c1, c2, base):
764 """ Fast copytracing using filename heuristics
769 """ Fast copytracing using filename heuristics
765
770
766 Assumes that moves or renames are of following two types:
771 Assumes that moves or renames are of following two types:
767
772
768 1) Inside a directory only (same directory name but different filenames)
773 1) Inside a directory only (same directory name but different filenames)
769 2) Move from one directory to another
774 2) Move from one directory to another
770 (same filenames but different directory names)
775 (same filenames but different directory names)
771
776
772 Works only when there are no merge commits in the "source branch".
777 Works only when there are no merge commits in the "source branch".
773 Source branch is commits from base up to c2 not including base.
778 Source branch is commits from base up to c2 not including base.
774
779
775 If merge is involved it fallbacks to _fullcopytracing().
780 If merge is involved it fallbacks to _fullcopytracing().
776
781
777 Can be used by setting the following config:
782 Can be used by setting the following config:
778
783
779 [experimental]
784 [experimental]
780 copytrace = heuristics
785 copytrace = heuristics
781
786
782 In some cases the copy/move candidates found by heuristics can be very large
787 In some cases the copy/move candidates found by heuristics can be very large
783 in number and that will make the algorithm slow. The number of possible
788 in number and that will make the algorithm slow. The number of possible
784 candidates to check can be limited by using the config
789 candidates to check can be limited by using the config
785 `experimental.copytrace.movecandidateslimit` which defaults to 100.
790 `experimental.copytrace.movecandidateslimit` which defaults to 100.
786 """
791 """
787
792
788 if c1.rev() is None:
793 if c1.rev() is None:
789 c1 = c1.p1()
794 c1 = c1.p1()
790 if c2.rev() is None:
795 if c2.rev() is None:
791 c2 = c2.p1()
796 c2 = c2.p1()
792
797
793 copies = {}
798 copies = {}
794
799
795 changedfiles = set()
800 changedfiles = set()
796 m1 = c1.manifest()
801 m1 = c1.manifest()
797 if not repo.revs('%d::%d', base.rev(), c2.rev()):
802 if not repo.revs('%d::%d', base.rev(), c2.rev()):
798 # If base is not in c2 branch, we switch to fullcopytracing
803 # If base is not in c2 branch, we switch to fullcopytracing
799 repo.ui.debug("switching to full copytracing as base is not "
804 repo.ui.debug("switching to full copytracing as base is not "
800 "an ancestor of c2\n")
805 "an ancestor of c2\n")
801 return _fullcopytracing(repo, c1, c2, base)
806 return _fullcopytracing(repo, c1, c2, base)
802
807
803 ctx = c2
808 ctx = c2
804 while ctx != base:
809 while ctx != base:
805 if len(ctx.parents()) == 2:
810 if len(ctx.parents()) == 2:
806 # To keep things simple let's not handle merges
811 # To keep things simple let's not handle merges
807 repo.ui.debug("switching to full copytracing because of merges\n")
812 repo.ui.debug("switching to full copytracing because of merges\n")
808 return _fullcopytracing(repo, c1, c2, base)
813 return _fullcopytracing(repo, c1, c2, base)
809 changedfiles.update(ctx.files())
814 changedfiles.update(ctx.files())
810 ctx = ctx.p1()
815 ctx = ctx.p1()
811
816
812 cp = _forwardcopies(base, c2)
817 cp = _forwardcopies(base, c2)
813 for dst, src in cp.iteritems():
818 for dst, src in cp.iteritems():
814 if src in m1:
819 if src in m1:
815 copies[dst] = src
820 copies[dst] = src
816
821
817 # file is missing if it isn't present in the destination, but is present in
822 # file is missing if it isn't present in the destination, but is present in
818 # the base and present in the source.
823 # the base and present in the source.
819 # Presence in the base is important to exclude added files, presence in the
824 # Presence in the base is important to exclude added files, presence in the
820 # source is important to exclude removed files.
825 # source is important to exclude removed files.
821 filt = lambda f: f not in m1 and f in base and f in c2
826 filt = lambda f: f not in m1 and f in base and f in c2
822 missingfiles = [f for f in changedfiles if filt(f)]
827 missingfiles = [f for f in changedfiles if filt(f)]
823
828
824 if missingfiles:
829 if missingfiles:
825 basenametofilename = collections.defaultdict(list)
830 basenametofilename = collections.defaultdict(list)
826 dirnametofilename = collections.defaultdict(list)
831 dirnametofilename = collections.defaultdict(list)
827
832
828 for f in m1.filesnotin(base.manifest()):
833 for f in m1.filesnotin(base.manifest()):
829 basename = os.path.basename(f)
834 basename = os.path.basename(f)
830 dirname = os.path.dirname(f)
835 dirname = os.path.dirname(f)
831 basenametofilename[basename].append(f)
836 basenametofilename[basename].append(f)
832 dirnametofilename[dirname].append(f)
837 dirnametofilename[dirname].append(f)
833
838
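# Illustration (hypothetical paths): if 'dir1/a.c' was moved to 'dir2/a.c'
# on the c1 side, the loop above yields
#   basenametofilename['a.c'] == ['dir2/a.c']
#   dirnametofilename['dir2'] == ['dir2/a.c']
# so a file that went missing under its old path can be matched below either
# by sharing a basename (moved to another directory) or by sharing a
# directory (renamed within the same directory).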
834 for f in missingfiles:
839 for f in missingfiles:
835 basename = os.path.basename(f)
840 basename = os.path.basename(f)
836 dirname = os.path.dirname(f)
841 dirname = os.path.dirname(f)
837 samebasename = basenametofilename[basename]
842 samebasename = basenametofilename[basename]
838 samedirname = dirnametofilename[dirname]
843 samedirname = dirnametofilename[dirname]
839 movecandidates = samebasename + samedirname
844 movecandidates = samebasename + samedirname
840 # f is guaranteed to be present in c2, that's why
845 # f is guaranteed to be present in c2, that's why
841 # c2.filectx(f) won't fail
846 # c2.filectx(f) won't fail
842 f2 = c2.filectx(f)
847 f2 = c2.filectx(f)
843 # we can have a lot of candidates, which can slow down the heuristics.
848 # we can have a lot of candidates, which can slow down the heuristics.
844 # this config value limits the number of candidate moves to check.
849 # this config value limits the number of candidate moves to check.
845 maxcandidates = repo.ui.configint('experimental',
850 maxcandidates = repo.ui.configint('experimental',
846 'copytrace.movecandidateslimit')
851 'copytrace.movecandidateslimit')
847
852
848 if len(movecandidates) > maxcandidates:
853 if len(movecandidates) > maxcandidates:
849 repo.ui.status(_("skipping copytracing for '%s', more "
854 repo.ui.status(_("skipping copytracing for '%s', more "
850 "candidates than the limit: %d\n")
855 "candidates than the limit: %d\n")
851 % (f, len(movecandidates)))
856 % (f, len(movecandidates)))
852 continue
857 continue
853
858
854 for candidate in movecandidates:
859 for candidate in movecandidates:
855 f1 = c1.filectx(candidate)
860 f1 = c1.filectx(candidate)
856 if _related(f1, f2):
861 if _related(f1, f2):
857 # if there are a few related copies then we'll merge
862 # if there are a few related copies then we'll merge
858 # changes into all of them. This matches the behaviour
863 # changes into all of them. This matches the behaviour
859 # of upstream copytracing
864 # of upstream copytracing
860 copies[candidate] = f
865 copies[candidate] = f
861
866
862 return copies, {}, {}, {}, {}
867 return copies, {}, {}, {}, {}
863
868
864 def _related(f1, f2):
869 def _related(f1, f2):
865 """return True if f1 and f2 filectx have a common ancestor
870 """return True if f1 and f2 filectx have a common ancestor
866
871
867 Walk back to common ancestor to see if the two files originate
872 Walk back to common ancestor to see if the two files originate
868 from the same file. Since workingfilectx's rev() is None it messes
873 from the same file. Since workingfilectx's rev() is None it messes
869 up the integer comparison logic, hence the pre-step check for
874 up the integer comparison logic, hence the pre-step check for
870 None (f1 and f2 can only be workingfilectx's initially).
875 None (f1 and f2 can only be workingfilectx's initially).
871 """
876 """
872
877
873 if f1 == f2:
878 if f1 == f2:
874 return True # a match
879 return True # a match
875
880
876 g1, g2 = f1.ancestors(), f2.ancestors()
881 g1, g2 = f1.ancestors(), f2.ancestors()
877 try:
882 try:
878 f1r, f2r = f1.linkrev(), f2.linkrev()
883 f1r, f2r = f1.linkrev(), f2.linkrev()
879
884
880 if f1r is None:
885 if f1r is None:
881 f1 = next(g1)
886 f1 = next(g1)
882 if f2r is None:
887 if f2r is None:
883 f2 = next(g2)
888 f2 = next(g2)
884
889
885 while True:
890 while True:
886 f1r, f2r = f1.linkrev(), f2.linkrev()
891 f1r, f2r = f1.linkrev(), f2.linkrev()
887 if f1r > f2r:
892 if f1r > f2r:
888 f1 = next(g1)
893 f1 = next(g1)
889 elif f2r > f1r:
894 elif f2r > f1r:
890 f2 = next(g2)
895 f2 = next(g2)
891 else: # f1 and f2 point to files in the same linkrev
896 else: # f1 and f2 point to files in the same linkrev
892 return f1 == f2 # true if they point to the same file
897 return f1 == f2 # true if they point to the same file
893 except StopIteration:
898 except StopIteration:
894 return False
899 return False
895
900
896 def _checkcopies(srcctx, dstctx, f, base, tca, remotebase, limit, data):
901 def _checkcopies(srcctx, dstctx, f, base, tca, remotebase, limit, data):
897 """
902 """
898 check possible copies of f from msrc to mdst
903 check possible copies of f from msrc to mdst
899
904
900 srcctx = starting context for f in msrc
905 srcctx = starting context for f in msrc
901 dstctx = destination context for f in mdst
906 dstctx = destination context for f in mdst
902 f = the filename to check (as in msrc)
907 f = the filename to check (as in msrc)
903 base = the changectx used as a merge base
908 base = the changectx used as a merge base
904 tca = topological common ancestor for graft-like scenarios
909 tca = topological common ancestor for graft-like scenarios
905 remotebase = True if base is outside tca::srcctx, False otherwise
910 remotebase = True if base is outside tca::srcctx, False otherwise
906 limit = the rev number to not search beyond
911 limit = the rev number to not search beyond
907 data = dictionary of dictionary to store copy data. (see mergecopies)
912 data = dictionary of dictionary to store copy data. (see mergecopies)
908
913
909 note: limit is only an optimization, and provides no guarantee that
914 note: limit is only an optimization, and provides no guarantee that
910 irrelevant revisions will not be visited;
915 irrelevant revisions will not be visited;
911 there is no easy way to make this algorithm stop in a guaranteed way
916 there is no easy way to make this algorithm stop in a guaranteed way
912 once it "goes behind a certain revision".
917 once it "goes behind a certain revision".
913 """
918 """
914
919
915 msrc = srcctx.manifest()
920 msrc = srcctx.manifest()
916 mdst = dstctx.manifest()
921 mdst = dstctx.manifest()
917 mb = base.manifest()
922 mb = base.manifest()
918 mta = tca.manifest()
923 mta = tca.manifest()
919 # 'backwards' might be true if this call is about finding backward renames.
924 # 'backwards' might be true if this call is about finding backward renames.
920 # This happens in the case of grafts because the DAG is then rotated.
925 # This happens in the case of grafts because the DAG is then rotated.
921 # If the file exists in both the base and the source, we are not looking
926 # If the file exists in both the base and the source, we are not looking
922 # for a rename on the source side, but on the part of the DAG that is
927 # for a rename on the source side, but on the part of the DAG that is
923 # traversed backwards.
928 # traversed backwards.
924 #
929 #
925 # In case there are both backward and forward renames (before and after
930 # In case there are both backward and forward renames (before and after
926 # the base) this is more complicated as we must detect a divergence.
931 # the base) this is more complicated as we must detect a divergence.
927 # We use 'backwards = False' in that case.
932 # We use 'backwards = False' in that case.
928 backwards = not remotebase and base != tca and f in mb
933 backwards = not remotebase and base != tca and f in mb
929 getsrcfctx = _makegetfctx(srcctx)
934 getsrcfctx = _makegetfctx(srcctx)
930 getdstfctx = _makegetfctx(dstctx)
935 getdstfctx = _makegetfctx(dstctx)
931
936
932 if msrc[f] == mb.get(f) and not remotebase:
937 if msrc[f] == mb.get(f) and not remotebase:
933 # Nothing to merge
938 # Nothing to merge
934 return
939 return
935
940
936 of = None
941 of = None
937 seen = {f}
942 seen = {f}
938 for oc in getsrcfctx(f, msrc[f]).ancestors():
943 for oc in getsrcfctx(f, msrc[f]).ancestors():
939 of = oc.path()
944 of = oc.path()
940 if of in seen:
945 if of in seen:
941 # check limit late - grab last rename before
946 # check limit late - grab last rename before
942 if oc.linkrev() < limit:
947 if oc.linkrev() < limit:
943 break
948 break
944 continue
949 continue
945 seen.add(of)
950 seen.add(of)
946
951
947 # remember for dir rename detection
952 # remember for dir rename detection
948 if backwards:
953 if backwards:
949 data['fullcopy'][of] = f # grafting backwards through renames
954 data['fullcopy'][of] = f # grafting backwards through renames
950 else:
955 else:
951 data['fullcopy'][f] = of
956 data['fullcopy'][f] = of
952 if of not in mdst:
957 if of not in mdst:
953 continue # no match, keep looking
958 continue # no match, keep looking
954 if mdst[of] == mb.get(of):
959 if mdst[of] == mb.get(of):
955 return # no merge needed, quit early
960 return # no merge needed, quit early
956 c2 = getdstfctx(of, mdst[of])
961 c2 = getdstfctx(of, mdst[of])
957 # c2 might be a plain new file added on the destination side that is
962 # c2 might be a plain new file added on the destination side that is
958 # unrelated to the droids we are looking for.
963 # unrelated to the droids we are looking for.
959 cr = _related(oc, c2)
964 cr = _related(oc, c2)
960 if cr and (of == f or of == c2.path()): # non-divergent
965 if cr and (of == f or of == c2.path()): # non-divergent
961 if backwards:
966 if backwards:
962 data['copy'][of] = f
967 data['copy'][of] = f
963 elif of in mb:
968 elif of in mb:
964 data['copy'][f] = of
969 data['copy'][f] = of
965 elif remotebase: # special case: a <- b <- a -> b "ping-pong" rename
970 elif remotebase: # special case: a <- b <- a -> b "ping-pong" rename
966 data['copy'][of] = f
971 data['copy'][of] = f
967 del data['fullcopy'][f]
972 del data['fullcopy'][f]
968 data['fullcopy'][of] = f
973 data['fullcopy'][of] = f
969 else: # divergence w.r.t. graft CA on one side of topological CA
974 else: # divergence w.r.t. graft CA on one side of topological CA
970 for sf in seen:
975 for sf in seen:
971 if sf in mb:
976 if sf in mb:
972 assert sf not in data['diverge']
977 assert sf not in data['diverge']
973 data['diverge'][sf] = [f, of]
978 data['diverge'][sf] = [f, of]
974 break
979 break
975 return
980 return
976
981
977 if of in mta:
982 if of in mta:
978 if backwards or remotebase:
983 if backwards or remotebase:
979 data['incomplete'][of] = f
984 data['incomplete'][of] = f
980 else:
985 else:
981 for sf in seen:
986 for sf in seen:
982 if sf in mb:
987 if sf in mb:
983 if tca == base:
988 if tca == base:
984 data['diverge'].setdefault(sf, []).append(f)
989 data['diverge'].setdefault(sf, []).append(f)
985 else:
990 else:
986 data['incompletediverge'][sf] = [of, f]
991 data['incompletediverge'][sf] = [of, f]
987 return
992 return
988
993
989 def duplicatecopies(repo, wctx, rev, fromrev, skiprev=None):
994 def duplicatecopies(repo, wctx, rev, fromrev, skiprev=None):
990 """reproduce copies from fromrev to rev in the dirstate
995 """reproduce copies from fromrev to rev in the dirstate
991
996
992 If skiprev is specified, it's a revision that should be used to
997 If skiprev is specified, it's a revision that should be used to
993 filter copy records. Any copies that occur between fromrev and
998 filter copy records. Any copies that occur between fromrev and
994 skiprev will not be duplicated, even if they appear in the set of
999 skiprev will not be duplicated, even if they appear in the set of
995 copies between fromrev and rev.
1000 copies between fromrev and rev.
996 """
1001 """
997 exclude = {}
1002 exclude = {}
998 ctraceconfig = repo.ui.config('experimental', 'copytrace')
1003 ctraceconfig = repo.ui.config('experimental', 'copytrace')
999 bctrace = stringutil.parsebool(ctraceconfig)
1004 bctrace = stringutil.parsebool(ctraceconfig)
1000 if (skiprev is not None and
1005 if (skiprev is not None and
1001 (ctraceconfig == 'heuristics' or bctrace or bctrace is None)):
1006 (ctraceconfig == 'heuristics' or bctrace or bctrace is None)):
1002 # copytrace='off' skips this line, but not the entire function because
1007 # copytrace='off' skips this line, but not the entire function because
1003 # the line below is O(size of the repo) during a rebase, while the rest
1008 # the line below is O(size of the repo) during a rebase, while the rest
1004 # of the function is much faster (and is required for carrying copy
1009 # of the function is much faster (and is required for carrying copy
1005 # metadata across the rebase anyway).
1010 # metadata across the rebase anyway).
1006 exclude = pathcopies(repo[fromrev], repo[skiprev])
1011 exclude = pathcopies(repo[fromrev], repo[skiprev])
1007 for dst, src in pathcopies(repo[fromrev], repo[rev]).iteritems():
1012 for dst, src in pathcopies(repo[fromrev], repo[rev]).iteritems():
1008 # copies.pathcopies returns backward renames, so dst might not
1013 # copies.pathcopies returns backward renames, so dst might not
1009 # actually be in the dirstate
1014 # actually be in the dirstate
1010 if dst in exclude:
1015 if dst in exclude:
1011 continue
1016 continue
1012 wctx[dst].markcopied(src)
1017 wctx[dst].markcopied(src)
@@ -1,467 +1,459 b''
1 Criss cross merging
1 Criss cross merging
2
2
3 $ hg init criss-cross
3 $ hg init criss-cross
4 $ cd criss-cross
4 $ cd criss-cross
5 $ echo '0 base' > f1
5 $ echo '0 base' > f1
6 $ echo '0 base' > f2
6 $ echo '0 base' > f2
7 $ hg ci -Aqm '0 base'
7 $ hg ci -Aqm '0 base'
8
8
9 $ echo '1 first change' > f1
9 $ echo '1 first change' > f1
10 $ hg ci -m '1 first change f1'
10 $ hg ci -m '1 first change f1'
11
11
12 $ hg up -qr0
12 $ hg up -qr0
13 $ echo '2 first change' > f2
13 $ echo '2 first change' > f2
14 $ hg ci -qm '2 first change f2'
14 $ hg ci -qm '2 first change f2'
15
15
16 $ hg merge -qr 1
16 $ hg merge -qr 1
17 $ hg ci -m '3 merge'
17 $ hg ci -m '3 merge'
18
18
19 $ hg up -qr2
19 $ hg up -qr2
20 $ hg merge -qr1
20 $ hg merge -qr1
21 $ hg ci -qm '4 merge'
21 $ hg ci -qm '4 merge'
22
22
23 $ echo '5 second change' > f1
23 $ echo '5 second change' > f1
24 $ hg ci -m '5 second change f1'
24 $ hg ci -m '5 second change f1'
25
25
26 $ hg up -r3
26 $ hg up -r3
27 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
27 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
28 $ echo '6 second change' > f2
28 $ echo '6 second change' > f2
29 $ hg ci -m '6 second change f2'
29 $ hg ci -m '6 second change f2'
30
30
31 $ hg log -G
31 $ hg log -G
32 @ changeset: 6:3b08d01b0ab5
32 @ changeset: 6:3b08d01b0ab5
33 | tag: tip
33 | tag: tip
34 | parent: 3:cf89f02107e5
34 | parent: 3:cf89f02107e5
35 | user: test
35 | user: test
36 | date: Thu Jan 01 00:00:00 1970 +0000
36 | date: Thu Jan 01 00:00:00 1970 +0000
37 | summary: 6 second change f2
37 | summary: 6 second change f2
38 |
38 |
39 | o changeset: 5:adfe50279922
39 | o changeset: 5:adfe50279922
40 | | user: test
40 | | user: test
41 | | date: Thu Jan 01 00:00:00 1970 +0000
41 | | date: Thu Jan 01 00:00:00 1970 +0000
42 | | summary: 5 second change f1
42 | | summary: 5 second change f1
43 | |
43 | |
44 | o changeset: 4:7d3e55501ae6
44 | o changeset: 4:7d3e55501ae6
45 | |\ parent: 2:40663881a6dd
45 | |\ parent: 2:40663881a6dd
46 | | | parent: 1:0f6b37dbe527
46 | | | parent: 1:0f6b37dbe527
47 | | | user: test
47 | | | user: test
48 | | | date: Thu Jan 01 00:00:00 1970 +0000
48 | | | date: Thu Jan 01 00:00:00 1970 +0000
49 | | | summary: 4 merge
49 | | | summary: 4 merge
50 | | |
50 | | |
51 o---+ changeset: 3:cf89f02107e5
51 o---+ changeset: 3:cf89f02107e5
52 | | | parent: 2:40663881a6dd
52 | | | parent: 2:40663881a6dd
53 |/ / parent: 1:0f6b37dbe527
53 |/ / parent: 1:0f6b37dbe527
54 | | user: test
54 | | user: test
55 | | date: Thu Jan 01 00:00:00 1970 +0000
55 | | date: Thu Jan 01 00:00:00 1970 +0000
56 | | summary: 3 merge
56 | | summary: 3 merge
57 | |
57 | |
58 | o changeset: 2:40663881a6dd
58 | o changeset: 2:40663881a6dd
59 | | parent: 0:40494bf2444c
59 | | parent: 0:40494bf2444c
60 | | user: test
60 | | user: test
61 | | date: Thu Jan 01 00:00:00 1970 +0000
61 | | date: Thu Jan 01 00:00:00 1970 +0000
62 | | summary: 2 first change f2
62 | | summary: 2 first change f2
63 | |
63 | |
64 o | changeset: 1:0f6b37dbe527
64 o | changeset: 1:0f6b37dbe527
65 |/ user: test
65 |/ user: test
66 | date: Thu Jan 01 00:00:00 1970 +0000
66 | date: Thu Jan 01 00:00:00 1970 +0000
67 | summary: 1 first change f1
67 | summary: 1 first change f1
68 |
68 |
69 o changeset: 0:40494bf2444c
69 o changeset: 0:40494bf2444c
70 user: test
70 user: test
71 date: Thu Jan 01 00:00:00 1970 +0000
71 date: Thu Jan 01 00:00:00 1970 +0000
72 summary: 0 base
72 summary: 0 base
73
73
74
74
75 $ hg merge -v --debug --tool internal:dump 5 --config merge.preferancestor='!'
75 $ hg merge -v --debug --tool internal:dump 5 --config merge.preferancestor='!'
76 note: using 0f6b37dbe527 as ancestor of 3b08d01b0ab5 and adfe50279922
76 note: using 0f6b37dbe527 as ancestor of 3b08d01b0ab5 and adfe50279922
77 alternatively, use --config merge.preferancestor=40663881a6dd
77 alternatively, use --config merge.preferancestor=40663881a6dd
78 searching for copies back to rev 3
78 searching for copies back to rev 3
79 resolving manifests
79 resolving manifests
80 branchmerge: True, force: False, partial: False
80 branchmerge: True, force: False, partial: False
81 ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922
81 ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922
82 preserving f2 for resolve of f2
82 preserving f2 for resolve of f2
83 f1: remote is newer -> g
83 f1: remote is newer -> g
84 getting f1
84 getting f1
85 f2: versions differ -> m (premerge)
85 f2: versions differ -> m (premerge)
86 picked tool ':dump' for f2 (binary False symlink False changedelete False)
86 picked tool ':dump' for f2 (binary False symlink False changedelete False)
87 merging f2
87 merging f2
88 my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@0f6b37dbe527
88 my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@0f6b37dbe527
89 f2: versions differ -> m (merge)
89 f2: versions differ -> m (merge)
90 picked tool ':dump' for f2 (binary False symlink False changedelete False)
90 picked tool ':dump' for f2 (binary False symlink False changedelete False)
91 my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@0f6b37dbe527
91 my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@0f6b37dbe527
92 1 files updated, 0 files merged, 0 files removed, 1 files unresolved
92 1 files updated, 0 files merged, 0 files removed, 1 files unresolved
93 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
93 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
94 [1]
94 [1]
95
95
96 $ f --dump *
96 $ f --dump *
97 f1:
97 f1:
98 >>>
98 >>>
99 5 second change
99 5 second change
100 <<<
100 <<<
101 f2:
101 f2:
102 >>>
102 >>>
103 6 second change
103 6 second change
104 <<<
104 <<<
105 f2.base:
105 f2.base:
106 >>>
106 >>>
107 0 base
107 0 base
108 <<<
108 <<<
109 f2.local:
109 f2.local:
110 >>>
110 >>>
111 6 second change
111 6 second change
112 <<<
112 <<<
113 f2.orig:
113 f2.orig:
114 >>>
114 >>>
115 6 second change
115 6 second change
116 <<<
116 <<<
117 f2.other:
117 f2.other:
118 >>>
118 >>>
119 2 first change
119 2 first change
120 <<<
120 <<<
121
121
122 $ hg up -qC .
122 $ hg up -qC .
123 $ hg merge -v --tool internal:dump 5 --config merge.preferancestor="null 40663881 3b08d"
123 $ hg merge -v --tool internal:dump 5 --config merge.preferancestor="null 40663881 3b08d"
124 note: using 40663881a6dd as ancestor of 3b08d01b0ab5 and adfe50279922
124 note: using 40663881a6dd as ancestor of 3b08d01b0ab5 and adfe50279922
125 alternatively, use --config merge.preferancestor=0f6b37dbe527
125 alternatively, use --config merge.preferancestor=0f6b37dbe527
126 resolving manifests
126 resolving manifests
127 merging f1
127 merging f1
128 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
128 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
129 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
129 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
130 [1]
130 [1]
131
131
132 Redo merge with merge.preferancestor="*" to enable bid merge
132 Redo merge with merge.preferancestor="*" to enable bid merge
133
133
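The bid merge behaviour exercised below with --config could presumably also be
enabled permanently through an hgrc snippet like the following (illustrative,
not part of the recorded test output):

  [merge]
  preferancestor = *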
134 $ rm f*
134 $ rm f*
135 $ hg up -qC .
135 $ hg up -qC .
136 $ hg merge -v --debug --tool internal:dump 5 --config merge.preferancestor="*"
136 $ hg merge -v --debug --tool internal:dump 5 --config merge.preferancestor="*"
137 note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
137 note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
138
138
139 calculating bids for ancestor 0f6b37dbe527
139 calculating bids for ancestor 0f6b37dbe527
140 searching for copies back to rev 3
140 searching for copies back to rev 3
141 resolving manifests
141 resolving manifests
142 branchmerge: True, force: False, partial: False
142 branchmerge: True, force: False, partial: False
143 ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922
143 ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922
144 f1: remote is newer -> g
144 f1: remote is newer -> g
145 f2: versions differ -> m
145 f2: versions differ -> m
146
146
147 calculating bids for ancestor 40663881a6dd
147 calculating bids for ancestor 40663881a6dd
148 searching for copies back to rev 3
148 searching for copies back to rev 3
149 resolving manifests
149 resolving manifests
150 branchmerge: True, force: False, partial: False
150 branchmerge: True, force: False, partial: False
151 ancestor: 40663881a6dd, local: 3b08d01b0ab5+, remote: adfe50279922
151 ancestor: 40663881a6dd, local: 3b08d01b0ab5+, remote: adfe50279922
152 f1: versions differ -> m
152 f1: versions differ -> m
153 f2: remote unchanged -> k
153 f2: remote unchanged -> k
154
154
155 auction for merging merge bids
155 auction for merging merge bids
156 f1: picking 'get' action
156 f1: picking 'get' action
157 f2: picking 'keep' action
157 f2: picking 'keep' action
158 end of auction
158 end of auction
159
159
160 f1: remote is newer -> g
160 f1: remote is newer -> g
161 getting f1
161 getting f1
162 f2: remote unchanged -> k
162 f2: remote unchanged -> k
163 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
163 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
164 (branch merge, don't forget to commit)
164 (branch merge, don't forget to commit)
165
165
166 $ f --dump *
166 $ f --dump *
167 f1:
167 f1:
168 >>>
168 >>>
169 5 second change
169 5 second change
170 <<<
170 <<<
171 f2:
171 f2:
172 >>>
172 >>>
173 6 second change
173 6 second change
174 <<<
174 <<<
175
175
176
176
177 The other way around:
177 The other way around:
178
178
179 $ hg up -C -r5
179 $ hg up -C -r5
180 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
180 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
181 $ hg merge -v --debug --config merge.preferancestor="*"
181 $ hg merge -v --debug --config merge.preferancestor="*"
182 note: merging adfe50279922+ and 3b08d01b0ab5 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
182 note: merging adfe50279922+ and 3b08d01b0ab5 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
183
183
184 calculating bids for ancestor 0f6b37dbe527
184 calculating bids for ancestor 0f6b37dbe527
185 searching for copies back to rev 3
185 searching for copies back to rev 3
186 resolving manifests
186 resolving manifests
187 branchmerge: True, force: False, partial: False
187 branchmerge: True, force: False, partial: False
188 ancestor: 0f6b37dbe527, local: adfe50279922+, remote: 3b08d01b0ab5
188 ancestor: 0f6b37dbe527, local: adfe50279922+, remote: 3b08d01b0ab5
189 f1: remote unchanged -> k
189 f1: remote unchanged -> k
190 f2: versions differ -> m
190 f2: versions differ -> m
191
191
192 calculating bids for ancestor 40663881a6dd
192 calculating bids for ancestor 40663881a6dd
193 searching for copies back to rev 3
193 searching for copies back to rev 3
194 resolving manifests
194 resolving manifests
195 branchmerge: True, force: False, partial: False
195 branchmerge: True, force: False, partial: False
196 ancestor: 40663881a6dd, local: adfe50279922+, remote: 3b08d01b0ab5
196 ancestor: 40663881a6dd, local: adfe50279922+, remote: 3b08d01b0ab5
197 f1: versions differ -> m
197 f1: versions differ -> m
198 f2: remote is newer -> g
198 f2: remote is newer -> g
199
199
200 auction for merging merge bids
200 auction for merging merge bids
201 f1: picking 'keep' action
201 f1: picking 'keep' action
202 f2: picking 'get' action
202 f2: picking 'get' action
203 end of auction
203 end of auction
204
204
205 f2: remote is newer -> g
205 f2: remote is newer -> g
206 getting f2
206 getting f2
207 f1: remote unchanged -> k
207 f1: remote unchanged -> k
208 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
208 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
209 (branch merge, don't forget to commit)
209 (branch merge, don't forget to commit)
210
210
211 $ f --dump *
211 $ f --dump *
212 f1:
212 f1:
213 >>>
213 >>>
214 5 second change
214 5 second change
215 <<<
215 <<<
216 f2:
216 f2:
217 >>>
217 >>>
218 6 second change
218 6 second change
219 <<<
219 <<<
220
220
221 Verify how the output looks and how verbose it is:
221 Verify how the output looks and how verbose it is:
222
222
223 $ hg up -qC
223 $ hg up -qC
224 $ hg merge
224 $ hg merge
225 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
225 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
226 (branch merge, don't forget to commit)
226 (branch merge, don't forget to commit)
227
227
228 $ hg up -qC tip
228 $ hg up -qC tip
229 $ hg merge -v
229 $ hg merge -v
230 note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
230 note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
231
231
232 calculating bids for ancestor 0f6b37dbe527
232 calculating bids for ancestor 0f6b37dbe527
233 resolving manifests
233 resolving manifests
234
234
235 calculating bids for ancestor 40663881a6dd
235 calculating bids for ancestor 40663881a6dd
236 resolving manifests
236 resolving manifests
237
237
238 auction for merging merge bids
238 auction for merging merge bids
239 f1: picking 'get' action
239 f1: picking 'get' action
240 f2: picking 'keep' action
240 f2: picking 'keep' action
241 end of auction
241 end of auction
242
242
243 getting f1
243 getting f1
244 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
244 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
245 (branch merge, don't forget to commit)
245 (branch merge, don't forget to commit)
246
246
247 $ hg up -qC
247 $ hg up -qC
248 $ hg merge -v --debug --config merge.preferancestor="*"
248 $ hg merge -v --debug --config merge.preferancestor="*"
249 note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
249 note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd
250
250
251 calculating bids for ancestor 0f6b37dbe527
251 calculating bids for ancestor 0f6b37dbe527
252 searching for copies back to rev 3
252 searching for copies back to rev 3
253 resolving manifests
253 resolving manifests
254 branchmerge: True, force: False, partial: False
254 branchmerge: True, force: False, partial: False
255 ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922
255 ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922
256 f1: remote is newer -> g
256 f1: remote is newer -> g
257 f2: versions differ -> m
257 f2: versions differ -> m
258
258
259 calculating bids for ancestor 40663881a6dd
259 calculating bids for ancestor 40663881a6dd
260 searching for copies back to rev 3
260 searching for copies back to rev 3
261 resolving manifests
261 resolving manifests
262 branchmerge: True, force: False, partial: False
262 branchmerge: True, force: False, partial: False
263 ancestor: 40663881a6dd, local: 3b08d01b0ab5+, remote: adfe50279922
263 ancestor: 40663881a6dd, local: 3b08d01b0ab5+, remote: adfe50279922
264 f1: versions differ -> m
264 f1: versions differ -> m
265 f2: remote unchanged -> k
265 f2: remote unchanged -> k
266
266
267 auction for merging merge bids
267 auction for merging merge bids
268 f1: picking 'get' action
268 f1: picking 'get' action
269 f2: picking 'keep' action
269 f2: picking 'keep' action
270 end of auction
270 end of auction
271
271
272 f1: remote is newer -> g
272 f1: remote is newer -> g
273 getting f1
273 getting f1
274 f2: remote unchanged -> k
274 f2: remote unchanged -> k
275 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
275 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
276 (branch merge, don't forget to commit)
276 (branch merge, don't forget to commit)
277
277
278 Test the greatest common ancestor returning multiple changesets
278 Test the greatest common ancestor returning multiple changesets
279
279
280 $ hg log -r 'heads(commonancestors(head()))'
280 $ hg log -r 'heads(commonancestors(head()))'
281 changeset: 1:0f6b37dbe527
281 changeset: 1:0f6b37dbe527
282 user: test
282 user: test
283 date: Thu Jan 01 00:00:00 1970 +0000
283 date: Thu Jan 01 00:00:00 1970 +0000
284 summary: 1 first change f1
284 summary: 1 first change f1
285
285
286 changeset: 2:40663881a6dd
286 changeset: 2:40663881a6dd
287 parent: 0:40494bf2444c
287 parent: 0:40494bf2444c
288 user: test
288 user: test
289 date: Thu Jan 01 00:00:00 1970 +0000
289 date: Thu Jan 01 00:00:00 1970 +0000
290 summary: 2 first change f2
290 summary: 2 first change f2
291
291
292
292
293 $ cd ..
293 $ cd ..
294
294
295 http://stackoverflow.com/questions/9350005/how-do-i-specify-a-merge-base-to-use-in-a-hg-merge/9430810
295 http://stackoverflow.com/questions/9350005/how-do-i-specify-a-merge-base-to-use-in-a-hg-merge/9430810
296
296
297 $ hg init ancestor-merging
297 $ hg init ancestor-merging
298 $ cd ancestor-merging
298 $ cd ancestor-merging
299 $ echo a > x
299 $ echo a > x
300 $ hg commit -A -m a x
300 $ hg commit -A -m a x
301 $ hg update -q 0
301 $ hg update -q 0
302 $ echo b >> x
302 $ echo b >> x
303 $ hg commit -m b
303 $ hg commit -m b
304 $ hg update -q 0
304 $ hg update -q 0
305 $ echo c >> x
305 $ echo c >> x
306 $ hg commit -qm c
306 $ hg commit -qm c
307 $ hg update -q 1
307 $ hg update -q 1
308 $ hg merge -q --tool internal:local 2
308 $ hg merge -q --tool internal:local 2
309 $ echo c >> x
309 $ echo c >> x
310 $ hg commit -m bc
310 $ hg commit -m bc
311 $ hg update -q 2
311 $ hg update -q 2
312 $ hg merge -q --tool internal:local 1
312 $ hg merge -q --tool internal:local 1
313 $ echo b >> x
313 $ echo b >> x
314 $ hg commit -qm cb
314 $ hg commit -qm cb
315
315
316 $ hg merge --config merge.preferancestor='!'
316 $ hg merge --config merge.preferancestor='!'
317 note: using 70008a2163f6 as ancestor of 0d355fdef312 and 4b8b546a3eef
317 note: using 70008a2163f6 as ancestor of 0d355fdef312 and 4b8b546a3eef
318 alternatively, use --config merge.preferancestor=b211bbc6eb3c
318 alternatively, use --config merge.preferancestor=b211bbc6eb3c
319 merging x
319 merging x
320 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
320 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
321 (branch merge, don't forget to commit)
321 (branch merge, don't forget to commit)
322 $ cat x
322 $ cat x
323 a
323 a
324 c
324 c
325 b
325 b
326 c
326 c
327
327
328 $ hg up -qC .
328 $ hg up -qC .
329
329
330 $ hg merge --config merge.preferancestor=b211bbc6eb3c
330 $ hg merge --config merge.preferancestor=b211bbc6eb3c
331 note: using b211bbc6eb3c as ancestor of 0d355fdef312 and 4b8b546a3eef
331 note: using b211bbc6eb3c as ancestor of 0d355fdef312 and 4b8b546a3eef
332 alternatively, use --config merge.preferancestor=70008a2163f6
332 alternatively, use --config merge.preferancestor=70008a2163f6
333 merging x
333 merging x
334 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
334 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
335 (branch merge, don't forget to commit)
335 (branch merge, don't forget to commit)
336 $ cat x
336 $ cat x
337 a
337 a
338 b
338 b
339 c
339 c
340 b
340 b
341
341
342 $ hg up -qC .
342 $ hg up -qC .
343
343
344 $ hg merge -v --config merge.preferancestor="*"
344 $ hg merge -v --config merge.preferancestor="*"
345 note: merging 0d355fdef312+ and 4b8b546a3eef using bids from ancestors 70008a2163f6 and b211bbc6eb3c
345 note: merging 0d355fdef312+ and 4b8b546a3eef using bids from ancestors 70008a2163f6 and b211bbc6eb3c
346
346
347 calculating bids for ancestor 70008a2163f6
347 calculating bids for ancestor 70008a2163f6
348 resolving manifests
348 resolving manifests
349
349
350 calculating bids for ancestor b211bbc6eb3c
350 calculating bids for ancestor b211bbc6eb3c
351 resolving manifests
351 resolving manifests
352
352
353 auction for merging merge bids
353 auction for merging merge bids
354 x: multiple bids for merge action:
354 x: multiple bids for merge action:
355 versions differ -> m
355 versions differ -> m
356 versions differ -> m
356 versions differ -> m
357 x: ambiguous merge - picked m action
357 x: ambiguous merge - picked m action
358 end of auction
358 end of auction
359
359
360 merging x
360 merging x
361 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
361 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
362 (branch merge, don't forget to commit)
362 (branch merge, don't forget to commit)
363 $ cat x
363 $ cat x
364 a
364 a
365 c
365 c
366 b
366 b
367 c
367 c
368
368
369 Verify that the old context ancestor works with / despite preferancestor:
369 Verify that the old context ancestor works with / despite preferancestor:
370
370
371 $ hg log -r 'ancestor(head())' --config merge.preferancestor=1 -T '{rev}\n'
371 $ hg log -r 'ancestor(head())' --config merge.preferancestor=1 -T '{rev}\n'
372 1
372 1
373 $ hg log -r 'ancestor(head())' --config merge.preferancestor=2 -T '{rev}\n'
373 $ hg log -r 'ancestor(head())' --config merge.preferancestor=2 -T '{rev}\n'
374 2
374 2
375 $ hg log -r 'ancestor(head())' --config merge.preferancestor=3 -T '{rev}\n'
375 $ hg log -r 'ancestor(head())' --config merge.preferancestor=3 -T '{rev}\n'
376 1
376 1
377 $ hg log -r 'ancestor(head())' --config merge.preferancestor='1337 * - 2' -T '{rev}\n'
377 $ hg log -r 'ancestor(head())' --config merge.preferancestor='1337 * - 2' -T '{rev}\n'
378 2
378 2
379
379
380 $ cd ..
380 $ cd ..
381
381
382 $ hg init issue5020
382 $ hg init issue5020
383 $ cd issue5020
383 $ cd issue5020
384
384
385 $ echo a > noop
385 $ echo a > noop
386 $ hg ci -qAm initial
386 $ hg ci -qAm initial
387
387
388 $ echo b > noop
388 $ echo b > noop
389 $ hg ci -qAm 'uninteresting change'
389 $ hg ci -qAm 'uninteresting change'
390
390
391 $ hg up -q 0
391 $ hg up -q 0
392 $ mkdir d1
392 $ mkdir d1
393 $ echo a > d1/a
393 $ echo a > d1/a
394 $ echo b > d1/b
394 $ echo b > d1/b
395 $ hg ci -qAm 'add d1/a and d1/b'
395 $ hg ci -qAm 'add d1/a and d1/b'
396
396
397 $ hg merge -q 1
397 $ hg merge -q 1
398 $ hg rm d1/a
398 $ hg rm d1/a
399 $ hg mv -q d1 d2
399 $ hg mv -q d1 d2
400 $ hg ci -qm 'merge while removing d1/a and moving d1/b to d2/b'
400 $ hg ci -qm 'merge while removing d1/a and moving d1/b to d2/b'
401
401
402 $ hg up -q 1
402 $ hg up -q 1
403 $ hg merge -q 2
403 $ hg merge -q 2
404 $ hg ci -qm 'merge (no changes while merging)'
404 $ hg ci -qm 'merge (no changes while merging)'
405 $ hg log -G -T '{rev}:{node|short} {desc}'
405 $ hg log -G -T '{rev}:{node|short} {desc}'
406 @ 4:c0ef19750a22 merge (no changes while merging)
406 @ 4:c0ef19750a22 merge (no changes while merging)
407 |\
407 |\
408 +---o 3:6ca01f7342b9 merge while removing d1/a and moving d1/b to d2/b
408 +---o 3:6ca01f7342b9 merge while removing d1/a and moving d1/b to d2/b
409 | |/
409 | |/
410 | o 2:154e6000f54e add d1/a and d1/b
410 | o 2:154e6000f54e add d1/a and d1/b
411 | |
411 | |
412 o | 1:11b5b303e36c uninteresting change
412 o | 1:11b5b303e36c uninteresting change
413 |/
413 |/
414 o 0:7b54db1ebf33 initial
414 o 0:7b54db1ebf33 initial
415
415
416 $ hg merge 3 --debug
416 $ hg merge 3 --debug
417 note: merging c0ef19750a22+ and 6ca01f7342b9 using bids from ancestors 11b5b303e36c and 154e6000f54e
417 note: merging c0ef19750a22+ and 6ca01f7342b9 using bids from ancestors 11b5b303e36c and 154e6000f54e
418
418
419 calculating bids for ancestor 11b5b303e36c
419 calculating bids for ancestor 11b5b303e36c
420 searching for copies back to rev 3
420 searching for copies back to rev 3
421 unmatched files in local:
421 unmatched files in local:
422 d1/a
422 d1/a
423 d1/b
423 d1/b
424 unmatched files in other:
424 unmatched files in other:
425 d2/b
425 d2/b
426 all copies found (* = to merge, ! = divergent, % = renamed and deleted):
427 src: 'd1/b' -> dst: 'd2/b'
428 checking for directory renames
429 discovered dir src: 'd1/' -> dst: 'd2/'
430 pending file src: 'd1/a' -> dst: 'd2/a'
431 pending file src: 'd1/b' -> dst: 'd2/b'
432 resolving manifests
426 resolving manifests
433 branchmerge: True, force: False, partial: False
427 branchmerge: True, force: False, partial: False
434 ancestor: 11b5b303e36c, local: c0ef19750a22+, remote: 6ca01f7342b9
428 ancestor: 11b5b303e36c, local: c0ef19750a22+, remote: 6ca01f7342b9
435 d2/a: remote directory rename - move from d1/a -> dm
429 d2/b: remote created -> g
436 d2/b: remote directory rename, both created -> m
437
430
438 calculating bids for ancestor 154e6000f54e
431 calculating bids for ancestor 154e6000f54e
439 searching for copies back to rev 3
432 searching for copies back to rev 3
440 unmatched files in other:
433 unmatched files in other:
441 d2/b
434 d2/b
442 all copies found (* = to merge, ! = divergent, % = renamed and deleted):
435 all copies found (* = to merge, ! = divergent, % = renamed and deleted):
443 src: 'd1/b' -> dst: 'd2/b'
436 src: 'd1/b' -> dst: 'd2/b'
444 checking for directory renames
437 checking for directory renames
445 discovered dir src: 'd1/' -> dst: 'd2/'
438 discovered dir src: 'd1/' -> dst: 'd2/'
446 resolving manifests
439 resolving manifests
447 branchmerge: True, force: False, partial: False
440 branchmerge: True, force: False, partial: False
448 ancestor: 154e6000f54e, local: c0ef19750a22+, remote: 6ca01f7342b9
441 ancestor: 154e6000f54e, local: c0ef19750a22+, remote: 6ca01f7342b9
449 d1/a: other deleted -> r
442 d1/a: other deleted -> r
450 d1/b: other deleted -> r
443 d1/b: other deleted -> r
451 d2/b: remote created -> g
444 d2/b: remote created -> g
452
445
453 auction for merging merge bids
446 auction for merging merge bids
454 d1/a: consensus for r
447 d1/a: consensus for r
455 d1/b: consensus for r
448 d1/b: consensus for r
456 d2/a: consensus for dm
449 d2/b: consensus for g
457 d2/b: picking 'get' action
458 end of auction
450 end of auction
459
451
460 d1/a: other deleted -> r
452 d1/a: other deleted -> r
461 removing d1/a
453 removing d1/a
462 d1/b: other deleted -> r
454 d1/b: other deleted -> r
463 removing d1/b
455 removing d1/b
464 d2/b: remote created -> g
456 d2/b: remote created -> g
465 getting d2/b
457 getting d2/b
466 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
458 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
467 (branch merge, don't forget to commit)
459 (branch merge, don't forget to commit)