##// END OF EJS Templates
copies: fix typo in comment...
Gábor Stefanik -
r33881:169baf3d default
parent child Browse files
Show More
@@ -1,743 +1,743 b''
1 # copies.py - copy detection for Mercurial
1 # copies.py - copy detection for Mercurial
2 #
2 #
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import heapq
10 import heapq
11
11
12 from . import (
12 from . import (
13 match as matchmod,
13 match as matchmod,
14 node,
14 node,
15 pathutil,
15 pathutil,
16 scmutil,
16 scmutil,
17 util,
17 util,
18 )
18 )
19
19
20 def _findlimit(repo, a, b):
20 def _findlimit(repo, a, b):
21 """
21 """
22 Find the last revision that needs to be checked to ensure that a full
22 Find the last revision that needs to be checked to ensure that a full
23 transitive closure for file copies can be properly calculated.
23 transitive closure for file copies can be properly calculated.
24 Generally, this means finding the earliest revision number that's an
24 Generally, this means finding the earliest revision number that's an
25 ancestor of a or b but not both, except when a or b is a direct descendent
25 ancestor of a or b but not both, except when a or b is a direct descendent
26 of the other, in which case we can return the minimum revnum of a and b.
26 of the other, in which case we can return the minimum revnum of a and b.
27 None if no such revision exists.
27 None if no such revision exists.
28 """
28 """
29
29
30 # basic idea:
30 # basic idea:
31 # - mark a and b with different sides
31 # - mark a and b with different sides
32 # - if a parent's children are all on the same side, the parent is
32 # - if a parent's children are all on the same side, the parent is
33 # on that side, otherwise it is on no side
33 # on that side, otherwise it is on no side
34 # - walk the graph in topological order with the help of a heap;
34 # - walk the graph in topological order with the help of a heap;
35 # - add unseen parents to side map
35 # - add unseen parents to side map
36 # - clear side of any parent that has children on different sides
36 # - clear side of any parent that has children on different sides
37 # - track number of interesting revs that might still be on a side
37 # - track number of interesting revs that might still be on a side
38 # - track the lowest interesting rev seen
38 # - track the lowest interesting rev seen
39 # - quit when interesting revs is zero
39 # - quit when interesting revs is zero
40
40
41 cl = repo.changelog
41 cl = repo.changelog
42 working = len(cl) # pseudo rev for the working directory
42 working = len(cl) # pseudo rev for the working directory
43 if a is None:
43 if a is None:
44 a = working
44 a = working
45 if b is None:
45 if b is None:
46 b = working
46 b = working
47
47
48 side = {a: -1, b: 1}
48 side = {a: -1, b: 1}
49 visit = [-a, -b]
49 visit = [-a, -b]
50 heapq.heapify(visit)
50 heapq.heapify(visit)
51 interesting = len(visit)
51 interesting = len(visit)
52 hascommonancestor = False
52 hascommonancestor = False
53 limit = working
53 limit = working
54
54
55 while interesting:
55 while interesting:
56 r = -heapq.heappop(visit)
56 r = -heapq.heappop(visit)
57 if r == working:
57 if r == working:
58 parents = [cl.rev(p) for p in repo.dirstate.parents()]
58 parents = [cl.rev(p) for p in repo.dirstate.parents()]
59 else:
59 else:
60 parents = cl.parentrevs(r)
60 parents = cl.parentrevs(r)
61 for p in parents:
61 for p in parents:
62 if p < 0:
62 if p < 0:
63 continue
63 continue
64 if p not in side:
64 if p not in side:
65 # first time we see p; add it to visit
65 # first time we see p; add it to visit
66 side[p] = side[r]
66 side[p] = side[r]
67 if side[p]:
67 if side[p]:
68 interesting += 1
68 interesting += 1
69 heapq.heappush(visit, -p)
69 heapq.heappush(visit, -p)
70 elif side[p] and side[p] != side[r]:
70 elif side[p] and side[p] != side[r]:
71 # p was interesting but now we know better
71 # p was interesting but now we know better
72 side[p] = 0
72 side[p] = 0
73 interesting -= 1
73 interesting -= 1
74 hascommonancestor = True
74 hascommonancestor = True
75 if side[r]:
75 if side[r]:
76 limit = r # lowest rev visited
76 limit = r # lowest rev visited
77 interesting -= 1
77 interesting -= 1
78
78
79 if not hascommonancestor:
79 if not hascommonancestor:
80 return None
80 return None
81
81
82 # Consider the following flow (see test-commit-amend.t under issue4405):
82 # Consider the following flow (see test-commit-amend.t under issue4405):
83 # 1/ File 'a0' committed
83 # 1/ File 'a0' committed
84 # 2/ File renamed from 'a0' to 'a1' in a new commit (call it 'a1')
84 # 2/ File renamed from 'a0' to 'a1' in a new commit (call it 'a1')
85 # 3/ Move back to first commit
85 # 3/ Move back to first commit
86 # 4/ Create a new commit via revert to contents of 'a1' (call it 'a1-amend')
86 # 4/ Create a new commit via revert to contents of 'a1' (call it 'a1-amend')
87 # 5/ Rename file from 'a1' to 'a2' and commit --amend 'a1-msg'
87 # 5/ Rename file from 'a1' to 'a2' and commit --amend 'a1-msg'
88 #
88 #
89 # During the amend in step five, we will be in this state:
89 # During the amend in step five, we will be in this state:
90 #
90 #
91 # @ 3 temporary amend commit for a1-amend
91 # @ 3 temporary amend commit for a1-amend
92 # |
92 # |
93 # o 2 a1-amend
93 # o 2 a1-amend
94 # |
94 # |
95 # | o 1 a1
95 # | o 1 a1
96 # |/
96 # |/
97 # o 0 a0
97 # o 0 a0
98 #
98 #
99 # When _findlimit is called, a and b are revs 3 and 0, so limit will be 2,
99 # When _findlimit is called, a and b are revs 3 and 0, so limit will be 2,
100 # yet the filelog has the copy information in rev 1 and we will not look
100 # yet the filelog has the copy information in rev 1 and we will not look
101 # back far enough unless we also look at the a and b as candidates.
101 # back far enough unless we also look at the a and b as candidates.
102 # This only occurs when a is a descendent of b or visa-versa.
102 # This only occurs when a is a descendent of b or visa-versa.
103 return min(limit, a, b)
103 return min(limit, a, b)
104
104
105 def _chain(src, dst, a, b):
105 def _chain(src, dst, a, b):
106 '''chain two sets of copies a->b'''
106 '''chain two sets of copies a->b'''
107 t = a.copy()
107 t = a.copy()
108 for k, v in b.iteritems():
108 for k, v in b.iteritems():
109 if v in t:
109 if v in t:
110 # found a chain
110 # found a chain
111 if t[v] != k:
111 if t[v] != k:
112 # file wasn't renamed back to itself
112 # file wasn't renamed back to itself
113 t[k] = t[v]
113 t[k] = t[v]
114 if v not in dst:
114 if v not in dst:
115 # chain was a rename, not a copy
115 # chain was a rename, not a copy
116 del t[v]
116 del t[v]
117 if v in src:
117 if v in src:
118 # file is a copy of an existing file
118 # file is a copy of an existing file
119 t[k] = v
119 t[k] = v
120
120
121 # remove criss-crossed copies
121 # remove criss-crossed copies
122 for k, v in t.items():
122 for k, v in t.items():
123 if k in src and v in dst:
123 if k in src and v in dst:
124 del t[k]
124 del t[k]
125
125
126 return t
126 return t
127
127
128 def _tracefile(fctx, am, limit=-1):
128 def _tracefile(fctx, am, limit=-1):
129 '''return file context that is the ancestor of fctx present in ancestor
129 '''return file context that is the ancestor of fctx present in ancestor
130 manifest am, stopping after the first ancestor lower than limit'''
130 manifest am, stopping after the first ancestor lower than limit'''
131
131
132 for f in fctx.ancestors():
132 for f in fctx.ancestors():
133 if am.get(f.path(), None) == f.filenode():
133 if am.get(f.path(), None) == f.filenode():
134 return f
134 return f
135 if limit >= 0 and f.linkrev() < limit and f.rev() < limit:
135 if limit >= 0 and f.linkrev() < limit and f.rev() < limit:
136 return None
136 return None
137
137
138 def _dirstatecopies(d):
138 def _dirstatecopies(d):
139 ds = d._repo.dirstate
139 ds = d._repo.dirstate
140 c = ds.copies().copy()
140 c = ds.copies().copy()
141 for k in c.keys():
141 for k in c.keys():
142 if ds[k] not in 'anm':
142 if ds[k] not in 'anm':
143 del c[k]
143 del c[k]
144 return c
144 return c
145
145
146 def _computeforwardmissing(a, b, match=None):
146 def _computeforwardmissing(a, b, match=None):
147 """Computes which files are in b but not a.
147 """Computes which files are in b but not a.
148 This is its own function so extensions can easily wrap this call to see what
148 This is its own function so extensions can easily wrap this call to see what
149 files _forwardcopies is about to process.
149 files _forwardcopies is about to process.
150 """
150 """
151 ma = a.manifest()
151 ma = a.manifest()
152 mb = b.manifest()
152 mb = b.manifest()
153 return mb.filesnotin(ma, match=match)
153 return mb.filesnotin(ma, match=match)
154
154
def _forwardcopies(a, b, match=None):
    '''find {dst@b: src@a} copy mapping where a is an ancestor of b'''

    # if b is the working directory, trace against its parent and chain the
    # dirstate copies on afterwards
    wctx = None
    if b.rev() is None:
        wctx = b
        b = wctx.p1()
        if a == b:
            # short-circuit to avoid issues with merge states
            return _dirstatecopies(wctx)

    # files might have to be traced back to the fctx parent of the last
    # one-side-only changeset, but not further back than that
    limit = _findlimit(a._repo, a.rev(), b.rev())
    if limit is None:
        limit = -1
    am = a.manifest()

    # find where new files came from
    # we currently don't try to find where old files went, too expensive
    # this means we can miss a case like 'hg rm b; hg cp a b'
    copies = {}

    # Computing the forward missing is quite expensive on large manifests,
    # since it compares the entire manifests.  Restrict the comparison to
    # b.files() when b is a non-merge child of a (the common rebase/histedit
    # shape); merge commits are excluded because their ctx.files() is not
    # correct for this comparison.
    fwdmatch = match
    if b.p1() == a and b.p2().node() == node.nullid:
        filesmatcher = scmutil.matchfiles(a._repo, b.files())
        fwdmatch = matchmod.intersectmatchers(match, filesmatcher)
    missing = _computeforwardmissing(a, b, match=fwdmatch)

    # share one ancestry context across all traced files to keep linkrev
    # adjustment cheap
    ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True)
    for path in missing:
        fctx = b[path]
        fctx._ancestrycontext = ancestrycontext
        ofctx = _tracefile(fctx, am, limit)
        if ofctx:
            copies[path] = ofctx.path()

    # combine copies from dirstate if necessary
    if wctx is not None:
        copies = _chain(a, wctx, copies, _dirstatecopies(wctx))

    return copies
204
204
def _backwardrenames(a, b):
    """find {src@b: dst@a} rename mapping, where b is an ancestor of a

    Plain copies (where the source still exists in a) are dropped, since a
    copy cannot be meaningfully reversed.
    """
    if a._repo.ui.configbool('experimental', 'disablecopytrace'):
        return {}

    # Even though we're not taking copies into account, 1:n rename situations
    # can still exist (e.g. hg cp a b; hg mv a c). In those cases we
    # arbitrarily pick one of the renames.
    f = _forwardcopies(b, a)
    r = {}
    # items() instead of the Python-2-only iteritems(); sorted() keeps the
    # arbitrary 1:n pick deterministic
    for k, v in sorted(f.items()):
        # remove copies
        if v in a:
            continue
        r[v] = k
    return r
220
220
def pathcopies(x, y, match=None):
    '''find {dst@y: src@x} copy mapping for directed compare'''
    if x == y or not x or not y:
        return {}
    anc = y.ancestor(x)
    if anc == x:
        # y descends from x: straight forward walk
        return _forwardcopies(x, y, match=match)
    if anc == y:
        # x descends from y: invert the walk
        return _backwardrenames(x, y)
    # x and y have diverged: go back from x to their ancestor, then
    # forward from the ancestor to y, and chain the two mappings
    backward = _backwardrenames(x, anc)
    forward = _forwardcopies(anc, y, match=match)
    return _chain(x, y, backward, forward)
232
232
233 def _computenonoverlap(repo, c1, c2, addedinm1, addedinm2, baselabel=''):
233 def _computenonoverlap(repo, c1, c2, addedinm1, addedinm2, baselabel=''):
234 """Computes, based on addedinm1 and addedinm2, the files exclusive to c1
234 """Computes, based on addedinm1 and addedinm2, the files exclusive to c1
235 and c2. This is its own function so extensions can easily wrap this call
235 and c2. This is its own function so extensions can easily wrap this call
236 to see what files mergecopies is about to process.
236 to see what files mergecopies is about to process.
237
237
238 Even though c1 and c2 are not used in this function, they are useful in
238 Even though c1 and c2 are not used in this function, they are useful in
239 other extensions for being able to read the file nodes of the changed files.
239 other extensions for being able to read the file nodes of the changed files.
240
240
241 "baselabel" can be passed to help distinguish the multiple computations
241 "baselabel" can be passed to help distinguish the multiple computations
242 done in the graft case.
242 done in the graft case.
243 """
243 """
244 u1 = sorted(addedinm1 - addedinm2)
244 u1 = sorted(addedinm1 - addedinm2)
245 u2 = sorted(addedinm2 - addedinm1)
245 u2 = sorted(addedinm2 - addedinm1)
246
246
247 header = " unmatched files in %s"
247 header = " unmatched files in %s"
248 if baselabel:
248 if baselabel:
249 header += ' (from %s)' % baselabel
249 header += ' (from %s)' % baselabel
250 if u1:
250 if u1:
251 repo.ui.debug("%s:\n %s\n" % (header % 'local', "\n ".join(u1)))
251 repo.ui.debug("%s:\n %s\n" % (header % 'local', "\n ".join(u1)))
252 if u2:
252 if u2:
253 repo.ui.debug("%s:\n %s\n" % (header % 'other', "\n ".join(u2)))
253 repo.ui.debug("%s:\n %s\n" % (header % 'other', "\n ".join(u2)))
254 return u1, u2
254 return u1, u2
255
255
def _makegetfctx(ctx):
    """return a 'getfctx' function suitable for _checkcopies usage

    The 'filectx' builder has to be re-created for every '_checkcopies'
    call so linkrev adjustment is set up correctly each time; that
    adjustment matters for correct rename detection.  Sharing one
    '_ancestrycontext' keeps the adjustment cheap — without it every file
    could trigger a full dag traversal and the operation's time complexity
    explodes (see issue4537).

    This function exists here mostly to limit the impact on stable. Feel
    free to refactor on default.
    """
    rev = ctx.rev()
    repo = ctx._repo
    ac = getattr(ctx, '_ancestrycontext', None)
    if ac is None:
        # lazily build (and cache on ctx) the shared ancestry context
        if rev is None:
            revs = [p.rev() for p in ctx.parents()]
        else:
            revs = [rev]
        ac = repo.changelog.ancestors(revs, inclusive=True)
        ctx._ancestrycontext = ac

    def makectx(f, n):
        if n in node.wdirnodes:  # in a working context?
            if ctx.rev() is None:
                return ctx.filectx(f)
            return repo[None][f]
        fctx = repo.filectx(f, fileid=n)
        # setup only needed for filectx not created from a changectx
        fctx._ancestrycontext = ac
        fctx._descendantrev = rev
        return fctx

    return util.lrucachefunc(makectx)
290
290
291 def _combinecopies(copyfrom, copyto, finalcopy, diverge, incompletediverge):
291 def _combinecopies(copyfrom, copyto, finalcopy, diverge, incompletediverge):
292 """combine partial copy paths"""
292 """combine partial copy paths"""
293 remainder = {}
293 remainder = {}
294 for f in copyfrom:
294 for f in copyfrom:
295 if f in copyto:
295 if f in copyto:
296 finalcopy[copyto[f]] = copyfrom[f]
296 finalcopy[copyto[f]] = copyfrom[f]
297 del copyto[f]
297 del copyto[f]
298 for f in incompletediverge:
298 for f in incompletediverge:
299 assert f not in diverge
299 assert f not in diverge
300 ic = incompletediverge[f]
300 ic = incompletediverge[f]
301 if ic[0] in copyto:
301 if ic[0] in copyto:
302 diverge[f] = [copyto[ic[0]], ic[1]]
302 diverge[f] = [copyto[ic[0]], ic[1]]
303 else:
303 else:
304 remainder[f] = ic
304 remainder[f] = ic
305 return remainder
305 return remainder
306
306
307 def mergecopies(repo, c1, c2, base):
307 def mergecopies(repo, c1, c2, base):
308 """
308 """
309 The basic algorithm for copytracing. Copytracing is used in commands like
309 The basic algorithm for copytracing. Copytracing is used in commands like
310 rebase, merge, unshelve, etc to merge files that were moved/ copied in one
310 rebase, merge, unshelve, etc to merge files that were moved/ copied in one
311 merge parent and modified in another. For example:
311 merge parent and modified in another. For example:
312
312
313 o ---> 4 another commit
313 o ---> 4 another commit
314 |
314 |
315 | o ---> 3 commit that modifies a.txt
315 | o ---> 3 commit that modifies a.txt
316 | /
316 | /
317 o / ---> 2 commit that moves a.txt to b.txt
317 o / ---> 2 commit that moves a.txt to b.txt
318 |/
318 |/
319 o ---> 1 merge base
319 o ---> 1 merge base
320
320
321 If we try to rebase revision 3 on revision 4, since there is no a.txt in
321 If we try to rebase revision 3 on revision 4, since there is no a.txt in
322 revision 4, and if user have copytrace disabled, we prints the following
322 revision 4, and if user have copytrace disabled, we prints the following
323 message:
323 message:
324
324
325 ```other changed <file> which local deleted```
325 ```other changed <file> which local deleted```
326
326
327 If copytrace is enabled, this function finds all the new files that were
327 If copytrace is enabled, this function finds all the new files that were
328 added from merge base up to the top commit (here 4), and for each file it
328 added from merge base up to the top commit (here 4), and for each file it
329 checks if this file was copied from another file (a.txt in the above case).
329 checks if this file was copied from another file (a.txt in the above case).
330
330
331 Find moves and copies between context c1 and c2 that are relevant
331 Find moves and copies between context c1 and c2 that are relevant
332 for merging. 'base' will be used as the merge base.
332 for merging. 'base' will be used as the merge base.
333
333
334 Returns five dicts: "copy", "movewithdir", "diverge", "renamedelete" and
334 Returns five dicts: "copy", "movewithdir", "diverge", "renamedelete" and
335 "dirmove".
335 "dirmove".
336
336
337 "copy" is a mapping from destination name -> source name,
337 "copy" is a mapping from destination name -> source name,
338 where source is in c1 and destination is in c2 or vice-versa.
338 where source is in c1 and destination is in c2 or vice-versa.
339
339
340 "movewithdir" is a mapping from source name -> destination name,
340 "movewithdir" is a mapping from source name -> destination name,
341 where the file at source present in one context but not the other
341 where the file at source present in one context but not the other
342 needs to be moved to destination by the merge process, because the
342 needs to be moved to destination by the merge process, because the
343 other context moved the directory it is in.
343 other context moved the directory it is in.
344
344
345 "diverge" is a mapping of source name -> list of destination names
345 "diverge" is a mapping of source name -> list of destination names
346 for divergent renames.
346 for divergent renames.
347
347
348 "renamedelete" is a mapping of source name -> list of destination
348 "renamedelete" is a mapping of source name -> list of destination
349 names for files deleted in c1 that were renamed in c2 or vice-versa.
349 names for files deleted in c1 that were renamed in c2 or vice-versa.
350
350
351 "dirmove" is a mapping of detected source dir -> destination dir renames.
351 "dirmove" is a mapping of detected source dir -> destination dir renames.
352 This is needed for handling changes to new files previously grafted into
352 This is needed for handling changes to new files previously grafted into
353 renamed directories.
353 renamed directories.
354 """
354 """
355 # avoid silly behavior for update from empty dir
355 # avoid silly behavior for update from empty dir
356 if not c1 or not c2 or c1 == c2:
356 if not c1 or not c2 or c1 == c2:
357 return {}, {}, {}, {}, {}
357 return {}, {}, {}, {}, {}
358
358
359 # avoid silly behavior for parent -> working dir
359 # avoid silly behavior for parent -> working dir
360 if c2.node() is None and c1.node() == repo.dirstate.p1():
360 if c2.node() is None and c1.node() == repo.dirstate.p1():
361 return repo.dirstate.copies(), {}, {}, {}, {}
361 return repo.dirstate.copies(), {}, {}, {}, {}
362
362
363 # Copy trace disabling is explicitly below the node == p1 logic above
363 # Copy trace disabling is explicitly below the node == p1 logic above
364 # because the logic above is required for a simple copy to be kept across a
364 # because the logic above is required for a simple copy to be kept across a
365 # rebase.
365 # rebase.
366 if repo.ui.configbool('experimental', 'disablecopytrace'):
366 if repo.ui.configbool('experimental', 'disablecopytrace'):
367 return {}, {}, {}, {}, {}
367 return {}, {}, {}, {}, {}
368
368
369 # In certain scenarios (e.g. graft, update or rebase), base can be
369 # In certain scenarios (e.g. graft, update or rebase), base can be
370 # overridden We still need to know a real common ancestor in this case We
370 # overridden We still need to know a real common ancestor in this case We
371 # can't just compute _c1.ancestor(_c2) and compare it to ca, because there
371 # can't just compute _c1.ancestor(_c2) and compare it to ca, because there
372 # can be multiple common ancestors, e.g. in case of bidmerge. Because our
372 # can be multiple common ancestors, e.g. in case of bidmerge. Because our
373 # caller may not know if the revision passed in lieu of the CA is a genuine
373 # caller may not know if the revision passed in lieu of the CA is a genuine
374 # common ancestor or not without explicitly checking it, it's better to
374 # common ancestor or not without explicitly checking it, it's better to
375 # determine that here.
375 # determine that here.
376 #
376 #
377 # base.descendant(wc) and base.descendant(base) are False, work around that
377 # base.descendant(wc) and base.descendant(base) are False, work around that
378 _c1 = c1.p1() if c1.rev() is None else c1
378 _c1 = c1.p1() if c1.rev() is None else c1
379 _c2 = c2.p1() if c2.rev() is None else c2
379 _c2 = c2.p1() if c2.rev() is None else c2
380 # an endpoint is "dirty" if it isn't a descendant of the merge base
380 # an endpoint is "dirty" if it isn't a descendant of the merge base
381 # if we have a dirty endpoint, we need to trigger graft logic, and also
381 # if we have a dirty endpoint, we need to trigger graft logic, and also
382 # keep track of which endpoint is dirty
382 # keep track of which endpoint is dirty
383 dirtyc1 = not (base == _c1 or base.descendant(_c1))
383 dirtyc1 = not (base == _c1 or base.descendant(_c1))
384 dirtyc2 = not (base== _c2 or base.descendant(_c2))
384 dirtyc2 = not (base== _c2 or base.descendant(_c2))
385 graft = dirtyc1 or dirtyc2
385 graft = dirtyc1 or dirtyc2
386 tca = base
386 tca = base
387 if graft:
387 if graft:
388 tca = _c1.ancestor(_c2)
388 tca = _c1.ancestor(_c2)
389
389
390 limit = _findlimit(repo, c1.rev(), c2.rev())
390 limit = _findlimit(repo, c1.rev(), c2.rev())
391 if limit is None:
391 if limit is None:
392 # no common ancestor, no copies
392 # no common ancestor, no copies
393 return {}, {}, {}, {}, {}
393 return {}, {}, {}, {}, {}
394 repo.ui.debug(" searching for copies back to rev %d\n" % limit)
394 repo.ui.debug(" searching for copies back to rev %d\n" % limit)
395
395
396 m1 = c1.manifest()
396 m1 = c1.manifest()
397 m2 = c2.manifest()
397 m2 = c2.manifest()
398 mb = base.manifest()
398 mb = base.manifest()
399
399
400 # gather data from _checkcopies:
400 # gather data from _checkcopies:
401 # - diverge = record all diverges in this dict
401 # - diverge = record all diverges in this dict
402 # - copy = record all non-divergent copies in this dict
402 # - copy = record all non-divergent copies in this dict
403 # - fullcopy = record all copies in this dict
403 # - fullcopy = record all copies in this dict
404 # - incomplete = record non-divergent partial copies here
404 # - incomplete = record non-divergent partial copies here
405 # - incompletediverge = record divergent partial copies here
405 # - incompletediverge = record divergent partial copies here
406 diverge = {} # divergence data is shared
406 diverge = {} # divergence data is shared
407 incompletediverge = {}
407 incompletediverge = {}
408 data1 = {'copy': {},
408 data1 = {'copy': {},
409 'fullcopy': {},
409 'fullcopy': {},
410 'incomplete': {},
410 'incomplete': {},
411 'diverge': diverge,
411 'diverge': diverge,
412 'incompletediverge': incompletediverge,
412 'incompletediverge': incompletediverge,
413 }
413 }
414 data2 = {'copy': {},
414 data2 = {'copy': {},
415 'fullcopy': {},
415 'fullcopy': {},
416 'incomplete': {},
416 'incomplete': {},
417 'diverge': diverge,
417 'diverge': diverge,
418 'incompletediverge': incompletediverge,
418 'incompletediverge': incompletediverge,
419 }
419 }
420
420
421 # find interesting file sets from manifests
421 # find interesting file sets from manifests
422 addedinm1 = m1.filesnotin(mb)
422 addedinm1 = m1.filesnotin(mb)
423 addedinm2 = m2.filesnotin(mb)
423 addedinm2 = m2.filesnotin(mb)
424 bothnew = sorted(addedinm1 & addedinm2)
424 bothnew = sorted(addedinm1 & addedinm2)
425 if tca == base:
425 if tca == base:
426 # unmatched file from base
426 # unmatched file from base
427 u1r, u2r = _computenonoverlap(repo, c1, c2, addedinm1, addedinm2)
427 u1r, u2r = _computenonoverlap(repo, c1, c2, addedinm1, addedinm2)
428 u1u, u2u = u1r, u2r
428 u1u, u2u = u1r, u2r
429 else:
429 else:
430 # unmatched file from base (DAG rotation in the graft case)
430 # unmatched file from base (DAG rotation in the graft case)
431 u1r, u2r = _computenonoverlap(repo, c1, c2, addedinm1, addedinm2,
431 u1r, u2r = _computenonoverlap(repo, c1, c2, addedinm1, addedinm2,
432 baselabel='base')
432 baselabel='base')
433 # unmatched file from topological common ancestors (no DAG rotation)
433 # unmatched file from topological common ancestors (no DAG rotation)
434 # need to recompute this for directory move handling when grafting
434 # need to recompute this for directory move handling when grafting
435 mta = tca.manifest()
435 mta = tca.manifest()
436 u1u, u2u = _computenonoverlap(repo, c1, c2, m1.filesnotin(mta),
436 u1u, u2u = _computenonoverlap(repo, c1, c2, m1.filesnotin(mta),
437 m2.filesnotin(mta),
437 m2.filesnotin(mta),
438 baselabel='topological common ancestor')
438 baselabel='topological common ancestor')
439
439
440 for f in u1u:
440 for f in u1u:
441 _checkcopies(c1, c2, f, base, tca, dirtyc1, limit, data1)
441 _checkcopies(c1, c2, f, base, tca, dirtyc1, limit, data1)
442
442
443 for f in u2u:
443 for f in u2u:
444 _checkcopies(c2, c1, f, base, tca, dirtyc2, limit, data2)
444 _checkcopies(c2, c1, f, base, tca, dirtyc2, limit, data2)
445
445
446 copy = dict(data1['copy'])
446 copy = dict(data1['copy'])
447 copy.update(data2['copy'])
447 copy.update(data2['copy'])
448 fullcopy = dict(data1['fullcopy'])
448 fullcopy = dict(data1['fullcopy'])
449 fullcopy.update(data2['fullcopy'])
449 fullcopy.update(data2['fullcopy'])
450
450
451 if dirtyc1:
451 if dirtyc1:
452 _combinecopies(data2['incomplete'], data1['incomplete'], copy, diverge,
452 _combinecopies(data2['incomplete'], data1['incomplete'], copy, diverge,
453 incompletediverge)
453 incompletediverge)
454 else:
454 else:
455 _combinecopies(data1['incomplete'], data2['incomplete'], copy, diverge,
455 _combinecopies(data1['incomplete'], data2['incomplete'], copy, diverge,
456 incompletediverge)
456 incompletediverge)
457
457
458 renamedelete = {}
458 renamedelete = {}
459 renamedeleteset = set()
459 renamedeleteset = set()
460 divergeset = set()
460 divergeset = set()
461 for of, fl in diverge.items():
461 for of, fl in diverge.items():
462 if len(fl) == 1 or of in c1 or of in c2:
462 if len(fl) == 1 or of in c1 or of in c2:
463 del diverge[of] # not actually divergent, or not a rename
463 del diverge[of] # not actually divergent, or not a rename
464 if of not in c1 and of not in c2:
464 if of not in c1 and of not in c2:
465 # renamed on one side, deleted on the other side, but filter
465 # renamed on one side, deleted on the other side, but filter
466 # out files that have been renamed and then deleted
466 # out files that have been renamed and then deleted
467 renamedelete[of] = [f for f in fl if f in c1 or f in c2]
467 renamedelete[of] = [f for f in fl if f in c1 or f in c2]
468 renamedeleteset.update(fl) # reverse map for below
468 renamedeleteset.update(fl) # reverse map for below
469 else:
469 else:
470 divergeset.update(fl) # reverse map for below
470 divergeset.update(fl) # reverse map for below
471
471
472 if bothnew:
472 if bothnew:
473 repo.ui.debug(" unmatched files new in both:\n %s\n"
473 repo.ui.debug(" unmatched files new in both:\n %s\n"
474 % "\n ".join(bothnew))
474 % "\n ".join(bothnew))
475 bothdiverge = {}
475 bothdiverge = {}
476 bothincompletediverge = {}
476 bothincompletediverge = {}
477 remainder = {}
477 remainder = {}
478 both1 = {'copy': {},
478 both1 = {'copy': {},
479 'fullcopy': {},
479 'fullcopy': {},
480 'incomplete': {},
480 'incomplete': {},
481 'diverge': bothdiverge,
481 'diverge': bothdiverge,
482 'incompletediverge': bothincompletediverge
482 'incompletediverge': bothincompletediverge
483 }
483 }
484 both2 = {'copy': {},
484 both2 = {'copy': {},
485 'fullcopy': {},
485 'fullcopy': {},
486 'incomplete': {},
486 'incomplete': {},
487 'diverge': bothdiverge,
487 'diverge': bothdiverge,
488 'incompletediverge': bothincompletediverge
488 'incompletediverge': bothincompletediverge
489 }
489 }
490 for f in bothnew:
490 for f in bothnew:
491 _checkcopies(c1, c2, f, base, tca, dirtyc1, limit, both1)
491 _checkcopies(c1, c2, f, base, tca, dirtyc1, limit, both1)
492 _checkcopies(c2, c1, f, base, tca, dirtyc2, limit, both2)
492 _checkcopies(c2, c1, f, base, tca, dirtyc2, limit, both2)
493 if dirtyc1:
493 if dirtyc1:
494 # incomplete copies may only be found on the "dirty" side for bothnew
494 # incomplete copies may only be found on the "dirty" side for bothnew
495 assert not both2['incomplete']
495 assert not both2['incomplete']
496 remainder = _combinecopies({}, both1['incomplete'], copy, bothdiverge,
496 remainder = _combinecopies({}, both1['incomplete'], copy, bothdiverge,
497 bothincompletediverge)
497 bothincompletediverge)
498 elif dirtyc2:
498 elif dirtyc2:
499 assert not both1['incomplete']
499 assert not both1['incomplete']
500 remainder = _combinecopies({}, both2['incomplete'], copy, bothdiverge,
500 remainder = _combinecopies({}, both2['incomplete'], copy, bothdiverge,
501 bothincompletediverge)
501 bothincompletediverge)
502 else:
502 else:
503 # incomplete copies and divergences can't happen outside grafts
503 # incomplete copies and divergences can't happen outside grafts
504 assert not both1['incomplete']
504 assert not both1['incomplete']
505 assert not both2['incomplete']
505 assert not both2['incomplete']
506 assert not bothincompletediverge
506 assert not bothincompletediverge
507 for f in remainder:
507 for f in remainder:
508 assert f not in bothdiverge
508 assert f not in bothdiverge
509 ic = remainder[f]
509 ic = remainder[f]
510 if ic[0] in (m1 if dirtyc1 else m2):
510 if ic[0] in (m1 if dirtyc1 else m2):
511 # backed-out rename on one side, but watch out for deleted files
511 # backed-out rename on one side, but watch out for deleted files
512 bothdiverge[f] = ic
512 bothdiverge[f] = ic
513 for of, fl in bothdiverge.items():
513 for of, fl in bothdiverge.items():
514 if len(fl) == 2 and fl[0] == fl[1]:
514 if len(fl) == 2 and fl[0] == fl[1]:
515 copy[fl[0]] = of # not actually divergent, just matching renames
515 copy[fl[0]] = of # not actually divergent, just matching renames
516
516
517 if fullcopy and repo.ui.debugflag:
517 if fullcopy and repo.ui.debugflag:
518 repo.ui.debug(" all copies found (* = to merge, ! = divergent, "
518 repo.ui.debug(" all copies found (* = to merge, ! = divergent, "
519 "% = renamed and deleted):\n")
519 "% = renamed and deleted):\n")
520 for f in sorted(fullcopy):
520 for f in sorted(fullcopy):
521 note = ""
521 note = ""
522 if f in copy:
522 if f in copy:
523 note += "*"
523 note += "*"
524 if f in divergeset:
524 if f in divergeset:
525 note += "!"
525 note += "!"
526 if f in renamedeleteset:
526 if f in renamedeleteset:
527 note += "%"
527 note += "%"
528 repo.ui.debug(" src: '%s' -> dst: '%s' %s\n" % (fullcopy[f], f,
528 repo.ui.debug(" src: '%s' -> dst: '%s' %s\n" % (fullcopy[f], f,
529 note))
529 note))
530 del divergeset
530 del divergeset
531
531
532 if not fullcopy:
532 if not fullcopy:
533 return copy, {}, diverge, renamedelete, {}
533 return copy, {}, diverge, renamedelete, {}
534
534
535 repo.ui.debug(" checking for directory renames\n")
535 repo.ui.debug(" checking for directory renames\n")
536
536
537 # generate a directory move map
537 # generate a directory move map
538 d1, d2 = c1.dirs(), c2.dirs()
538 d1, d2 = c1.dirs(), c2.dirs()
539 # Hack for adding '', which is not otherwise added, to d1 and d2
539 # Hack for adding '', which is not otherwise added, to d1 and d2
540 d1.addpath('/')
540 d1.addpath('/')
541 d2.addpath('/')
541 d2.addpath('/')
542 invalid = set()
542 invalid = set()
543 dirmove = {}
543 dirmove = {}
544
544
545 # examine each file copy for a potential directory move, which is
545 # examine each file copy for a potential directory move, which is
546 # when all the files in a directory are moved to a new directory
546 # when all the files in a directory are moved to a new directory
547 for dst, src in fullcopy.iteritems():
547 for dst, src in fullcopy.iteritems():
548 dsrc, ddst = pathutil.dirname(src), pathutil.dirname(dst)
548 dsrc, ddst = pathutil.dirname(src), pathutil.dirname(dst)
549 if dsrc in invalid:
549 if dsrc in invalid:
550 # already seen to be uninteresting
550 # already seen to be uninteresting
551 continue
551 continue
552 elif dsrc in d1 and ddst in d1:
552 elif dsrc in d1 and ddst in d1:
553 # directory wasn't entirely moved locally
553 # directory wasn't entirely moved locally
554 invalid.add(dsrc + "/")
554 invalid.add(dsrc + "/")
555 elif dsrc in d2 and ddst in d2:
555 elif dsrc in d2 and ddst in d2:
556 # directory wasn't entirely moved remotely
556 # directory wasn't entirely moved remotely
557 invalid.add(dsrc + "/")
557 invalid.add(dsrc + "/")
558 elif dsrc + "/" in dirmove and dirmove[dsrc + "/"] != ddst + "/":
558 elif dsrc + "/" in dirmove and dirmove[dsrc + "/"] != ddst + "/":
559 # files from the same directory moved to two different places
559 # files from the same directory moved to two different places
560 invalid.add(dsrc + "/")
560 invalid.add(dsrc + "/")
561 else:
561 else:
562 # looks good so far
562 # looks good so far
563 dirmove[dsrc + "/"] = ddst + "/"
563 dirmove[dsrc + "/"] = ddst + "/"
564
564
565 for i in invalid:
565 for i in invalid:
566 if i in dirmove:
566 if i in dirmove:
567 del dirmove[i]
567 del dirmove[i]
568 del d1, d2, invalid
568 del d1, d2, invalid
569
569
570 if not dirmove:
570 if not dirmove:
571 return copy, {}, diverge, renamedelete, {}
571 return copy, {}, diverge, renamedelete, {}
572
572
573 for d in dirmove:
573 for d in dirmove:
574 repo.ui.debug(" discovered dir src: '%s' -> dst: '%s'\n" %
574 repo.ui.debug(" discovered dir src: '%s' -> dst: '%s'\n" %
575 (d, dirmove[d]))
575 (d, dirmove[d]))
576
576
577 movewithdir = {}
577 movewithdir = {}
578 # check unaccounted nonoverlapping files against directory moves
578 # check unaccounted nonoverlapping files against directory moves
579 for f in u1r + u2r:
579 for f in u1r + u2r:
580 if f not in fullcopy:
580 if f not in fullcopy:
581 for d in dirmove:
581 for d in dirmove:
582 if f.startswith(d):
582 if f.startswith(d):
583 # new file added in a directory that was moved, move it
583 # new file added in a directory that was moved, move it
584 df = dirmove[d] + f[len(d):]
584 df = dirmove[d] + f[len(d):]
585 if df not in copy:
585 if df not in copy:
586 movewithdir[f] = df
586 movewithdir[f] = df
587 repo.ui.debug((" pending file src: '%s' -> "
587 repo.ui.debug((" pending file src: '%s' -> "
588 "dst: '%s'\n") % (f, df))
588 "dst: '%s'\n") % (f, df))
589 break
589 break
590
590
591 return copy, movewithdir, diverge, renamedelete, dirmove
591 return copy, movewithdir, diverge, renamedelete, dirmove
592
592
593 def _related(f1, f2, limit):
593 def _related(f1, f2, limit):
594 """return True if f1 and f2 filectx have a common ancestor
594 """return True if f1 and f2 filectx have a common ancestor
595
595
596 Walk back to common ancestor to see if the two files originate
596 Walk back to common ancestor to see if the two files originate
597 from the same file. Since workingfilectx's rev() is None it messes
597 from the same file. Since workingfilectx's rev() is None it messes
598 up the integer comparison logic, hence the pre-step check for
598 up the integer comparison logic, hence the pre-step check for
599 None (f1 and f2 can only be workingfilectx's initially).
599 None (f1 and f2 can only be workingfilectx's initially).
600 """
600 """
601
601
602 if f1 == f2:
602 if f1 == f2:
603 return f1 # a match
603 return f1 # a match
604
604
605 g1, g2 = f1.ancestors(), f2.ancestors()
605 g1, g2 = f1.ancestors(), f2.ancestors()
606 try:
606 try:
607 f1r, f2r = f1.linkrev(), f2.linkrev()
607 f1r, f2r = f1.linkrev(), f2.linkrev()
608
608
609 if f1r is None:
609 if f1r is None:
610 f1 = next(g1)
610 f1 = next(g1)
611 if f2r is None:
611 if f2r is None:
612 f2 = next(g2)
612 f2 = next(g2)
613
613
614 while True:
614 while True:
615 f1r, f2r = f1.linkrev(), f2.linkrev()
615 f1r, f2r = f1.linkrev(), f2.linkrev()
616 if f1r > f2r:
616 if f1r > f2r:
617 f1 = next(g1)
617 f1 = next(g1)
618 elif f2r > f1r:
618 elif f2r > f1r:
619 f2 = next(g2)
619 f2 = next(g2)
620 elif f1 == f2:
620 elif f1 == f2:
621 return f1 # a match
621 return f1 # a match
622 elif f1r == f2r or f1r < limit or f2r < limit:
622 elif f1r == f2r or f1r < limit or f2r < limit:
623 return False # copy no longer relevant
623 return False # copy no longer relevant
624 except StopIteration:
624 except StopIteration:
625 return False
625 return False
626
626
def _checkcopies(srcctx, dstctx, f, base, tca, remotebase, limit, data):
    """
    check possible copies of f from msrc to mdst

    srcctx = starting context for f in msrc
    dstctx = destination context for f in mdst
    f = the filename to check (as in msrc)
    base = the changectx used as a merge base
    tca = topological common ancestor for graft-like scenarios
    remotebase = True if base is outside tca::srcctx, False otherwise
    limit = the rev number to not search beyond
    data = dictionary of dictionary to store copy data. (see mergecopies)

    note: limit is only an optimization, and provides no guarantee that
    irrelevant revisions will not be visited
    there is no easy way to make this algorithm stop in a guaranteed way
    once it "goes behind a certain revision".
    """

    msrc = srcctx.manifest()
    mdst = dstctx.manifest()
    mb = base.manifest()
    mta = tca.manifest()
    # Might be true if this call is about finding backward renames,
    # This happens in the case of grafts because the DAG is then rotated.
    # If the file exists in both the base and the source, we are not looking
    # for a rename on the source side, but on the part of the DAG that is
    # traversed backwards.
    #
    # In the case there is both backward and forward renames (before and after
    # the base) this is more complicated as we must detect a divergence.
    # We use 'backwards = False' in that case.
    backwards = not remotebase and base != tca and f in mb
    # _makegetfctx is a sibling helper in this module; it returns a callable
    # mapping (path, filenode) -> filectx within the given changectx
    getsrcfctx = _makegetfctx(srcctx)
    getdstfctx = _makegetfctx(dstctx)

    if msrc[f] == mb.get(f) and not remotebase:
        # Nothing to merge
        return

    of = None
    # 'seen' tracks every name this file has had along its ancestry,
    # so the walk can detect cycles and divergence candidates below
    seen = {f}
    for oc in getsrcfctx(f, msrc[f]).ancestors():
        ocr = oc.linkrev()
        of = oc.path()
        if of in seen:
            # check limit late - grab last rename before
            if ocr < limit:
                break
            continue
        seen.add(of)

        # remember for dir rename detection
        if backwards:
            data['fullcopy'][of] = f # grafting backwards through renames
        else:
            data['fullcopy'][f] = of
        if of not in mdst:
            continue # no match, keep looking
        if mdst[of] == mb.get(of):
            return # no merge needed, quit early
        c2 = getdstfctx(of, mdst[of])
        # c2 might be a plain new file on added on destination side that is
        # unrelated to the droids we are looking for.
        cr = _related(oc, c2, tca.rev())
        if cr and (of == f or of == c2.path()): # non-divergent
            if backwards:
                data['copy'][of] = f
            elif of in mb:
                data['copy'][f] = of
            elif remotebase: # special case: a <- b <- a -> b "ping-pong" rename
                data['copy'][of] = f
                del data['fullcopy'][f]
                data['fullcopy'][of] = f
            else: # divergence w.r.t. graft CA on one side of topological CA
                for sf in seen:
                    if sf in mb:
                        assert sf not in data['diverge']
                        data['diverge'][sf] = [f, of]
                        break
            return

    # the ancestry walk ended without resolving a copy; if the last name
    # reached exists in the topological CA, record it as incomplete so
    # mergecopies can later combine both sides (see _combinecopies)
    if of in mta:
        if backwards or remotebase:
            data['incomplete'][of] = f
        else:
            for sf in seen:
                if sf in mb:
                    if tca == base:
                        data['diverge'].setdefault(sf, []).append(f)
                    else:
                        data['incompletediverge'][sf] = [of, f]
                    return
720
720
def duplicatecopies(repo, rev, fromrev, skiprev=None):
    '''reproduce copies from fromrev to rev in the dirstate

    If skiprev is specified, it's a revision that should be used to
    filter copy records. Any copies that occur between fromrev and
    skiprev will not be duplicated, even if they appear in the set of
    copies between fromrev and rev.
    '''
    skipped = {}
    if skiprev is not None:
        if not repo.ui.configbool('experimental', 'disablecopytrace'):
            # disablecopytrace skips this line, but not the entire function
            # because the line below is O(size of the repo) during a rebase,
            # while the rest of the function is much faster (and is required
            # for carrying copy metadata across the rebase anyway).
            skipped = pathcopies(repo[fromrev], repo[skiprev])
    for dst, src in pathcopies(repo[fromrev], repo[rev]).iteritems():
        if dst in skipped:
            continue
        # copies.pathcopies returns backward renames, so dst might not
        # actually be in the dirstate; only record the copy when dst is
        # tracked (normal, modified, or added)
        if repo.dirstate[dst] not in "nma":
            continue
        repo.dirstate.copy(src, dst)
General Comments 0
You need to be logged in to leave comments. Login now