copies: factor out setupctx into _makegetfctx...
Matt Mackall
r26656:3e3d783b default
@@ -1,545 +1,544 @@
1 # copies.py - copy detection for Mercurial
1 # copies.py - copy detection for Mercurial
2 #
2 #
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import heapq
10 import heapq
11
11
12 from . import (
12 from . import (
13 pathutil,
13 pathutil,
14 util,
14 util,
15 )
15 )
16
16
17 def _findlimit(repo, a, b):
17 def _findlimit(repo, a, b):
18 """
18 """
19 Find the last revision that needs to be checked to ensure that a full
19 Find the last revision that needs to be checked to ensure that a full
20 transitive closure for file copies can be properly calculated.
20 transitive closure for file copies can be properly calculated.
21 Generally, this means finding the earliest revision number that's an
21 Generally, this means finding the earliest revision number that's an
22 ancestor of a or b but not both, except when a or b is a direct descendant
22 ancestor of a or b but not both, except when a or b is a direct descendant
23 of the other, in which case we can return the minimum revnum of a and b.
23 of the other, in which case we can return the minimum revnum of a and b.
24 None if no such revision exists.
24 None if no such revision exists.
25 """
25 """
26
26
27 # basic idea:
27 # basic idea:
28 # - mark a and b with different sides
28 # - mark a and b with different sides
29 # - if a parent's children are all on the same side, the parent is
29 # - if a parent's children are all on the same side, the parent is
30 # on that side, otherwise it is on no side
30 # on that side, otherwise it is on no side
31 # - walk the graph in topological order with the help of a heap;
31 # - walk the graph in topological order with the help of a heap;
32 # - add unseen parents to side map
32 # - add unseen parents to side map
33 # - clear side of any parent that has children on different sides
33 # - clear side of any parent that has children on different sides
34 # - track number of interesting revs that might still be on a side
34 # - track number of interesting revs that might still be on a side
35 # - track the lowest interesting rev seen
35 # - track the lowest interesting rev seen
36 # - quit when interesting revs is zero
36 # - quit when interesting revs is zero
37
37
38 cl = repo.changelog
38 cl = repo.changelog
39 working = len(cl) # pseudo rev for the working directory
39 working = len(cl) # pseudo rev for the working directory
40 if a is None:
40 if a is None:
41 a = working
41 a = working
42 if b is None:
42 if b is None:
43 b = working
43 b = working
44
44
45 side = {a: -1, b: 1}
45 side = {a: -1, b: 1}
46 visit = [-a, -b]
46 visit = [-a, -b]
47 heapq.heapify(visit)
47 heapq.heapify(visit)
48 interesting = len(visit)
48 interesting = len(visit)
49 hascommonancestor = False
49 hascommonancestor = False
50 limit = working
50 limit = working
51
51
52 while interesting:
52 while interesting:
53 r = -heapq.heappop(visit)
53 r = -heapq.heappop(visit)
54 if r == working:
54 if r == working:
55 parents = [cl.rev(p) for p in repo.dirstate.parents()]
55 parents = [cl.rev(p) for p in repo.dirstate.parents()]
56 else:
56 else:
57 parents = cl.parentrevs(r)
57 parents = cl.parentrevs(r)
58 for p in parents:
58 for p in parents:
59 if p < 0:
59 if p < 0:
60 continue
60 continue
61 if p not in side:
61 if p not in side:
62 # first time we see p; add it to visit
62 # first time we see p; add it to visit
63 side[p] = side[r]
63 side[p] = side[r]
64 if side[p]:
64 if side[p]:
65 interesting += 1
65 interesting += 1
66 heapq.heappush(visit, -p)
66 heapq.heappush(visit, -p)
67 elif side[p] and side[p] != side[r]:
67 elif side[p] and side[p] != side[r]:
68 # p was interesting but now we know better
68 # p was interesting but now we know better
69 side[p] = 0
69 side[p] = 0
70 interesting -= 1
70 interesting -= 1
71 hascommonancestor = True
71 hascommonancestor = True
72 if side[r]:
72 if side[r]:
73 limit = r # lowest rev visited
73 limit = r # lowest rev visited
74 interesting -= 1
74 interesting -= 1
75
75
76 if not hascommonancestor:
76 if not hascommonancestor:
77 return None
77 return None
78
78
79 # Consider the following flow (see test-commit-amend.t under issue4405):
79 # Consider the following flow (see test-commit-amend.t under issue4405):
80 # 1/ File 'a0' committed
80 # 1/ File 'a0' committed
81 # 2/ File renamed from 'a0' to 'a1' in a new commit (call it 'a1')
81 # 2/ File renamed from 'a0' to 'a1' in a new commit (call it 'a1')
82 # 3/ Move back to first commit
82 # 3/ Move back to first commit
83 # 4/ Create a new commit via revert to contents of 'a1' (call it 'a1-amend')
83 # 4/ Create a new commit via revert to contents of 'a1' (call it 'a1-amend')
84 # 5/ Rename file from 'a1' to 'a2' and commit --amend 'a1-msg'
84 # 5/ Rename file from 'a1' to 'a2' and commit --amend 'a1-msg'
85 #
85 #
86 # During the amend in step five, we will be in this state:
86 # During the amend in step five, we will be in this state:
87 #
87 #
88 # @ 3 temporary amend commit for a1-amend
88 # @ 3 temporary amend commit for a1-amend
89 # |
89 # |
90 # o 2 a1-amend
90 # o 2 a1-amend
91 # |
91 # |
92 # | o 1 a1
92 # | o 1 a1
93 # |/
93 # |/
94 # o 0 a0
94 # o 0 a0
95 #
95 #
96 # When _findlimit is called, a and b are revs 3 and 0, so limit will be 2,
96 # When _findlimit is called, a and b are revs 3 and 0, so limit will be 2,
97 # yet the filelog has the copy information in rev 1 and we will not look
97 # yet the filelog has the copy information in rev 1 and we will not look
98 # back far enough unless we also look at the a and b as candidates.
98 # back far enough unless we also look at the a and b as candidates.
99 # This only occurs when a is a descendant of b or vice-versa.
99 # This only occurs when a is a descendant of b or vice-versa.
100 return min(limit, a, b)
100 return min(limit, a, b)
101
101
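The side-marking walk above can be rehearsed standalone. The sketch below is a minimal, self-contained version that uses a plain dict as a toy DAG and plain ints as revisions; it has no working-directory pseudo-rev and no changelog, so it only illustrates the algorithm, not the real API.

import heapq

def findlimit(parents, a, b):
    # parents: {rev: [parent revs]}; a, b: the two heads being compared
    side = {a: -1, b: 1}              # opposite marks for the two heads
    visit = [-a, -b]                  # negate revs so heapq pops the highest rev first
    heapq.heapify(visit)
    interesting = len(visit)
    hascommonancestor = False
    limit = max(a, b)
    while interesting:
        r = -heapq.heappop(visit)
        for p in parents.get(r, ()):
            if p not in side:
                side[p] = side[r]     # first visit: inherit the child's side
                if side[p]:
                    interesting += 1
                heapq.heappush(visit, -p)
            elif side[p] and side[p] != side[r]:
                side[p] = 0           # reachable from both sides: on no side
                interesting -= 1
                hascommonancestor = True
        if side[r]:
            limit = r                 # lowest one-sided rev seen so far
            interesting -= 1
    if not hascommonancestor:
        return None
    return min(limit, a, b)

# 0 -- 1 -- 2 (a)
#  \
#   3 (b)
print(findlimit({1: [0], 2: [1], 3: [0]}, 2, 3))    # 1: the lowest rev that is an ancestor of only one side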
102 def _chain(src, dst, a, b):
102 def _chain(src, dst, a, b):
103 '''chain two sets of copies a->b'''
103 '''chain two sets of copies a->b'''
104 t = a.copy()
104 t = a.copy()
105 for k, v in b.iteritems():
105 for k, v in b.iteritems():
106 if v in t:
106 if v in t:
107 # found a chain
107 # found a chain
108 if t[v] != k:
108 if t[v] != k:
109 # file wasn't renamed back to itself
109 # file wasn't renamed back to itself
110 t[k] = t[v]
110 t[k] = t[v]
111 if v not in dst:
111 if v not in dst:
112 # chain was a rename, not a copy
112 # chain was a rename, not a copy
113 del t[v]
113 del t[v]
114 if v in src:
114 if v in src:
115 # file is a copy of an existing file
115 # file is a copy of an existing file
116 t[k] = v
116 t[k] = v
117
117
118 # remove criss-crossed copies
118 # remove criss-crossed copies
119 for k, v in t.items():
119 for k, v in t.items():
120 if k in src and v in dst:
120 if k in src and v in dst:
121 del t[k]
121 del t[k]
122
122
123 return t
123 return t
124
124
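The chaining rules are easiest to see on plain data. Below is a self-contained rehearsal that stands in sets for the two endpoint contexts (only membership tests are needed) and dicts for the copy maps; it mirrors the logic above but is not the real function.

def chain(src, dst, a, b):
    # src/dst: files present at the start/end; a, b: {copy-dest: copy-source}
    t = dict(a)
    for k, v in b.items():
        if v in t:
            if t[v] != k:             # file wasn't renamed back to itself
                t[k] = t[v]           # collapse x->v and v->k into x->k
            if v not in dst:
                del t[v]              # intermediate name is gone: it was a rename
        if v in src:
            t[k] = v                  # direct copy of a still-existing file
    for k, v in list(t.items()):
        if k in src and v in dst:
            del t[k]                  # criss-crossed copy, drop it
    return t

# the first rev has 'a'; it is renamed to 'b', then 'b' is renamed to 'c'
print(chain({'a'}, {'c'}, {'b': 'a'}, {'c': 'b'}))   # {'c': 'a'}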
125 def _tracefile(fctx, am, limit=-1):
125 def _tracefile(fctx, am, limit=-1):
126 '''return file context that is the ancestor of fctx present in ancestor
126 '''return file context that is the ancestor of fctx present in ancestor
127 manifest am, stopping after the first ancestor lower than limit'''
127 manifest am, stopping after the first ancestor lower than limit'''
128
128
129 for f in fctx.ancestors():
129 for f in fctx.ancestors():
130 if am.get(f.path(), None) == f.filenode():
130 if am.get(f.path(), None) == f.filenode():
131 return f
131 return f
132 if limit >= 0 and f.linkrev() < limit and f.rev() < limit:
132 if limit >= 0 and f.linkrev() < limit and f.rev() < limit:
133 return None
133 return None
134
134
135 def _dirstatecopies(d):
135 def _dirstatecopies(d):
136 ds = d._repo.dirstate
136 ds = d._repo.dirstate
137 c = ds.copies().copy()
137 c = ds.copies().copy()
138 for k in c.keys():
138 for k in c.keys():
139 if ds[k] not in 'anm':
139 if ds[k] not in 'anm':
140 del c[k]
140 del c[k]
141 return c
141 return c
142
142
143 def _computeforwardmissing(a, b, match=None):
143 def _computeforwardmissing(a, b, match=None):
144 """Computes which files are in b but not a.
144 """Computes which files are in b but not a.
145 This is its own function so extensions can easily wrap this call to see what
145 This is its own function so extensions can easily wrap this call to see what
146 files _forwardcopies is about to process.
146 files _forwardcopies is about to process.
147 """
147 """
148 ma = a.manifest()
148 ma = a.manifest()
149 mb = b.manifest()
149 mb = b.manifest()
150 if match:
150 if match:
151 ma = ma.matches(match)
151 ma = ma.matches(match)
152 mb = mb.matches(match)
152 mb = mb.matches(match)
153 return mb.filesnotin(ma)
153 return mb.filesnotin(ma)
154
154
155 def _forwardcopies(a, b, match=None):
155 def _forwardcopies(a, b, match=None):
156 '''find {dst@b: src@a} copy mapping where a is an ancestor of b'''
156 '''find {dst@b: src@a} copy mapping where a is an ancestor of b'''
157
157
158 # check for working copy
158 # check for working copy
159 w = None
159 w = None
160 if b.rev() is None:
160 if b.rev() is None:
161 w = b
161 w = b
162 b = w.p1()
162 b = w.p1()
163 if a == b:
163 if a == b:
164 # short-circuit to avoid issues with merge states
164 # short-circuit to avoid issues with merge states
165 return _dirstatecopies(w)
165 return _dirstatecopies(w)
166
166
167 # files might have to be traced back to the fctx parent of the last
167 # files might have to be traced back to the fctx parent of the last
168 # one-side-only changeset, but not further back than that
168 # one-side-only changeset, but not further back than that
169 limit = _findlimit(a._repo, a.rev(), b.rev())
169 limit = _findlimit(a._repo, a.rev(), b.rev())
170 if limit is None:
170 if limit is None:
171 limit = -1
171 limit = -1
172 am = a.manifest()
172 am = a.manifest()
173
173
174 # find where new files came from
174 # find where new files came from
175 # we currently don't try to find where old files went, too expensive
175 # we currently don't try to find where old files went, too expensive
176 # this means we can miss a case like 'hg rm b; hg cp a b'
176 # this means we can miss a case like 'hg rm b; hg cp a b'
177 cm = {}
177 cm = {}
178 missing = _computeforwardmissing(a, b, match=match)
178 missing = _computeforwardmissing(a, b, match=match)
179 ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True)
179 ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True)
180 for f in missing:
180 for f in missing:
181 fctx = b[f]
181 fctx = b[f]
182 fctx._ancestrycontext = ancestrycontext
182 fctx._ancestrycontext = ancestrycontext
183 ofctx = _tracefile(fctx, am, limit)
183 ofctx = _tracefile(fctx, am, limit)
184 if ofctx:
184 if ofctx:
185 cm[f] = ofctx.path()
185 cm[f] = ofctx.path()
186
186
187 # combine copies from dirstate if necessary
187 # combine copies from dirstate if necessary
188 if w is not None:
188 if w is not None:
189 cm = _chain(a, w, cm, _dirstatecopies(w))
189 cm = _chain(a, w, cm, _dirstatecopies(w))
190
190
191 return cm
191 return cm
192
192
193 def _backwardrenames(a, b):
193 def _backwardrenames(a, b):
194 if a._repo.ui.configbool('experimental', 'disablecopytrace'):
194 if a._repo.ui.configbool('experimental', 'disablecopytrace'):
195 return {}
195 return {}
196
196
197 # Even though we're not taking copies into account, 1:n rename situations
197 # Even though we're not taking copies into account, 1:n rename situations
198 # can still exist (e.g. hg cp a b; hg mv a c). In those cases we
198 # can still exist (e.g. hg cp a b; hg mv a c). In those cases we
199 # arbitrarily pick one of the renames.
199 # arbitrarily pick one of the renames.
200 f = _forwardcopies(b, a)
200 f = _forwardcopies(b, a)
201 r = {}
201 r = {}
202 for k, v in sorted(f.iteritems()):
202 for k, v in sorted(f.iteritems()):
203 # remove copies
203 # remove copies
204 if v in a:
204 if v in a:
205 continue
205 continue
206 r[v] = k
206 r[v] = k
207 return r
207 return r
208
208
209 def pathcopies(x, y, match=None):
209 def pathcopies(x, y, match=None):
210 '''find {dst@y: src@x} copy mapping for directed compare'''
210 '''find {dst@y: src@x} copy mapping for directed compare'''
211 if x == y or not x or not y:
211 if x == y or not x or not y:
212 return {}
212 return {}
213 a = y.ancestor(x)
213 a = y.ancestor(x)
214 if a == x:
214 if a == x:
215 return _forwardcopies(x, y, match=match)
215 return _forwardcopies(x, y, match=match)
216 if a == y:
216 if a == y:
217 return _backwardrenames(x, y)
217 return _backwardrenames(x, y)
218 return _chain(x, y, _backwardrenames(x, a),
218 return _chain(x, y, _backwardrenames(x, a),
219 _forwardcopies(a, y, match=match))
219 _forwardcopies(a, y, match=match))
220
220
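pathcopies() is the public entry point for directed copy detection. A hypothetical debug helper might call it as sketched below; the helper name and wiring are made up, only the pathcopies(x, y, match=None) signature from the code above and the standard repo[rev] / ui.write() usage are assumed.

from mercurial import copies

def showrenames(ui, repo, rev1, rev2):
    # map of {destination-in-rev2: source-in-rev1}, whichever direction applies
    renamed = copies.pathcopies(repo[rev1], repo[rev2])
    for dst, src in sorted(renamed.items()):
        ui.write("%s was copied/renamed from %s\n" % (dst, src))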
221 def _computenonoverlap(repo, c1, c2, addedinm1, addedinm2):
221 def _computenonoverlap(repo, c1, c2, addedinm1, addedinm2):
222 """Computes, based on addedinm1 and addedinm2, the files exclusive to c1
222 """Computes, based on addedinm1 and addedinm2, the files exclusive to c1
223 and c2. This is its own function so extensions can easily wrap this call
223 and c2. This is its own function so extensions can easily wrap this call
224 to see what files mergecopies is about to process.
224 to see what files mergecopies is about to process.
225
225
226 Even though c1 and c2 are not used in this function, they are useful in
226 Even though c1 and c2 are not used in this function, they are useful in
227 other extensions for being able to read the file nodes of the changed files.
227 other extensions for being able to read the file nodes of the changed files.
228 """
228 """
229 u1 = sorted(addedinm1 - addedinm2)
229 u1 = sorted(addedinm1 - addedinm2)
230 u2 = sorted(addedinm2 - addedinm1)
230 u2 = sorted(addedinm2 - addedinm1)
231
231
232 if u1:
232 if u1:
233 repo.ui.debug(" unmatched files in local:\n %s\n"
233 repo.ui.debug(" unmatched files in local:\n %s\n"
234 % "\n ".join(u1))
234 % "\n ".join(u1))
235 if u2:
235 if u2:
236 repo.ui.debug(" unmatched files in other:\n %s\n"
236 repo.ui.debug(" unmatched files in other:\n %s\n"
237 % "\n ".join(u2))
237 % "\n ".join(u2))
238 return u1, u2
238 return u1, u2
239
239
240 def _makegetfctx(ctx):
241 """return a 'getfctx' function suitable for checkcopies usage
242
243 We have to re-setup the function building 'filectx' for each
244 'checkcopies' to ensure the linkrev adjustment is properly set up for
245 each. Linkrev adjustment is important to avoid bugs in rename
246 detection. Moreover, having a proper '_ancestrycontext' setup ensures
247 the performance impact of this adjustment is kept limited. Without it,
248 each file could do a full dag traversal making the time complexity of
249 the operation explode (see issue4537).
250
251 This function exists here mostly to limit the impact on stable. Feel
252 free to refactor on default.
253 """
254 rev = ctx.rev()
255 repo = ctx._repo
256 ac = getattr(ctx, '_ancestrycontext', None)
257 if ac is None:
258 revs = [rev]
259 if rev is None:
260 revs = [p.rev() for p in ctx.parents()]
261 ac = repo.changelog.ancestors(revs, inclusive=True)
262 ctx._ancestrycontext = ac
263 def makectx(f, n):
264 if len(n) != 20: # in a working context?
265 if ctx.rev() is None:
266 return ctx.filectx(f)
267 return repo[None][f]
268 fctx = repo.filectx(f, fileid=n)
269 # setup only needed for filectx not created from a changectx
270 fctx._ancestrycontext = ac
271 fctx._descendantrev = rev
272 return fctx
273 return util.lrucachefunc(makectx)
274
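The factored-out helper is consumed further down in mergecopies(); the two lines below restate that calling pattern (c1 is a changectx, m1 its manifest, f a filename from the nonoverlap sets): build one cached lookup per context so every filectx shares the same _ancestrycontext and adjusted linkrev, then resolve (path, filenode) pairs through it.

getfctx = _makegetfctx(c1)       # one linkrev-adjustment setup per changectx
fctx = getfctx(f, m1[f])         # repeated lookups are served by util.lrucachefunc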
240 def mergecopies(repo, c1, c2, ca):
275 def mergecopies(repo, c1, c2, ca):
241 """
276 """
242 Find moves and copies between context c1 and c2 that are relevant
277 Find moves and copies between context c1 and c2 that are relevant
243 for merging.
278 for merging.
244
279
245 Returns four dicts: "copy", "movewithdir", "diverge", and
280 Returns four dicts: "copy", "movewithdir", "diverge", and
246 "renamedelete".
281 "renamedelete".
247
282
248 "copy" is a mapping from destination name -> source name,
283 "copy" is a mapping from destination name -> source name,
249 where source is in c1 and destination is in c2 or vice-versa.
284 where source is in c1 and destination is in c2 or vice-versa.
250
285
251 "movewithdir" is a mapping from source name -> destination name,
286 "movewithdir" is a mapping from source name -> destination name,
252 where the file at source present in one context but not the other
287 where the file at source present in one context but not the other
253 needs to be moved to destination by the merge process, because the
288 needs to be moved to destination by the merge process, because the
254 other context moved the directory it is in.
289 other context moved the directory it is in.
255
290
256 "diverge" is a mapping of source name -> list of destination names
291 "diverge" is a mapping of source name -> list of destination names
257 for divergent renames.
292 for divergent renames.
258
293
259 "renamedelete" is a mapping of source name -> list of destination
294 "renamedelete" is a mapping of source name -> list of destination
260 names for files deleted in c1 that were renamed in c2 or vice-versa.
295 names for files deleted in c1 that were renamed in c2 or vice-versa.
261 """
296 """
262 # avoid silly behavior for update from empty dir
297 # avoid silly behavior for update from empty dir
263 if not c1 or not c2 or c1 == c2:
298 if not c1 or not c2 or c1 == c2:
264 return {}, {}, {}, {}
299 return {}, {}, {}, {}
265
300
266 # avoid silly behavior for parent -> working dir
301 # avoid silly behavior for parent -> working dir
267 if c2.node() is None and c1.node() == repo.dirstate.p1():
302 if c2.node() is None and c1.node() == repo.dirstate.p1():
268 return repo.dirstate.copies(), {}, {}, {}
303 return repo.dirstate.copies(), {}, {}, {}
269
304
270 # Copy trace disabling is explicitly below the node == p1 logic above
305 # Copy trace disabling is explicitly below the node == p1 logic above
271 # because the logic above is required for a simple copy to be kept across a
306 # because the logic above is required for a simple copy to be kept across a
272 # rebase.
307 # rebase.
273 if repo.ui.configbool('experimental', 'disablecopytrace'):
308 if repo.ui.configbool('experimental', 'disablecopytrace'):
274 return {}, {}, {}, {}
309 return {}, {}, {}, {}
275
310
276 limit = _findlimit(repo, c1.rev(), c2.rev())
311 limit = _findlimit(repo, c1.rev(), c2.rev())
277 if limit is None:
312 if limit is None:
278 # no common ancestor, no copies
313 # no common ancestor, no copies
279 return {}, {}, {}, {}
314 return {}, {}, {}, {}
280 repo.ui.debug(" searching for copies back to rev %d\n" % limit)
315 repo.ui.debug(" searching for copies back to rev %d\n" % limit)
281
316
282 m1 = c1.manifest()
317 m1 = c1.manifest()
283 m2 = c2.manifest()
318 m2 = c2.manifest()
284 ma = ca.manifest()
319 ma = ca.manifest()
285
320
286
287 def setupctx(ctx):
288 """return a 'getfctx' function suitable for checkcopies usage
289
290 We have to re-setup the function building 'filectx' for each
291 'checkcopies' to ensure the linkrev adjustment is properly set up for
292 each. Linkrev adjustment is important to avoid bugs in rename
293 detection. Moreover, having a proper '_ancestrycontext' setup ensures
294 the performance impact of this adjustment is kept limited. Without it,
295 each file could do a full dag traversal making the time complexity of
296 the operation explode (see issue4537).
297
298 This function exists here mostly to limit the impact on stable. Feel
299 free to refactor on default.
300 """
301 rev = ctx.rev()
302 ac = getattr(ctx, '_ancestrycontext', None)
303 repo = ctx._repo
304 if ac is None:
305 revs = [rev]
306 if rev is None:
307 revs = [p.rev() for p in ctx.parents()]
308 ac = ctx._repo.changelog.ancestors(revs, inclusive=True)
309 ctx._ancestrycontext = ac
310 def makectx(f, n):
311 if len(n) != 20: # in a working context?
312 if ctx.rev() is None:
313 return ctx.filectx(f)
314 return repo[None][f]
315 fctx = repo.filectx(f, fileid=n)
316 # setup only needed for filectx not created from a changectx
317 fctx._ancestrycontext = ac
318 fctx._descendantrev = rev
319 return fctx
320 return util.lrucachefunc(makectx)
321
322 copy1, copy2, = {}, {}
321 copy1, copy2, = {}, {}
323 movewithdir1, movewithdir2 = {}, {}
322 movewithdir1, movewithdir2 = {}, {}
324 fullcopy1, fullcopy2 = {}, {}
323 fullcopy1, fullcopy2 = {}, {}
325 diverge = {}
324 diverge = {}
326
325
327 addedinm1 = m1.filesnotin(ma)
326 addedinm1 = m1.filesnotin(ma)
328 addedinm2 = m2.filesnotin(ma)
327 addedinm2 = m2.filesnotin(ma)
329 u1, u2 = _computenonoverlap(repo, c1, c2, addedinm1, addedinm2)
328 u1, u2 = _computenonoverlap(repo, c1, c2, addedinm1, addedinm2)
330
329
331 for f in u1:
330 for f in u1:
332 getfctx = setupctx(c1)
331 getfctx = _makegetfctx(c1)
333 checkcopies(getfctx, f, m1, m2, ca, limit, diverge, copy1, fullcopy1)
332 checkcopies(getfctx, f, m1, m2, ca, limit, diverge, copy1, fullcopy1)
334
333
335 for f in u2:
334 for f in u2:
336 getfctx = setupctx(c2)
335 getfctx = _makegetfctx(c2)
337 checkcopies(getfctx, f, m2, m1, ca, limit, diverge, copy2, fullcopy2)
336 checkcopies(getfctx, f, m2, m1, ca, limit, diverge, copy2, fullcopy2)
338
337
339 copy = dict(copy1.items() + copy2.items())
338 copy = dict(copy1.items() + copy2.items())
340 movewithdir = dict(movewithdir1.items() + movewithdir2.items())
339 movewithdir = dict(movewithdir1.items() + movewithdir2.items())
341 fullcopy = dict(fullcopy1.items() + fullcopy2.items())
340 fullcopy = dict(fullcopy1.items() + fullcopy2.items())
342
341
343 renamedelete = {}
342 renamedelete = {}
344 renamedelete2 = set()
343 renamedelete2 = set()
345 divergeset = set()
344 divergeset = set()
346 for of, fl in diverge.items():
345 for of, fl in diverge.items():
347 if len(fl) == 1 or of in c1 or of in c2:
346 if len(fl) == 1 or of in c1 or of in c2:
348 del diverge[of] # not actually divergent, or not a rename
347 del diverge[of] # not actually divergent, or not a rename
349 if of not in c1 and of not in c2:
348 if of not in c1 and of not in c2:
350 # renamed on one side, deleted on the other side, but filter
349 # renamed on one side, deleted on the other side, but filter
351 # out files that have been renamed and then deleted
350 # out files that have been renamed and then deleted
352 renamedelete[of] = [f for f in fl if f in c1 or f in c2]
351 renamedelete[of] = [f for f in fl if f in c1 or f in c2]
353 renamedelete2.update(fl) # reverse map for below
352 renamedelete2.update(fl) # reverse map for below
354 else:
353 else:
355 divergeset.update(fl) # reverse map for below
354 divergeset.update(fl) # reverse map for below
356
355
357 bothnew = sorted(addedinm1 & addedinm2)
356 bothnew = sorted(addedinm1 & addedinm2)
358 if bothnew:
357 if bothnew:
359 repo.ui.debug(" unmatched files new in both:\n %s\n"
358 repo.ui.debug(" unmatched files new in both:\n %s\n"
360 % "\n ".join(bothnew))
359 % "\n ".join(bothnew))
361 bothdiverge, _copy, _fullcopy = {}, {}, {}
360 bothdiverge, _copy, _fullcopy = {}, {}, {}
362 for f in bothnew:
361 for f in bothnew:
363 getfctx = setupctx(c1)
362 getfctx = _makegetfctx(c1)
364 checkcopies(getfctx, f, m1, m2, ca, limit, bothdiverge,
363 checkcopies(getfctx, f, m1, m2, ca, limit, bothdiverge,
365 _copy, _fullcopy)
364 _copy, _fullcopy)
366 getfctx = setupctx(c2)
365 getfctx = _makegetfctx(c2)
367 checkcopies(getfctx, f, m2, m1, ca, limit, bothdiverge,
366 checkcopies(getfctx, f, m2, m1, ca, limit, bothdiverge,
368 _copy, _fullcopy)
367 _copy, _fullcopy)
369 for of, fl in bothdiverge.items():
368 for of, fl in bothdiverge.items():
370 if len(fl) == 2 and fl[0] == fl[1]:
369 if len(fl) == 2 and fl[0] == fl[1]:
371 copy[fl[0]] = of # not actually divergent, just matching renames
370 copy[fl[0]] = of # not actually divergent, just matching renames
372
371
373 if fullcopy and repo.ui.debugflag:
372 if fullcopy and repo.ui.debugflag:
374 repo.ui.debug(" all copies found (* = to merge, ! = divergent, "
373 repo.ui.debug(" all copies found (* = to merge, ! = divergent, "
375 "% = renamed and deleted):\n")
374 "% = renamed and deleted):\n")
376 for f in sorted(fullcopy):
375 for f in sorted(fullcopy):
377 note = ""
376 note = ""
378 if f in copy:
377 if f in copy:
379 note += "*"
378 note += "*"
380 if f in divergeset:
379 if f in divergeset:
381 note += "!"
380 note += "!"
382 if f in renamedelete2:
381 if f in renamedelete2:
383 note += "%"
382 note += "%"
384 repo.ui.debug(" src: '%s' -> dst: '%s' %s\n" % (fullcopy[f], f,
383 repo.ui.debug(" src: '%s' -> dst: '%s' %s\n" % (fullcopy[f], f,
385 note))
384 note))
386 del divergeset
385 del divergeset
387
386
388 if not fullcopy:
387 if not fullcopy:
389 return copy, movewithdir, diverge, renamedelete
388 return copy, movewithdir, diverge, renamedelete
390
389
391 repo.ui.debug(" checking for directory renames\n")
390 repo.ui.debug(" checking for directory renames\n")
392
391
393 # generate a directory move map
392 # generate a directory move map
394 d1, d2 = c1.dirs(), c2.dirs()
393 d1, d2 = c1.dirs(), c2.dirs()
395 # Hack for adding '', which is not otherwise added, to d1 and d2
394 # Hack for adding '', which is not otherwise added, to d1 and d2
396 d1.addpath('/')
395 d1.addpath('/')
397 d2.addpath('/')
396 d2.addpath('/')
398 invalid = set()
397 invalid = set()
399 dirmove = {}
398 dirmove = {}
400
399
401 # examine each file copy for a potential directory move, which is
400 # examine each file copy for a potential directory move, which is
402 # when all the files in a directory are moved to a new directory
401 # when all the files in a directory are moved to a new directory
403 for dst, src in fullcopy.iteritems():
402 for dst, src in fullcopy.iteritems():
404 dsrc, ddst = pathutil.dirname(src), pathutil.dirname(dst)
403 dsrc, ddst = pathutil.dirname(src), pathutil.dirname(dst)
405 if dsrc in invalid:
404 if dsrc in invalid:
406 # already seen to be uninteresting
405 # already seen to be uninteresting
407 continue
406 continue
408 elif dsrc in d1 and ddst in d1:
407 elif dsrc in d1 and ddst in d1:
409 # directory wasn't entirely moved locally
408 # directory wasn't entirely moved locally
410 invalid.add(dsrc)
409 invalid.add(dsrc)
411 elif dsrc in d2 and ddst in d2:
410 elif dsrc in d2 and ddst in d2:
412 # directory wasn't entirely moved remotely
411 # directory wasn't entirely moved remotely
413 invalid.add(dsrc)
412 invalid.add(dsrc)
414 elif dsrc in dirmove and dirmove[dsrc] != ddst:
413 elif dsrc in dirmove and dirmove[dsrc] != ddst:
415 # files from the same directory moved to two different places
414 # files from the same directory moved to two different places
416 invalid.add(dsrc)
415 invalid.add(dsrc)
417 else:
416 else:
418 # looks good so far
417 # looks good so far
419 dirmove[dsrc + "/"] = ddst + "/"
418 dirmove[dsrc + "/"] = ddst + "/"
420
419
421 for i in invalid:
420 for i in invalid:
422 if i in dirmove:
421 if i in dirmove:
423 del dirmove[i]
422 del dirmove[i]
424 del d1, d2, invalid
423 del d1, d2, invalid
425
424
426 if not dirmove:
425 if not dirmove:
427 return copy, movewithdir, diverge, renamedelete
426 return copy, movewithdir, diverge, renamedelete
428
427
429 for d in dirmove:
428 for d in dirmove:
430 repo.ui.debug(" discovered dir src: '%s' -> dst: '%s'\n" %
429 repo.ui.debug(" discovered dir src: '%s' -> dst: '%s'\n" %
431 (d, dirmove[d]))
430 (d, dirmove[d]))
432
431
433 # check unaccounted nonoverlapping files against directory moves
432 # check unaccounted nonoverlapping files against directory moves
434 for f in u1 + u2:
433 for f in u1 + u2:
435 if f not in fullcopy:
434 if f not in fullcopy:
436 for d in dirmove:
435 for d in dirmove:
437 if f.startswith(d):
436 if f.startswith(d):
438 # new file added in a directory that was moved, move it
437 # new file added in a directory that was moved, move it
439 df = dirmove[d] + f[len(d):]
438 df = dirmove[d] + f[len(d):]
440 if df not in copy:
439 if df not in copy:
441 movewithdir[f] = df
440 movewithdir[f] = df
442 repo.ui.debug((" pending file src: '%s' -> "
441 repo.ui.debug((" pending file src: '%s' -> "
443 "dst: '%s'\n") % (f, df))
442 "dst: '%s'\n") % (f, df))
444 break
443 break
445
444
446 return copy, movewithdir, diverge, renamedelete
445 return copy, movewithdir, diverge, renamedelete
447
446
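As an illustration of the four return values (hand-written, not produced by an actual run): suppose the ancestor has 'a', 'f', 'p' and 'old/x'; the local side renames 'a' to 'b', 'f' to 'g', 'p' to 'q' and 'old/x' to 'new/x'; the other side edits 'a', renames 'f' to 'h', deletes 'p' and adds 'old/y'. mergecopies() would then report roughly:

copy = {'b': 'a'}                  # 'a' edited remotely, renamed locally: merge into 'b'
movewithdir = {'old/y': 'new/y'}   # file added inside a directory the other side moved
diverge = {'f': ['g', 'h']}        # one source renamed to different names on each side
renamedelete = {'p': ['q']}        # renamed on one side, deleted on the other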
448 def checkcopies(getfctx, f, m1, m2, ca, limit, diverge, copy, fullcopy):
447 def checkcopies(getfctx, f, m1, m2, ca, limit, diverge, copy, fullcopy):
449 """
448 """
450 check possible copies of f from m1 to m2
449 check possible copies of f from m1 to m2
451
450
452 getfctx = function accepting (filename, node) that returns a filectx.
451 getfctx = function accepting (filename, node) that returns a filectx.
453 f = the filename to check
452 f = the filename to check
454 m1 = the source manifest
453 m1 = the source manifest
455 m2 = the destination manifest
454 m2 = the destination manifest
456 ca = the changectx of the common ancestor
455 ca = the changectx of the common ancestor
457 limit = the rev number to not search beyond
456 limit = the rev number to not search beyond
458 diverge = record all diverges in this dict
457 diverge = record all diverges in this dict
459 copy = record all non-divergent copies in this dict
458 copy = record all non-divergent copies in this dict
460 fullcopy = record all copies in this dict
459 fullcopy = record all copies in this dict
461 """
460 """
462
461
463 ma = ca.manifest()
462 ma = ca.manifest()
464
463
465 def _related(f1, f2, limit):
464 def _related(f1, f2, limit):
466 # Walk back to common ancestor to see if the two files originate
465 # Walk back to common ancestor to see if the two files originate
467 # from the same file. Since workingfilectx's rev() is None it messes
466 # from the same file. Since workingfilectx's rev() is None it messes
468 # up the integer comparison logic, hence the pre-step check for
467 # up the integer comparison logic, hence the pre-step check for
469 # None (f1 and f2 can only be workingfilectx's initially).
468 # None (f1 and f2 can only be workingfilectx's initially).
470
469
471 if f1 == f2:
470 if f1 == f2:
472 return f1 # a match
471 return f1 # a match
473
472
474 g1, g2 = f1.ancestors(), f2.ancestors()
473 g1, g2 = f1.ancestors(), f2.ancestors()
475 try:
474 try:
476 f1r, f2r = f1.linkrev(), f2.linkrev()
475 f1r, f2r = f1.linkrev(), f2.linkrev()
477
476
478 if f1r is None:
477 if f1r is None:
479 f1 = g1.next()
478 f1 = g1.next()
480 if f2r is None:
479 if f2r is None:
481 f2 = g2.next()
480 f2 = g2.next()
482
481
483 while True:
482 while True:
484 f1r, f2r = f1.linkrev(), f2.linkrev()
483 f1r, f2r = f1.linkrev(), f2.linkrev()
485 if f1r > f2r:
484 if f1r > f2r:
486 f1 = g1.next()
485 f1 = g1.next()
487 elif f2r > f1r:
486 elif f2r > f1r:
488 f2 = g2.next()
487 f2 = g2.next()
489 elif f1 == f2:
488 elif f1 == f2:
490 return f1 # a match
489 return f1 # a match
491 elif f1r == f2r or f1r < limit or f2r < limit:
490 elif f1r == f2r or f1r < limit or f2r < limit:
492 return False # copy no longer relevant
491 return False # copy no longer relevant
493 except StopIteration:
492 except StopIteration:
494 return False
493 return False
495
494
496 of = None
495 of = None
497 seen = set([f])
496 seen = set([f])
498 for oc in getfctx(f, m1[f]).ancestors():
497 for oc in getfctx(f, m1[f]).ancestors():
499 ocr = oc.linkrev()
498 ocr = oc.linkrev()
500 of = oc.path()
499 of = oc.path()
501 if of in seen:
500 if of in seen:
502 # check limit late - grab last rename before
501 # check limit late - grab last rename before
503 if ocr < limit:
502 if ocr < limit:
504 break
503 break
505 continue
504 continue
506 seen.add(of)
505 seen.add(of)
507
506
508 fullcopy[f] = of # remember for dir rename detection
507 fullcopy[f] = of # remember for dir rename detection
509 if of not in m2:
508 if of not in m2:
510 continue # no match, keep looking
509 continue # no match, keep looking
511 if m2[of] == ma.get(of):
510 if m2[of] == ma.get(of):
512 break # no merge needed, quit early
511 break # no merge needed, quit early
513 c2 = getfctx(of, m2[of])
512 c2 = getfctx(of, m2[of])
514 cr = _related(oc, c2, ca.rev())
513 cr = _related(oc, c2, ca.rev())
515 if cr and (of == f or of == c2.path()): # non-divergent
514 if cr and (of == f or of == c2.path()): # non-divergent
516 copy[f] = of
515 copy[f] = of
517 of = None
516 of = None
518 break
517 break
519
518
520 if of in ma:
519 if of in ma:
521 diverge.setdefault(of, []).append(f)
520 diverge.setdefault(of, []).append(f)
522
521
523 def duplicatecopies(repo, rev, fromrev, skiprev=None):
522 def duplicatecopies(repo, rev, fromrev, skiprev=None):
524 '''reproduce copies from fromrev to rev in the dirstate
523 '''reproduce copies from fromrev to rev in the dirstate
525
524
526 If skiprev is specified, it's a revision that should be used to
525 If skiprev is specified, it's a revision that should be used to
527 filter copy records. Any copies that occur between fromrev and
526 filter copy records. Any copies that occur between fromrev and
528 skiprev will not be duplicated, even if they appear in the set of
527 skiprev will not be duplicated, even if they appear in the set of
529 copies between fromrev and rev.
528 copies between fromrev and rev.
530 '''
529 '''
531 exclude = {}
530 exclude = {}
532 if (skiprev is not None and
531 if (skiprev is not None and
533 not repo.ui.configbool('experimental', 'disablecopytrace')):
532 not repo.ui.configbool('experimental', 'disablecopytrace')):
534 # disablecopytrace skips this line, but not the entire function because
533 # disablecopytrace skips this line, but not the entire function because
535 # the line below is O(size of the repo) during a rebase, while the rest
534 # the line below is O(size of the repo) during a rebase, while the rest
536 # of the function is much faster (and is required for carrying copy
535 # of the function is much faster (and is required for carrying copy
537 # metadata across the rebase anyway).
536 # metadata across the rebase anyway).
538 exclude = pathcopies(repo[fromrev], repo[skiprev])
537 exclude = pathcopies(repo[fromrev], repo[skiprev])
539 for dst, src in pathcopies(repo[fromrev], repo[rev]).iteritems():
538 for dst, src in pathcopies(repo[fromrev], repo[rev]).iteritems():
540 # copies.pathcopies returns backward renames, so dst might not
539 # copies.pathcopies returns backward renames, so dst might not
541 # actually be in the dirstate
540 # actually be in the dirstate
542 if dst in exclude:
541 if dst in exclude:
543 continue
542 continue
544 if repo.dirstate[dst] in "nma":
543 if repo.dirstate[dst] in "nma":
545 repo.dirstate.copy(src, dst)
544 repo.dirstate.copy(src, dst)
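The skiprev filter boils down to a dict subtraction; the self-contained rehearsal below uses plain dicts in place of the two pathcopies() results (file names are made up).

allcopies = {'b': 'a', 'd': 'c'}    # copies between fromrev and rev
exclude = {'d': 'c'}                # copies already present between fromrev and skiprev
carried = dict((dst, src) for dst, src in allcopies.items() if dst not in exclude)
print(carried)                      # {'b': 'a'}: only this copy is re-recorded in the dirstate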