copies: move _makegetfctx calls into checkcopies
Matt Mackall
r26657:1ae898cf default
@@ -1,544 +1,539 @@
1 # copies.py - copy detection for Mercurial
1 # copies.py - copy detection for Mercurial
2 #
2 #
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import heapq
10 import heapq
11
11
12 from . import (
12 from . import (
13 pathutil,
13 pathutil,
14 util,
14 util,
15 )
15 )
16
16
17 def _findlimit(repo, a, b):
17 def _findlimit(repo, a, b):
18 """
18 """
19 Find the last revision that needs to be checked to ensure that a full
19 Find the last revision that needs to be checked to ensure that a full
20 transitive closure for file copies can be properly calculated.
20 transitive closure for file copies can be properly calculated.
21 Generally, this means finding the earliest revision number that's an
21 Generally, this means finding the earliest revision number that's an
22 ancestor of a or b but not both, except when a or b is a direct descendant
22 ancestor of a or b but not both, except when a or b is a direct descendant
23 of the other, in which case we can return the minimum revnum of a and b.
23 of the other, in which case we can return the minimum revnum of a and b.
24 None if no such revision exists.
24 None if no such revision exists.
25 """
25 """
26
26
27 # basic idea:
27 # basic idea:
28 # - mark a and b with different sides
28 # - mark a and b with different sides
29 # - if a parent's children are all on the same side, the parent is
29 # - if a parent's children are all on the same side, the parent is
30 # on that side, otherwise it is on no side
30 # on that side, otherwise it is on no side
31 # - walk the graph in topological order with the help of a heap;
31 # - walk the graph in topological order with the help of a heap;
32 # - add unseen parents to side map
32 # - add unseen parents to side map
33 # - clear side of any parent that has children on different sides
33 # - clear side of any parent that has children on different sides
34 # - track number of interesting revs that might still be on a side
34 # - track number of interesting revs that might still be on a side
35 # - track the lowest interesting rev seen
35 # - track the lowest interesting rev seen
36 # - quit when interesting revs is zero
36 # - quit when interesting revs is zero
37
37
38 cl = repo.changelog
38 cl = repo.changelog
39 working = len(cl) # pseudo rev for the working directory
39 working = len(cl) # pseudo rev for the working directory
40 if a is None:
40 if a is None:
41 a = working
41 a = working
42 if b is None:
42 if b is None:
43 b = working
43 b = working
44
44
45 side = {a: -1, b: 1}
45 side = {a: -1, b: 1}
46 visit = [-a, -b]
46 visit = [-a, -b]
47 heapq.heapify(visit)
47 heapq.heapify(visit)
48 interesting = len(visit)
48 interesting = len(visit)
49 hascommonancestor = False
49 hascommonancestor = False
50 limit = working
50 limit = working
51
51
52 while interesting:
52 while interesting:
53 r = -heapq.heappop(visit)
53 r = -heapq.heappop(visit)
54 if r == working:
54 if r == working:
55 parents = [cl.rev(p) for p in repo.dirstate.parents()]
55 parents = [cl.rev(p) for p in repo.dirstate.parents()]
56 else:
56 else:
57 parents = cl.parentrevs(r)
57 parents = cl.parentrevs(r)
58 for p in parents:
58 for p in parents:
59 if p < 0:
59 if p < 0:
60 continue
60 continue
61 if p not in side:
61 if p not in side:
62 # first time we see p; add it to visit
62 # first time we see p; add it to visit
63 side[p] = side[r]
63 side[p] = side[r]
64 if side[p]:
64 if side[p]:
65 interesting += 1
65 interesting += 1
66 heapq.heappush(visit, -p)
66 heapq.heappush(visit, -p)
67 elif side[p] and side[p] != side[r]:
67 elif side[p] and side[p] != side[r]:
68 # p was interesting but now we know better
68 # p was interesting but now we know better
69 side[p] = 0
69 side[p] = 0
70 interesting -= 1
70 interesting -= 1
71 hascommonancestor = True
71 hascommonancestor = True
72 if side[r]:
72 if side[r]:
73 limit = r # lowest rev visited
73 limit = r # lowest rev visited
74 interesting -= 1
74 interesting -= 1
75
75
76 if not hascommonancestor:
76 if not hascommonancestor:
77 return None
77 return None
78
78
79 # Consider the following flow (see test-commit-amend.t under issue4405):
79 # Consider the following flow (see test-commit-amend.t under issue4405):
80 # 1/ File 'a0' committed
80 # 1/ File 'a0' committed
81 # 2/ File renamed from 'a0' to 'a1' in a new commit (call it 'a1')
81 # 2/ File renamed from 'a0' to 'a1' in a new commit (call it 'a1')
82 # 3/ Move back to first commit
82 # 3/ Move back to first commit
83 # 4/ Create a new commit via revert to contents of 'a1' (call it 'a1-amend')
83 # 4/ Create a new commit via revert to contents of 'a1' (call it 'a1-amend')
84 # 5/ Rename file from 'a1' to 'a2' and commit --amend 'a1-msg'
84 # 5/ Rename file from 'a1' to 'a2' and commit --amend 'a1-msg'
85 #
85 #
86 # During the amend in step five, we will be in this state:
86 # During the amend in step five, we will be in this state:
87 #
87 #
88 # @ 3 temporary amend commit for a1-amend
88 # @ 3 temporary amend commit for a1-amend
89 # |
89 # |
90 # o 2 a1-amend
90 # o 2 a1-amend
91 # |
91 # |
92 # | o 1 a1
92 # | o 1 a1
93 # |/
93 # |/
94 # o 0 a0
94 # o 0 a0
95 #
95 #
96 # When _findlimit is called, a and b are revs 3 and 0, so limit will be 2,
96 # When _findlimit is called, a and b are revs 3 and 0, so limit will be 2,
97 # yet the filelog has the copy information in rev 1 and we will not look
97 # yet the filelog has the copy information in rev 1 and we will not look
98 # back far enough unless we also look at the a and b as candidates.
98 # back far enough unless we also look at the a and b as candidates.
99 # This only occurs when a is a descendant of b or vice-versa.
99 # This only occurs when a is a descendant of b or vice-versa.
100 return min(limit, a, b)
100 return min(limit, a, b)
101
101
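For illustration only, and not part of this changeset: a self-contained toy version of the side-marking walk sketched in the comments above, run on a hand-built parent map instead of a real changelog. All names and the tiny history below are invented.

import heapq

def toy_findlimit(parents, a, b):
    # parents: dict mapping rev -> tuple of parent revs, -1 meaning "no parent"
    side = {a: -1, b: 1}
    visit = [-a, -b]
    heapq.heapify(visit)
    interesting = len(visit)
    hascommonancestor = False
    limit = max(a, b)
    while interesting:
        r = -heapq.heappop(visit)
        for p in parents[r]:
            if p < 0:
                continue
            if p not in side:
                side[p] = side[r]          # first time we see p
                if side[p]:
                    interesting += 1
                heapq.heappush(visit, -p)
            elif side[p] and side[p] != side[r]:
                side[p] = 0                # reachable from both sides
                interesting -= 1
                hascommonancestor = True
        if side[r]:
            limit = r                      # lowest one-sided rev seen so far
            interesting -= 1
    if not hascommonancestor:
        return None
    return min(limit, a, b)

# 0 <- 1 <- 2 on one side, 0 <- 3 on the other: the walk bottoms out at rev 1
parents = {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (0, -1)}
print toy_findlimit(parents, 2, 3)         # 1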
102 def _chain(src, dst, a, b):
102 def _chain(src, dst, a, b):
103 '''chain two sets of copies a->b'''
103 '''chain two sets of copies a->b'''
104 t = a.copy()
104 t = a.copy()
105 for k, v in b.iteritems():
105 for k, v in b.iteritems():
106 if v in t:
106 if v in t:
107 # found a chain
107 # found a chain
108 if t[v] != k:
108 if t[v] != k:
109 # file wasn't renamed back to itself
109 # file wasn't renamed back to itself
110 t[k] = t[v]
110 t[k] = t[v]
111 if v not in dst:
111 if v not in dst:
112 # chain was a rename, not a copy
112 # chain was a rename, not a copy
113 del t[v]
113 del t[v]
114 if v in src:
114 if v in src:
115 # file is a copy of an existing file
115 # file is a copy of an existing file
116 t[k] = v
116 t[k] = v
117
117
118 # remove criss-crossed copies
118 # remove criss-crossed copies
119 for k, v in t.items():
119 for k, v in t.items():
120 if k in src and v in dst:
120 if k in src and v in dst:
121 del t[k]
121 del t[k]
122
122
123 return t
123 return t
124
124
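A minimal sketch of what the chaining above computes, assuming a Python 2 environment where "from mercurial import copies" resolves to this module; plain sets and dicts stand in for the real contexts, since only membership tests and dict iteration are needed.

from mercurial import copies

src = set(['a'])        # names present in the source context
dst = set(['c'])        # names present in the destination context
leg1 = {'b': 'a'}       # first leg: b was copied from a
leg2 = {'c': 'b'}       # second leg: c was copied from b

# b no longer exists in dst, so the intermediate name is dropped
print copies._chain(src, dst, leg1, leg2)   # {'c': 'a'}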
125 def _tracefile(fctx, am, limit=-1):
125 def _tracefile(fctx, am, limit=-1):
126 '''return file context that is the ancestor of fctx present in ancestor
126 '''return file context that is the ancestor of fctx present in ancestor
127 manifest am, stopping after the first ancestor lower than limit'''
127 manifest am, stopping after the first ancestor lower than limit'''
128
128
129 for f in fctx.ancestors():
129 for f in fctx.ancestors():
130 if am.get(f.path(), None) == f.filenode():
130 if am.get(f.path(), None) == f.filenode():
131 return f
131 return f
132 if limit >= 0 and f.linkrev() < limit and f.rev() < limit:
132 if limit >= 0 and f.linkrev() < limit and f.rev() < limit:
133 return None
133 return None
134
134
135 def _dirstatecopies(d):
135 def _dirstatecopies(d):
136 ds = d._repo.dirstate
136 ds = d._repo.dirstate
137 c = ds.copies().copy()
137 c = ds.copies().copy()
138 for k in c.keys():
138 for k in c.keys():
139 if ds[k] not in 'anm':
139 if ds[k] not in 'anm':
140 del c[k]
140 del c[k]
141 return c
141 return c
142
142
143 def _computeforwardmissing(a, b, match=None):
143 def _computeforwardmissing(a, b, match=None):
144 """Computes which files are in b but not a.
144 """Computes which files are in b but not a.
145 This is its own function so extensions can easily wrap this call to see what
145 This is its own function so extensions can easily wrap this call to see what
146 files _forwardcopies is about to process.
146 files _forwardcopies is about to process.
147 """
147 """
148 ma = a.manifest()
148 ma = a.manifest()
149 mb = b.manifest()
149 mb = b.manifest()
150 if match:
150 if match:
151 ma = ma.matches(match)
151 ma = ma.matches(match)
152 mb = mb.matches(match)
152 mb = mb.matches(match)
153 return mb.filesnotin(ma)
153 return mb.filesnotin(ma)
154
154
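Since the docstring above advertises this function as a wrap point, here is a hypothetical extension sketch; the wrapper name and debug message are invented, and it assumes the usual extsetup() entry point and mercurial.extensions.wrapfunction.

from mercurial import copies, extensions

def _logmissing(orig, a, b, match=None):
    # report how many candidate files _forwardcopies is about to trace
    missing = orig(a, b, match=match)
    a._repo.ui.debug("copytracing will examine %d files\n" % len(missing))
    return missing

def extsetup(ui):
    extensions.wrapfunction(copies, '_computeforwardmissing', _logmissing)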
155 def _forwardcopies(a, b, match=None):
155 def _forwardcopies(a, b, match=None):
156 '''find {dst@b: src@a} copy mapping where a is an ancestor of b'''
156 '''find {dst@b: src@a} copy mapping where a is an ancestor of b'''
157
157
158 # check for working copy
158 # check for working copy
159 w = None
159 w = None
160 if b.rev() is None:
160 if b.rev() is None:
161 w = b
161 w = b
162 b = w.p1()
162 b = w.p1()
163 if a == b:
163 if a == b:
164 # short-circuit to avoid issues with merge states
164 # short-circuit to avoid issues with merge states
165 return _dirstatecopies(w)
165 return _dirstatecopies(w)
166
166
167 # files might have to be traced back to the fctx parent of the last
167 # files might have to be traced back to the fctx parent of the last
168 # one-side-only changeset, but not further back than that
168 # one-side-only changeset, but not further back than that
169 limit = _findlimit(a._repo, a.rev(), b.rev())
169 limit = _findlimit(a._repo, a.rev(), b.rev())
170 if limit is None:
170 if limit is None:
171 limit = -1
171 limit = -1
172 am = a.manifest()
172 am = a.manifest()
173
173
174 # find where new files came from
174 # find where new files came from
175 # we currently don't try to find where old files went, too expensive
175 # we currently don't try to find where old files went, too expensive
176 # this means we can miss a case like 'hg rm b; hg cp a b'
176 # this means we can miss a case like 'hg rm b; hg cp a b'
177 cm = {}
177 cm = {}
178 missing = _computeforwardmissing(a, b, match=match)
178 missing = _computeforwardmissing(a, b, match=match)
179 ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True)
179 ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True)
180 for f in missing:
180 for f in missing:
181 fctx = b[f]
181 fctx = b[f]
182 fctx._ancestrycontext = ancestrycontext
182 fctx._ancestrycontext = ancestrycontext
183 ofctx = _tracefile(fctx, am, limit)
183 ofctx = _tracefile(fctx, am, limit)
184 if ofctx:
184 if ofctx:
185 cm[f] = ofctx.path()
185 cm[f] = ofctx.path()
186
186
187 # combine copies from dirstate if necessary
187 # combine copies from dirstate if necessary
188 if w is not None:
188 if w is not None:
189 cm = _chain(a, w, cm, _dirstatecopies(w))
189 cm = _chain(a, w, cm, _dirstatecopies(w))
190
190
191 return cm
191 return cm
192
192
193 def _backwardrenames(a, b):
193 def _backwardrenames(a, b):
194 if a._repo.ui.configbool('experimental', 'disablecopytrace'):
194 if a._repo.ui.configbool('experimental', 'disablecopytrace'):
195 return {}
195 return {}
196
196
197 # Even though we're not taking copies into account, 1:n rename situations
197 # Even though we're not taking copies into account, 1:n rename situations
198 # can still exist (e.g. hg cp a b; hg mv a c). In those cases we
198 # can still exist (e.g. hg cp a b; hg mv a c). In those cases we
199 # arbitrarily pick one of the renames.
199 # arbitrarily pick one of the renames.
200 f = _forwardcopies(b, a)
200 f = _forwardcopies(b, a)
201 r = {}
201 r = {}
202 for k, v in sorted(f.iteritems()):
202 for k, v in sorted(f.iteritems()):
203 # remove copies
203 # remove copies
204 if v in a:
204 if v in a:
205 continue
205 continue
206 r[v] = k
206 r[v] = k
207 return r
207 return r
208
208
209 def pathcopies(x, y, match=None):
209 def pathcopies(x, y, match=None):
210 '''find {dst@y: src@x} copy mapping for directed compare'''
210 '''find {dst@y: src@x} copy mapping for directed compare'''
211 if x == y or not x or not y:
211 if x == y or not x or not y:
212 return {}
212 return {}
213 a = y.ancestor(x)
213 a = y.ancestor(x)
214 if a == x:
214 if a == x:
215 return _forwardcopies(x, y, match=match)
215 return _forwardcopies(x, y, match=match)
216 if a == y:
216 if a == y:
217 return _backwardrenames(x, y)
217 return _backwardrenames(x, y)
218 return _chain(x, y, _backwardrenames(x, a),
218 return _chain(x, y, _backwardrenames(x, a),
219 _forwardcopies(a, y, match=match))
219 _forwardcopies(a, y, match=match))
220
220
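A hypothetical usage sketch for the public entry point above, assuming "repo" is already bound to a localrepository object:

from mercurial import copies

new = repo['.']                          # working directory parent
old = new.p1()                           # its first parent
for dst, src in sorted(copies.pathcopies(old, new).iteritems()):
    print "%s came from %s" % (dst, src)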
221 def _computenonoverlap(repo, c1, c2, addedinm1, addedinm2):
221 def _computenonoverlap(repo, c1, c2, addedinm1, addedinm2):
222 """Computes, based on addedinm1 and addedinm2, the files exclusive to c1
222 """Computes, based on addedinm1 and addedinm2, the files exclusive to c1
223 and c2. This is its own function so extensions can easily wrap this call
223 and c2. This is its own function so extensions can easily wrap this call
224 to see what files mergecopies is about to process.
224 to see what files mergecopies is about to process.
225
225
226 Even though c1 and c2 are not used in this function, they are useful in
226 Even though c1 and c2 are not used in this function, they are useful in
227 other extensions for being able to read the file nodes of the changed files.
227 other extensions for being able to read the file nodes of the changed files.
228 """
228 """
229 u1 = sorted(addedinm1 - addedinm2)
229 u1 = sorted(addedinm1 - addedinm2)
230 u2 = sorted(addedinm2 - addedinm1)
230 u2 = sorted(addedinm2 - addedinm1)
231
231
232 if u1:
232 if u1:
233 repo.ui.debug(" unmatched files in local:\n %s\n"
233 repo.ui.debug(" unmatched files in local:\n %s\n"
234 % "\n ".join(u1))
234 % "\n ".join(u1))
235 if u2:
235 if u2:
236 repo.ui.debug(" unmatched files in other:\n %s\n"
236 repo.ui.debug(" unmatched files in other:\n %s\n"
237 % "\n ".join(u2))
237 % "\n ".join(u2))
238 return u1, u2
238 return u1, u2
239
239
240 def _makegetfctx(ctx):
240 def _makegetfctx(ctx):
241 """return a 'getfctx' function suitable for checkcopies usage
241 """return a 'getfctx' function suitable for checkcopies usage
242
242
243 We have to re-setup the function building 'filectx' for each
243 We have to re-setup the function building 'filectx' for each
244 'checkcopies' to ensure the linkrev adjustment is properly set up for
244 'checkcopies' to ensure the linkrev adjustment is properly set up for
245 each. Linkrev adjustment is important to avoid bugs in rename
245 each. Linkrev adjustment is important to avoid bugs in rename
246 detection. Moreover, having a proper '_ancestrycontext' setup ensures
246 detection. Moreover, having a proper '_ancestrycontext' setup ensures
247 the performance impact of this adjustment is kept limited. Without it,
247 the performance impact of this adjustment is kept limited. Without it,
248 each file could do a full dag traversal making the time complexity of
248 each file could do a full dag traversal making the time complexity of
249 the operation explode (see issue4537).
249 the operation explode (see issue4537).
250
250
251 This function exists here mostly to limit the impact on stable. Feel
251 This function exists here mostly to limit the impact on stable. Feel
252 free to refactor on default.
252 free to refactor on default.
253 """
253 """
254 rev = ctx.rev()
254 rev = ctx.rev()
255 repo = ctx._repo
255 repo = ctx._repo
256 ac = getattr(ctx, '_ancestrycontext', None)
256 ac = getattr(ctx, '_ancestrycontext', None)
257 if ac is None:
257 if ac is None:
258 revs = [rev]
258 revs = [rev]
259 if rev is None:
259 if rev is None:
260 revs = [p.rev() for p in ctx.parents()]
260 revs = [p.rev() for p in ctx.parents()]
261 ac = repo.changelog.ancestors(revs, inclusive=True)
261 ac = repo.changelog.ancestors(revs, inclusive=True)
262 ctx._ancestrycontext = ac
262 ctx._ancestrycontext = ac
263 def makectx(f, n):
263 def makectx(f, n):
264 if len(n) != 20: # in a working context?
264 if len(n) != 20: # in a working context?
265 if ctx.rev() is None:
265 if ctx.rev() is None:
266 return ctx.filectx(f)
266 return ctx.filectx(f)
267 return repo[None][f]
267 return repo[None][f]
268 fctx = repo.filectx(f, fileid=n)
268 fctx = repo.filectx(f, fileid=n)
269 # setup only needed for filectx not created from a changectx
269 # setup only needed for filectx not created from a changectx
270 fctx._ancestrycontext = ac
270 fctx._ancestrycontext = ac
271 fctx._descendantrev = rev
271 fctx._descendantrev = rev
272 return fctx
272 return fctx
273 return util.lrucachefunc(makectx)
273 return util.lrucachefunc(makectx)
274
274
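The memoisation that keeps the returned getter cheap comes from util.lrucachefunc; a small standalone illustration with an invented function, assuming the Python 2 era util module:

from mercurial import util

calls = []
def build(key):
    calls.append(key)
    return key * 2

cached = util.lrucachefunc(build)
cached(21)
cached(21)
print calls        # [21]: the second call is served from the cache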
275 def mergecopies(repo, c1, c2, ca):
275 def mergecopies(repo, c1, c2, ca):
276 """
276 """
277 Find moves and copies between context c1 and c2 that are relevant
277 Find moves and copies between context c1 and c2 that are relevant
278 for merging.
278 for merging.
279
279
280 Returns four dicts: "copy", "movewithdir", "diverge", and
280 Returns four dicts: "copy", "movewithdir", "diverge", and
281 "renamedelete".
281 "renamedelete".
282
282
283 "copy" is a mapping from destination name -> source name,
283 "copy" is a mapping from destination name -> source name,
284 where source is in c1 and destination is in c2 or vice-versa.
284 where source is in c1 and destination is in c2 or vice-versa.
285
285
286 "movewithdir" is a mapping from source name -> destination name,
286 "movewithdir" is a mapping from source name -> destination name,
287 where the file at source present in one context but not the other
287 where the file at source present in one context but not the other
288 needs to be moved to destination by the merge process, because the
288 needs to be moved to destination by the merge process, because the
289 other context moved the directory it is in.
289 other context moved the directory it is in.
290
290
291 "diverge" is a mapping of source name -> list of destination names
291 "diverge" is a mapping of source name -> list of destination names
292 for divergent renames.
292 for divergent renames.
293
293
294 "renamedelete" is a mapping of source name -> list of destination
294 "renamedelete" is a mapping of source name -> list of destination
295 names for files deleted in c1 that were renamed in c2 or vice-versa.
295 names for files deleted in c1 that were renamed in c2 or vice-versa.
296 """
296 """
297 # avoid silly behavior for update from empty dir
297 # avoid silly behavior for update from empty dir
298 if not c1 or not c2 or c1 == c2:
298 if not c1 or not c2 or c1 == c2:
299 return {}, {}, {}, {}
299 return {}, {}, {}, {}
300
300
301 # avoid silly behavior for parent -> working dir
301 # avoid silly behavior for parent -> working dir
302 if c2.node() is None and c1.node() == repo.dirstate.p1():
302 if c2.node() is None and c1.node() == repo.dirstate.p1():
303 return repo.dirstate.copies(), {}, {}, {}
303 return repo.dirstate.copies(), {}, {}, {}
304
304
305 # Copy trace disabling is explicitly below the node == p1 logic above
305 # Copy trace disabling is explicitly below the node == p1 logic above
306 # because the logic above is required for a simple copy to be kept across a
306 # because the logic above is required for a simple copy to be kept across a
307 # rebase.
307 # rebase.
308 if repo.ui.configbool('experimental', 'disablecopytrace'):
308 if repo.ui.configbool('experimental', 'disablecopytrace'):
309 return {}, {}, {}, {}
309 return {}, {}, {}, {}
310
310
311 limit = _findlimit(repo, c1.rev(), c2.rev())
311 limit = _findlimit(repo, c1.rev(), c2.rev())
312 if limit is None:
312 if limit is None:
313 # no common ancestor, no copies
313 # no common ancestor, no copies
314 return {}, {}, {}, {}
314 return {}, {}, {}, {}
315 repo.ui.debug(" searching for copies back to rev %d\n" % limit)
315 repo.ui.debug(" searching for copies back to rev %d\n" % limit)
316
316
317 m1 = c1.manifest()
317 m1 = c1.manifest()
318 m2 = c2.manifest()
318 m2 = c2.manifest()
319 ma = ca.manifest()
319 ma = ca.manifest()
320
320
321 copy1, copy2, = {}, {}
321 copy1, copy2, = {}, {}
322 movewithdir1, movewithdir2 = {}, {}
322 movewithdir1, movewithdir2 = {}, {}
323 fullcopy1, fullcopy2 = {}, {}
323 fullcopy1, fullcopy2 = {}, {}
324 diverge = {}
324 diverge = {}
325
325
326 addedinm1 = m1.filesnotin(ma)
326 addedinm1 = m1.filesnotin(ma)
327 addedinm2 = m2.filesnotin(ma)
327 addedinm2 = m2.filesnotin(ma)
328 u1, u2 = _computenonoverlap(repo, c1, c2, addedinm1, addedinm2)
328 u1, u2 = _computenonoverlap(repo, c1, c2, addedinm1, addedinm2)
329
329
330 for f in u1:
330 for f in u1:
331 getfctx = _makegetfctx(c1)
331 checkcopies(c1, f, m1, m2, ca, limit, diverge, copy1, fullcopy1)
332 checkcopies(getfctx, f, m1, m2, ca, limit, diverge, copy1, fullcopy1)
333
332
334 for f in u2:
333 for f in u2:
335 getfctx = _makegetfctx(c2)
334 checkcopies(c2, f, m2, m1, ca, limit, diverge, copy2, fullcopy2)
336 checkcopies(getfctx, f, m2, m1, ca, limit, diverge, copy2, fullcopy2)
337
335
338 copy = dict(copy1.items() + copy2.items())
336 copy = dict(copy1.items() + copy2.items())
339 movewithdir = dict(movewithdir1.items() + movewithdir2.items())
337 movewithdir = dict(movewithdir1.items() + movewithdir2.items())
340 fullcopy = dict(fullcopy1.items() + fullcopy2.items())
338 fullcopy = dict(fullcopy1.items() + fullcopy2.items())
341
339
342 renamedelete = {}
340 renamedelete = {}
343 renamedelete2 = set()
341 renamedelete2 = set()
344 divergeset = set()
342 divergeset = set()
345 for of, fl in diverge.items():
343 for of, fl in diverge.items():
346 if len(fl) == 1 or of in c1 or of in c2:
344 if len(fl) == 1 or of in c1 or of in c2:
347 del diverge[of] # not actually divergent, or not a rename
345 del diverge[of] # not actually divergent, or not a rename
348 if of not in c1 and of not in c2:
346 if of not in c1 and of not in c2:
349 # renamed on one side, deleted on the other side, but filter
347 # renamed on one side, deleted on the other side, but filter
350 # out files that have been renamed and then deleted
348 # out files that have been renamed and then deleted
351 renamedelete[of] = [f for f in fl if f in c1 or f in c2]
349 renamedelete[of] = [f for f in fl if f in c1 or f in c2]
352 renamedelete2.update(fl) # reverse map for below
350 renamedelete2.update(fl) # reverse map for below
353 else:
351 else:
354 divergeset.update(fl) # reverse map for below
352 divergeset.update(fl) # reverse map for below
355
353
356 bothnew = sorted(addedinm1 & addedinm2)
354 bothnew = sorted(addedinm1 & addedinm2)
357 if bothnew:
355 if bothnew:
358 repo.ui.debug(" unmatched files new in both:\n %s\n"
356 repo.ui.debug(" unmatched files new in both:\n %s\n"
359 % "\n ".join(bothnew))
357 % "\n ".join(bothnew))
360 bothdiverge, _copy, _fullcopy = {}, {}, {}
358 bothdiverge, _copy, _fullcopy = {}, {}, {}
361 for f in bothnew:
359 for f in bothnew:
362 getfctx = _makegetfctx(c1)
360 checkcopies(c1, f, m1, m2, ca, limit, bothdiverge, _copy, _fullcopy)
363 checkcopies(getfctx, f, m1, m2, ca, limit, bothdiverge,
361 checkcopies(c2, f, m2, m1, ca, limit, bothdiverge, _copy, _fullcopy)
364 _copy, _fullcopy)
365 getfctx = _makegetfctx(c2)
366 checkcopies(getfctx, f, m2, m1, ca, limit, bothdiverge,
367 _copy, _fullcopy)
368 for of, fl in bothdiverge.items():
362 for of, fl in bothdiverge.items():
369 if len(fl) == 2 and fl[0] == fl[1]:
363 if len(fl) == 2 and fl[0] == fl[1]:
370 copy[fl[0]] = of # not actually divergent, just matching renames
364 copy[fl[0]] = of # not actually divergent, just matching renames
371
365
372 if fullcopy and repo.ui.debugflag:
366 if fullcopy and repo.ui.debugflag:
373 repo.ui.debug(" all copies found (* = to merge, ! = divergent, "
367 repo.ui.debug(" all copies found (* = to merge, ! = divergent, "
374 "% = renamed and deleted):\n")
368 "% = renamed and deleted):\n")
375 for f in sorted(fullcopy):
369 for f in sorted(fullcopy):
376 note = ""
370 note = ""
377 if f in copy:
371 if f in copy:
378 note += "*"
372 note += "*"
379 if f in divergeset:
373 if f in divergeset:
380 note += "!"
374 note += "!"
381 if f in renamedelete2:
375 if f in renamedelete2:
382 note += "%"
376 note += "%"
383 repo.ui.debug(" src: '%s' -> dst: '%s' %s\n" % (fullcopy[f], f,
377 repo.ui.debug(" src: '%s' -> dst: '%s' %s\n" % (fullcopy[f], f,
384 note))
378 note))
385 del divergeset
379 del divergeset
386
380
387 if not fullcopy:
381 if not fullcopy:
388 return copy, movewithdir, diverge, renamedelete
382 return copy, movewithdir, diverge, renamedelete
389
383
390 repo.ui.debug(" checking for directory renames\n")
384 repo.ui.debug(" checking for directory renames\n")
391
385
392 # generate a directory move map
386 # generate a directory move map
393 d1, d2 = c1.dirs(), c2.dirs()
387 d1, d2 = c1.dirs(), c2.dirs()
394 # Hack for adding '', which is not otherwise added, to d1 and d2
388 # Hack for adding '', which is not otherwise added, to d1 and d2
395 d1.addpath('/')
389 d1.addpath('/')
396 d2.addpath('/')
390 d2.addpath('/')
397 invalid = set()
391 invalid = set()
398 dirmove = {}
392 dirmove = {}
399
393
400 # examine each file copy for a potential directory move, which is
394 # examine each file copy for a potential directory move, which is
401 # when all the files in a directory are moved to a new directory
395 # when all the files in a directory are moved to a new directory
402 for dst, src in fullcopy.iteritems():
396 for dst, src in fullcopy.iteritems():
403 dsrc, ddst = pathutil.dirname(src), pathutil.dirname(dst)
397 dsrc, ddst = pathutil.dirname(src), pathutil.dirname(dst)
404 if dsrc in invalid:
398 if dsrc in invalid:
405 # already seen to be uninteresting
399 # already seen to be uninteresting
406 continue
400 continue
407 elif dsrc in d1 and ddst in d1:
401 elif dsrc in d1 and ddst in d1:
408 # directory wasn't entirely moved locally
402 # directory wasn't entirely moved locally
409 invalid.add(dsrc)
403 invalid.add(dsrc)
410 elif dsrc in d2 and ddst in d2:
404 elif dsrc in d2 and ddst in d2:
411 # directory wasn't entirely moved remotely
405 # directory wasn't entirely moved remotely
412 invalid.add(dsrc)
406 invalid.add(dsrc)
413 elif dsrc in dirmove and dirmove[dsrc] != ddst:
407 elif dsrc in dirmove and dirmove[dsrc] != ddst:
414 # files from the same directory moved to two different places
408 # files from the same directory moved to two different places
415 invalid.add(dsrc)
409 invalid.add(dsrc)
416 else:
410 else:
417 # looks good so far
411 # looks good so far
418 dirmove[dsrc + "/"] = ddst + "/"
412 dirmove[dsrc + "/"] = ddst + "/"
419
413
420 for i in invalid:
414 for i in invalid:
421 if i in dirmove:
415 if i in dirmove:
422 del dirmove[i]
416 del dirmove[i]
423 del d1, d2, invalid
417 del d1, d2, invalid
424
418
425 if not dirmove:
419 if not dirmove:
426 return copy, movewithdir, diverge, renamedelete
420 return copy, movewithdir, diverge, renamedelete
427
421
428 for d in dirmove:
422 for d in dirmove:
429 repo.ui.debug(" discovered dir src: '%s' -> dst: '%s'\n" %
423 repo.ui.debug(" discovered dir src: '%s' -> dst: '%s'\n" %
430 (d, dirmove[d]))
424 (d, dirmove[d]))
431
425
432 # check unaccounted nonoverlapping files against directory moves
426 # check unaccounted nonoverlapping files against directory moves
433 for f in u1 + u2:
427 for f in u1 + u2:
434 if f not in fullcopy:
428 if f not in fullcopy:
435 for d in dirmove:
429 for d in dirmove:
436 if f.startswith(d):
430 if f.startswith(d):
437 # new file added in a directory that was moved, move it
431 # new file added in a directory that was moved, move it
438 df = dirmove[d] + f[len(d):]
432 df = dirmove[d] + f[len(d):]
439 if df not in copy:
433 if df not in copy:
440 movewithdir[f] = df
434 movewithdir[f] = df
441 repo.ui.debug((" pending file src: '%s' -> "
435 repo.ui.debug((" pending file src: '%s' -> "
442 "dst: '%s'\n") % (f, df))
436 "dst: '%s'\n") % (f, df))
443 break
437 break
444
438
445 return copy, movewithdir, diverge, renamedelete
439 return copy, movewithdir, diverge, renamedelete
446
440
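A hypothetical sketch of a caller consuming the four dictionaries documented above, assuming "repo", "c1", "c2" and "ca" are already bound to a repository and three changectx objects:

from mercurial import copies

copy, movewithdir, diverge, renamedelete = copies.mergecopies(repo, c1, c2, ca)
for dst, src in copy.iteritems():
    repo.ui.note("treating %s as a copy/rename of %s\n" % (dst, src))
for src, dsts in diverge.iteritems():
    repo.ui.warn("note: %s diverged into %s\n" % (src, ", ".join(dsts)))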
447 def checkcopies(getfctx, f, m1, m2, ca, limit, diverge, copy, fullcopy):
441 def checkcopies(ctx, f, m1, m2, ca, limit, diverge, copy, fullcopy):
448 """
442 """
449 check possible copies of f from m1 to m2
443 check possible copies of f from m1 to m2
450
444
451 getfctx = function accepting (filename, node) that returns a filectx.
445 ctx = starting context for f in m1
452 f = the filename to check
446 f = the filename to check
453 m1 = the source manifest
447 m1 = the source manifest
454 m2 = the destination manifest
448 m2 = the destination manifest
455 ca = the changectx of the common ancestor
449 ca = the changectx of the common ancestor
456 limit = the rev number to not search beyond
450 limit = the rev number to not search beyond
457 diverge = record all diverges in this dict
451 diverge = record all diverges in this dict
458 copy = record all non-divergent copies in this dict
452 copy = record all non-divergent copies in this dict
459 fullcopy = record all copies in this dict
453 fullcopy = record all copies in this dict
460 """
454 """
461
455
462 ma = ca.manifest()
456 ma = ca.manifest()
457 getfctx = _makegetfctx(ctx)
463
458
464 def _related(f1, f2, limit):
459 def _related(f1, f2, limit):
465 # Walk back to common ancestor to see if the two files originate
460 # Walk back to common ancestor to see if the two files originate
466 # from the same file. Since workingfilectx's rev() is None it messes
461 # from the same file. Since workingfilectx's rev() is None it messes
467 # up the integer comparison logic, hence the pre-step check for
462 # up the integer comparison logic, hence the pre-step check for
468 # None (f1 and f2 can only be workingfilectx's initially).
463 # None (f1 and f2 can only be workingfilectx's initially).
469
464
470 if f1 == f2:
465 if f1 == f2:
471 return f1 # a match
466 return f1 # a match
472
467
473 g1, g2 = f1.ancestors(), f2.ancestors()
468 g1, g2 = f1.ancestors(), f2.ancestors()
474 try:
469 try:
475 f1r, f2r = f1.linkrev(), f2.linkrev()
470 f1r, f2r = f1.linkrev(), f2.linkrev()
476
471
477 if f1r is None:
472 if f1r is None:
478 f1 = g1.next()
473 f1 = g1.next()
479 if f2r is None:
474 if f2r is None:
480 f2 = g2.next()
475 f2 = g2.next()
481
476
482 while True:
477 while True:
483 f1r, f2r = f1.linkrev(), f2.linkrev()
478 f1r, f2r = f1.linkrev(), f2.linkrev()
484 if f1r > f2r:
479 if f1r > f2r:
485 f1 = g1.next()
480 f1 = g1.next()
486 elif f2r > f1r:
481 elif f2r > f1r:
487 f2 = g2.next()
482 f2 = g2.next()
488 elif f1 == f2:
483 elif f1 == f2:
489 return f1 # a match
484 return f1 # a match
490 elif f1r == f2r or f1r < limit or f2r < limit:
485 elif f1r == f2r or f1r < limit or f2r < limit:
491 return False # copy no longer relevant
486 return False # copy no longer relevant
492 except StopIteration:
487 except StopIteration:
493 return False
488 return False
494
489
495 of = None
490 of = None
496 seen = set([f])
491 seen = set([f])
497 for oc in getfctx(f, m1[f]).ancestors():
492 for oc in getfctx(f, m1[f]).ancestors():
498 ocr = oc.linkrev()
493 ocr = oc.linkrev()
499 of = oc.path()
494 of = oc.path()
500 if of in seen:
495 if of in seen:
501 # check limit late - grab last rename before
496 # check limit late - grab last rename before
502 if ocr < limit:
497 if ocr < limit:
503 break
498 break
504 continue
499 continue
505 seen.add(of)
500 seen.add(of)
506
501
507 fullcopy[f] = of # remember for dir rename detection
502 fullcopy[f] = of # remember for dir rename detection
508 if of not in m2:
503 if of not in m2:
509 continue # no match, keep looking
504 continue # no match, keep looking
510 if m2[of] == ma.get(of):
505 if m2[of] == ma.get(of):
511 break # no merge needed, quit early
506 break # no merge needed, quit early
512 c2 = getfctx(of, m2[of])
507 c2 = getfctx(of, m2[of])
513 cr = _related(oc, c2, ca.rev())
508 cr = _related(oc, c2, ca.rev())
514 if cr and (of == f or of == c2.path()): # non-divergent
509 if cr and (of == f or of == c2.path()): # non-divergent
515 copy[f] = of
510 copy[f] = of
516 of = None
511 of = None
517 break
512 break
518
513
519 if of in ma:
514 if of in ma:
520 diverge.setdefault(of, []).append(f)
515 diverge.setdefault(of, []).append(f)
521
516
522 def duplicatecopies(repo, rev, fromrev, skiprev=None):
517 def duplicatecopies(repo, rev, fromrev, skiprev=None):
523 '''reproduce copies from fromrev to rev in the dirstate
518 '''reproduce copies from fromrev to rev in the dirstate
524
519
525 If skiprev is specified, it's a revision that should be used to
520 If skiprev is specified, it's a revision that should be used to
526 filter copy records. Any copies that occur between fromrev and
521 filter copy records. Any copies that occur between fromrev and
527 skiprev will not be duplicated, even if they appear in the set of
522 skiprev will not be duplicated, even if they appear in the set of
528 copies between fromrev and rev.
523 copies between fromrev and rev.
529 '''
524 '''
530 exclude = {}
525 exclude = {}
531 if (skiprev is not None and
526 if (skiprev is not None and
532 not repo.ui.configbool('experimental', 'disablecopytrace')):
527 not repo.ui.configbool('experimental', 'disablecopytrace')):
533 # disablecopytrace skips this line, but not the entire function because
528 # disablecopytrace skips this line, but not the entire function because
534 # the line below is O(size of the repo) during a rebase, while the rest
529 # the line below is O(size of the repo) during a rebase, while the rest
535 # of the function is much faster (and is required for carrying copy
530 # of the function is much faster (and is required for carrying copy
536 # metadata across the rebase anyway).
531 # metadata across the rebase anyway).
537 exclude = pathcopies(repo[fromrev], repo[skiprev])
532 exclude = pathcopies(repo[fromrev], repo[skiprev])
538 for dst, src in pathcopies(repo[fromrev], repo[rev]).iteritems():
533 for dst, src in pathcopies(repo[fromrev], repo[rev]).iteritems():
539 # copies.pathcopies returns backward renames, so dst might not
534 # copies.pathcopies returns backward renames, so dst might not
540 # actually be in the dirstate
535 # actually be in the dirstate
541 if dst in exclude:
536 if dst in exclude:
542 continue
537 continue
543 if repo.dirstate[dst] in "nma":
538 if repo.dirstate[dst] in "nma":
544 repo.dirstate.copy(src, dst)
539 repo.dirstate.copy(src, dst)
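Finally, a hypothetical sketch of a graft/rebase style call into duplicatecopies; the revision numbers are invented and the caller is assumed to hold the wlock with the working copy already updated:

from mercurial import copies

# replay, in the dirstate, the copies made between fromrev and rev,
# skipping any that already happened between fromrev and skiprev
copies.duplicatecopies(repo, rev=42, fromrev=40, skiprev=41)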