##// END OF EJS Templates
pytype: stop excluding copies.py...
Matt Harbison -
r49308:79b90431 default
parent child Browse files
Show More
@@ -1,1305 +1,1309 b''
1 # coding: utf8
1 # coding: utf8
2 # copies.py - copy detection for Mercurial
2 # copies.py - copy detection for Mercurial
3 #
3 #
4 # Copyright 2008 Olivia Mackall <olivia@selenic.com>
4 # Copyright 2008 Olivia Mackall <olivia@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import collections
11 import collections
12 import os
12 import os
13
13
14 from .i18n import _
14 from .i18n import _
15 from .node import nullrev
15 from .node import nullrev
16
16
17 from . import (
17 from . import (
18 match as matchmod,
18 match as matchmod,
19 pathutil,
19 pathutil,
20 policy,
20 policy,
21 pycompat,
21 pycompat,
22 util,
22 util,
23 )
23 )
24
24
25
25
26 from .utils import stringutil
26 from .utils import stringutil
27
27
28 from .revlogutils import (
28 from .revlogutils import (
29 flagutil,
29 flagutil,
30 sidedata as sidedatamod,
30 sidedata as sidedatamod,
31 )
31 )
32
32
33 rustmod = policy.importrust("copy_tracing")
33 rustmod = policy.importrust("copy_tracing")
34
34
35
35
36 def _filter(src, dst, t):
36 def _filter(src, dst, t):
37 """filters out invalid copies after chaining"""
37 """filters out invalid copies after chaining"""
38
38
39 # When _chain()'ing copies in 'a' (from 'src' via some other commit 'mid')
39 # When _chain()'ing copies in 'a' (from 'src' via some other commit 'mid')
40 # with copies in 'b' (from 'mid' to 'dst'), we can get the different cases
40 # with copies in 'b' (from 'mid' to 'dst'), we can get the different cases
41 # in the following table (not including trivial cases). For example, case 6
41 # in the following table (not including trivial cases). For example, case 6
42 # is where a file existed in 'src' and remained under that name in 'mid' and
42 # is where a file existed in 'src' and remained under that name in 'mid' and
43 # then was renamed between 'mid' and 'dst'.
43 # then was renamed between 'mid' and 'dst'.
44 #
44 #
45 # case src mid dst result
45 # case src mid dst result
46 # 1 x y - -
46 # 1 x y - -
47 # 2 x y y x->y
47 # 2 x y y x->y
48 # 3 x y x -
48 # 3 x y x -
49 # 4 x y z x->z
49 # 4 x y z x->z
50 # 5 - x y -
50 # 5 - x y -
51 # 6 x x y x->y
51 # 6 x x y x->y
52 #
52 #
53 # _chain() takes care of chaining the copies in 'a' and 'b', but it
53 # _chain() takes care of chaining the copies in 'a' and 'b', but it
54 # cannot tell the difference between cases 1 and 2, between 3 and 4, or
54 # cannot tell the difference between cases 1 and 2, between 3 and 4, or
55 # between 5 and 6, so it includes all cases in its result.
55 # between 5 and 6, so it includes all cases in its result.
56 # Cases 1, 3, and 5 are then removed by _filter().
56 # Cases 1, 3, and 5 are then removed by _filter().
57
57
58 for k, v in list(t.items()):
58 for k, v in list(t.items()):
59 if k == v: # case 3
59 if k == v: # case 3
60 del t[k]
60 del t[k]
61 elif v not in src: # case 5
61 elif v not in src: # case 5
62 # remove copies from files that didn't exist
62 # remove copies from files that didn't exist
63 del t[k]
63 del t[k]
64 elif k not in dst: # case 1
64 elif k not in dst: # case 1
65 # remove copies to files that were then removed
65 # remove copies to files that were then removed
66 del t[k]
66 del t[k]
67
67
68
68
69 def _chain(prefix, suffix):
69 def _chain(prefix, suffix):
70 """chain two sets of copies 'prefix' and 'suffix'"""
70 """chain two sets of copies 'prefix' and 'suffix'"""
71 result = prefix.copy()
71 result = prefix.copy()
72 for key, value in pycompat.iteritems(suffix):
72 for key, value in pycompat.iteritems(suffix):
73 result[key] = prefix.get(value, value)
73 result[key] = prefix.get(value, value)
74 return result
74 return result
75
75
76
76
77 def _tracefile(fctx, am, basemf):
77 def _tracefile(fctx, am, basemf):
78 """return file context that is the ancestor of fctx present in ancestor
78 """return file context that is the ancestor of fctx present in ancestor
79 manifest am
79 manifest am
80
80
81 Note: we used to try and stop after a given limit, however checking if that
81 Note: we used to try and stop after a given limit, however checking if that
82 limit is reached turned out to be very expensive. we are better off
82 limit is reached turned out to be very expensive. we are better off
83 disabling that feature."""
83 disabling that feature."""
84
84
85 for f in fctx.ancestors():
85 for f in fctx.ancestors():
86 path = f.path()
86 path = f.path()
87 if am.get(path, None) == f.filenode():
87 if am.get(path, None) == f.filenode():
88 return path
88 return path
89 if basemf and basemf.get(path, None) == f.filenode():
89 if basemf and basemf.get(path, None) == f.filenode():
90 return path
90 return path
91
91
92
92
93 def _dirstatecopies(repo, match=None):
93 def _dirstatecopies(repo, match=None):
94 ds = repo.dirstate
94 ds = repo.dirstate
95 c = ds.copies().copy()
95 c = ds.copies().copy()
96 for k in list(c):
96 for k in list(c):
97 if not ds.get_entry(k).tracked or (match and not match(k)):
97 if not ds.get_entry(k).tracked or (match and not match(k)):
98 del c[k]
98 del c[k]
99 return c
99 return c
100
100
101
101
102 def _computeforwardmissing(a, b, match=None):
102 def _computeforwardmissing(a, b, match=None):
103 """Computes which files are in b but not a.
103 """Computes which files are in b but not a.
104 This is its own function so extensions can easily wrap this call to see what
104 This is its own function so extensions can easily wrap this call to see what
105 files _forwardcopies is about to process.
105 files _forwardcopies is about to process.
106 """
106 """
107 ma = a.manifest()
107 ma = a.manifest()
108 mb = b.manifest()
108 mb = b.manifest()
109 return mb.filesnotin(ma, match=match)
109 return mb.filesnotin(ma, match=match)
110
110
111
111
def usechangesetcentricalgo(repo):
    """Checks if we should use changeset-centric copy algorithms"""
    if repo.filecopiesmode == b'changeset-sidedata':
        # sidedata storage always implies changeset-centric copy data;
        # no need to consult the configuration
        return True
    source = repo.ui.config(b'experimental', b'copies.read-from')
    return source in (b'changeset-only', b'compatibility')
119
119
120
120
def _committedforwardcopies(a, b, base, match):
    """Like _forwardcopies(), but b.rev() cannot be None (working copy)

    Returns a {dest: source} mapping of copies between changectx ``a`` and
    changectx ``b``, limited to files accepted by ``match``.  ``base`` (a
    changectx or None) supplies an extra manifest that per-file tracing may
    stop at (see _tracefile()).
    """
    # files might have to be traced back to the fctx parent of the last
    # one-side-only changeset, but not further back than that
    repo = a._repo

    if usechangesetcentricalgo(repo):
        # copy metadata is stored in the changesets; use the graph-based
        # algorithm instead of per-file tracing
        return _changesetforwardcopies(a, b, match)

    debug = repo.ui.debugflag and repo.ui.configbool(b'devel', b'debug.copies')
    dbg = repo.ui.debug
    if debug:
        dbg(b'debug.copies: looking into rename from %s to %s\n' % (a, b))
    am = a.manifest()
    basemf = None if base is None else base.manifest()

    # find where new files came from
    # we currently don't try to find where old files went, too expensive
    # this means we can miss a case like 'hg rm b; hg cp a b'
    cm = {}

    # Computing the forward missing is quite expensive on large manifests, since
    # it compares the entire manifests. We can optimize it in the common use
    # case of computing what copies are in a commit versus its parent (like
    # during a rebase or histedit). Note, we exclude merge commits from this
    # optimization, since the ctx.files() for a merge commit is not correct for
    # this comparison.
    forwardmissingmatch = match
    if b.p1() == a and b.p2().rev() == nullrev:
        filesmatcher = matchmod.exact(b.files())
        forwardmissingmatch = matchmod.intersectmatchers(match, filesmatcher)
    if repo.ui.configbool(b'devel', b'copy-tracing.trace-all-files'):
        # devel knob: trace every file in `b` instead of only the ones
        # missing from `a`
        missing = list(b.walk(match))
        # _computeforwardmissing(a, b, match=forwardmissingmatch)
        if debug:
            dbg(b'debug.copies: searching all files: %d\n' % len(missing))
    else:
        missing = _computeforwardmissing(a, b, match=forwardmissingmatch)
        if debug:
            dbg(
                b'debug.copies: missing files to search: %d\n'
                % len(missing)
            )

    # share one ancestry context across all traced files so ancestor
    # computations are not redone per file
    ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True)

    for f in sorted(missing):
        if debug:
            dbg(b'debug.copies: tracing file: %s\n' % f)
        fctx = b[f]
        fctx._ancestrycontext = ancestrycontext

        if debug:
            # `start` is only bound in debug mode; the timing report below
            # is guarded by the same flag
            start = util.timer()
        opath = _tracefile(fctx, am, basemf)
        if opath:
            if debug:
                dbg(b'debug.copies: rename of: %s\n' % opath)
            cm[f] = opath
        if debug:
            dbg(
                b'debug.copies: time: %f seconds\n'
                % (util.timer() - start)
            )
    return cm
186
186
187
187
def _revinfo_getter(repo, match):
    """returns a function that returns the following data given a <rev>

    * p1: revision number of first parent
    * p2: revision number of second parent
    * changes: a ChangingFiles object, or, when the Rust implementation is
      available, the raw sidedata block it would be decoded from (None in
      either case when the revision carries no copy information)
    """
    # NOTE: `match` is currently not used by the returned closures
    cl = repo.changelog
    parents = cl.parentrevs
    flags = cl.flags

    HASCOPIESINFO = flagutil.REVIDX_HASCOPIESINFO

    changelogrevision = cl.changelogrevision

    if rustmod is not None:
        # the Rust copy-tracing code takes the raw sidedata block and
        # decodes it itself

        def revinfo(rev):
            p1, p2 = parents(rev)
            if flags(rev) & HASCOPIESINFO:
                raw = changelogrevision(rev)._sidedata.get(sidedatamod.SD_FILES)
            else:
                raw = None
            return (p1, p2, raw)

    else:

        def revinfo(rev):
            p1, p2 = parents(rev)
            if flags(rev) & HASCOPIESINFO:
                changes = changelogrevision(rev).changes
            else:
                changes = None
            return (p1, p2, changes)

    return revinfo
224
224
225
225
def cached_is_ancestor(is_ancestor):
    """return a cached version of is_ancestor"""
    memo = {}

    def _is_ancestor(anc, desc):
        # a revision cannot be the ancestor of a lower-numbered revision,
        # and every revision is an ancestor of itself; neither case needs
        # the cache
        if anc > desc:
            return False
        if anc == desc:
            return True
        key = (anc, desc)
        try:
            return memo[key]
        except KeyError:
            result = memo[key] = is_ancestor(anc, desc)
            return result

    return _is_ancestor
242
242
243
243
def _changesetforwardcopies(a, b, match):
    """compute the copies between changectx ``a`` and changectx ``b`` from
    changeset-centric data, returning a {dest: source} mapping"""
    if a.rev() in (nullrev, b.rev()):
        return {}

    repo = a.repo().unfiltered()

    cl = repo.changelog
    isancestor = cl.isancestorrev

    # To track rename from "A" to B, we need to gather all parent → children
    # edges that are contained in `::B` but not in `::A`.
    #
    #
    # To do so, we need to gather all revisions exclusive¹ to "B" (ie¹: `::b -
    # ::a`) and also all the "roots point", ie the parents of the exclusive set
    # that belong to ::a. These are exactly all the revisions needed to express
    # the parent → children we need to combine.
    #
    # [1] actually, we need to gather all the edges within `(::a)::b`, ie:
    # excluding paths that lead to roots that are not ancestors of `a`. We
    # keep this out of the explanation because it is hard enough without this
    # special case..

    parents = cl._uncheckedparentrevs
    graph_roots = (nullrev, nullrev)

    ancestors = cl.ancestors([a.rev()], inclusive=True)
    revs = cl.findmissingrevs(common=[a.rev()], heads=[b.rev()])
    roots = set()
    has_graph_roots = False
    multi_thread = repo.ui.configbool(b'devel', b'copy-tracing.multi-thread')

    # iterate over `only(B, A)`
    for r in revs:
        ps = parents(r)
        if ps == graph_roots:
            has_graph_roots = True
        else:
            p1, p2 = ps

            # find all the "root points" (see larger comment above)
            if p1 != nullrev and p1 in ancestors:
                roots.add(p1)
            if p2 != nullrev and p2 in ancestors:
                roots.add(p2)
    if not roots:
        # no common revision to track copies from
        return {}
    if has_graph_roots:
        # this deals with the special case mentioned in the [1] footnote. We
        # must filter out revisions that lead to non-common graphroots.
        roots = list(roots)
        m = min(roots)
        h = [b.rev()]
        roots_to_head = cl.reachableroots(m, h, roots, includepath=True)
        roots_to_head = set(roots_to_head)
        revs = [r for r in revs if r in roots_to_head]

    if repo.filecopiesmode == b'changeset-sidedata':
        # When using side-data, we will process the edges "from" the children.
        # We iterate over the children, gathering previous collected data for
        # the parents. To know when the parents data is no longer necessary, we
        # keep a counter of how many children each revision has.
        #
        # An interesting property of `children_count` is that it only contains
        # revisions that will be relevant for an edge of the graph. So if a
        # child has a parent not in `children_count`, that edge should not be
        # processed.
        children_count = dict((r, 0) for r in roots)
        for r in revs:
            for p in cl.parentrevs(r):
                if p == nullrev:
                    continue
                children_count[r] = 0
                if p in children_count:
                    children_count[p] += 1
        revinfo = _revinfo_getter(repo, match)
        with repo.changelog.reading():
            return _combine_changeset_copies(
                revs,
                children_count,
                b.rev(),
                revinfo,
                match,
                isancestor,
                multi_thread,
            )
    else:
        # When not using side-data, we will process the edges "from" the parent.
        # so we need a full mapping of the parent -> children relation.
        children = dict((r, []) for r in roots)
        for r in revs:
            for p in cl.parentrevs(r):
                if p == nullrev:
                    continue
                children[r] = []
                if p in children:
                    children[p].append(r)
        # `revs` is sorted and `b.rev()` is its maximum: swap it for the
        # roots so the walk starts from the common revisions
        x = revs.pop()
        assert x == b.rev()
        revs.extend(roots)
        revs.sort()

        revinfo = _revinfo_getter_extra(repo)
        return _combine_changeset_copies_extra(
            revs, children, b.rev(), revinfo, match, isancestor
        )
350
350
351
351
def _combine_changeset_copies(
    revs, children_count, targetrev, revinfo, match, isancestor, multi_thread
):
    """combine the copies information for each item of iterrevs

    revs: sorted iterable of revision to visit
    children_count: a {parent: <number-of-relevant-children>} mapping.
    targetrev: the final copies destination revision (not in iterrevs)
    revinfo(rev): a function that returns (p1, p2, changes) for a revision
        (see _revinfo_getter())
    match: a matcher

    It returns the aggregated copies information for `targetrev`.
    """

    alwaysmatch = match.always()

    if rustmod is not None:
        # delegate the whole graph walk to the Rust implementation
        final_copies = rustmod.combine_changeset_copies(
            list(revs), children_count, targetrev, revinfo, multi_thread
        )
    else:
        isancestor = cached_is_ancestor(isancestor)

        # {rev: {dest: (introducing-rev, source-or-None)}} for revisions
        # whose data may still be needed by an unprocessed child
        all_copies = {}
        # iterate over all the "children" side of copy tracing "edge"
        for current_rev in revs:
            p1, p2, changes = revinfo(current_rev)
            current_copies = None
            # iterate over all parents to chain the existing data with the
            # data from the parent → child edge.
            for parent, parent_rev in ((1, p1), (2, p2)):
                if parent_rev == nullrev:
                    continue
                remaining_children = children_count.get(parent_rev)
                if remaining_children is None:
                    # edge not relevant for copy tracing (see
                    # _changesetforwardcopies)
                    continue
                remaining_children -= 1
                children_count[parent_rev] = remaining_children
                if remaining_children:
                    copies = all_copies.get(parent_rev, None)
                else:
                    # last child of that parent: release the parent's data
                    copies = all_copies.pop(parent_rev, None)

                if copies is None:
                    # this is a root
                    newcopies = copies = {}
                elif remaining_children:
                    # parent data still needed by a sibling: work on a copy
                    newcopies = copies.copy()
                else:
                    newcopies = copies
                # chain the data in the edge with the existing data
                if changes is not None:
                    childcopies = {}
                    if parent == 1:
                        childcopies = changes.copied_from_p1
                    elif parent == 2:
                        childcopies = changes.copied_from_p2

                    if childcopies:
                        newcopies = copies.copy()
                        for dest, source in pycompat.iteritems(childcopies):
                            prev = copies.get(source)
                            if prev is not None and prev[1] is not None:
                                source = prev[1]
                            newcopies[dest] = (current_rev, source)
                        assert newcopies is not copies
                    if changes.removed:
                        for f in changes.removed:
                            if f in newcopies:
                                if newcopies is copies:
                                    # copy on write to avoid affecting potential other
                                    # branches. when there are no other branches, this
                                    # could be avoided.
                                    newcopies = copies.copy()
                                newcopies[f] = (current_rev, None)
                # check potential need to combine the data from another parent (for
                # that child). See comment below for details.
                if current_copies is None:
                    current_copies = newcopies
                else:
                    # we are the second parent to work on c, we need to merge our
                    # work with the other.
                    #
                    # In case of conflict, parent 1 take precedence over parent 2.
                    # This is an arbitrary choice made anew when implementing
                    # changeset based copies. It was made without regards with
                    # potential filelog related behavior.
                    assert parent == 2
                    current_copies = _merge_copies_dict(
                        newcopies,
                        current_copies,
                        isancestor,
                        changes,
                        current_rev,
                    )
            all_copies[current_rev] = current_copies

        # filter out internal details and return a {dest: source mapping}
        final_copies = {}

        targetrev_items = all_copies[targetrev]
        assert targetrev_items is not None  # help pytype

        for dest, (tt, source) in targetrev_items.items():
            if source is not None:
                # drop deletion markers (source is None)
                final_copies[dest] = source
    if not alwaysmatch:
        # trim the result down to the files the caller asked about
        for filename in list(final_copies.keys()):
            if not match(filename):
                del final_copies[filename]
    return final_copies
459
463
460
464
# constants used by _compare_values / _merge_copies_dict to report which of
# the two compared values ("minor" or "major") should be kept
PICK_MINOR = 0
PICK_MAJOR = 1
PICK_EITHER = 2
465
469
466
470
def _merge_copies_dict(minor, major, isancestor, changes, current_merge):
    """merge two copies-mapping together, minor and major

    In case of conflict, value from "major" will be picked.

    - `isancestor(low_rev, high_rev)`: callable, return True if `low_rev` is
      an ancestor of `high_rev`,

    - `changes`: the ChangingFiles data for the merge revision (or None),
      passed through to _compare_values to resolve conflicts,

    - `current_merge`: revision number of the merge being processed; entries
      whose ambiguity is resolved by this merge are re-stamped with it.

    return the resulting dict (in practice, the "minor" object, updated)
    """
    for dest, value in major.items():
        other = minor.get(dest)
        if other is None:
            # no conflict: take the "major" entry as-is
            minor[dest] = value
        else:
            # both sides know about `dest`: decide which value wins
            pick, overwrite = _compare_values(
                changes, isancestor, dest, other, value
            )
            if overwrite:
                # the ambiguity is resolved by the merge itself: record the
                # winning source as introduced by `current_merge`
                if pick == PICK_MAJOR:
                    minor[dest] = (current_merge, value[1])
                else:
                    minor[dest] = (current_merge, other[1])
            elif pick == PICK_MAJOR:
                minor[dest] = value
    return minor
496
500
497
501
def _compare_values(changes, isancestor, dest, minor, major):
    """compare two values within a _merge_copies_dict loop iteration

    return (pick, overwrite).

    - pick is one of PICK_MINOR, PICK_MAJOR or PICK_EITHER
    - overwrite is True if pick is a return of an ambiguity that needs resolution.
    """
    # each value is a (revision, source) pair
    major_tt, major_value = major
    minor_tt, minor_value = minor

    if major_tt == minor_tt:
        # if it comes from the same revision it must be the same value
        assert major_value == minor_value
        return PICK_EITHER, False
    elif (
        changes is not None
        and minor_value is not None
        and major_value is None
        and dest in changes.salvaged
    ):
        # In this case, a deletion was reverted, the "alive" value overwrite
        # the deleted one.
        return PICK_MINOR, True
    elif (
        changes is not None
        and major_value is not None
        and minor_value is None
        and dest in changes.salvaged
    ):
        # In this case, a deletion was reverted, the "alive" value overwrite
        # the deleted one.
        return PICK_MAJOR, True
    elif isancestor(minor_tt, major_tt):
        if changes is not None and dest in changes.merged:
            # change to dest happened on the branch without copy-source change,
            # so both source are valid and "major" wins.
            return PICK_MAJOR, True
        else:
            return PICK_MAJOR, False
    elif isancestor(major_tt, minor_tt):
        if changes is not None and dest in changes.merged:
            # change to dest happened on the branch without copy-source change,
            # so both source are valid and "major" wins.
            return PICK_MAJOR, True
        else:
            return PICK_MINOR, False
    elif minor_value is None:
        # in case of conflict, the "alive" side wins.
        return PICK_MAJOR, True
    elif major_value is None:
        # in case of conflict, the "alive" side wins.
        return PICK_MINOR, True
    else:
        # in case of conflict where both side are alive, major wins.
        return PICK_MAJOR, True
554
558
555
559
def _revinfo_getter_extra(repo):
    """return a function that returns multiple data given a <rev>

    * p1: revision number of first parent
    * p2: revision number of second parent
    * p1copies: mapping of copies from p1
    * p2copies: mapping of copies from p2
    * removed: a list of removed files
    * ismerged: a callback to know if file was merged in that revision
    """
    cl = repo.changelog
    parents = cl.parentrevs

    def get_ismerged(rev):
        # build a per-changeset callback checking filelog-level merges
        ctx = repo[rev]

        def ismerged(path):
            if path not in ctx.files():
                return False
            fctx = ctx[path]
            parents = fctx._filelog.parents(fctx._filenode)
            nb_parents = 0
            for n in parents:
                if n != repo.nullid:
                    nb_parents += 1
            # a file is "merged" when its filelog entry has two real parents
            return nb_parents >= 2

        return ismerged

    def revinfo(rev):
        p1, p2 = parents(rev)
        ctx = repo[rev]
        p1copies, p2copies = ctx._copies
        removed = ctx.filesremoved()
        return p1, p2, p1copies, p2copies, removed, get_ismerged(rev)

    return revinfo
593
597
594
598
595 def _combine_changeset_copies_extra(
599 def _combine_changeset_copies_extra(
596 revs, children, targetrev, revinfo, match, isancestor
600 revs, children, targetrev, revinfo, match, isancestor
597 ):
601 ):
598 """version of `_combine_changeset_copies` that works with the Google
602 """version of `_combine_changeset_copies` that works with the Google
599 specific "extra" based storage for copy information"""
603 specific "extra" based storage for copy information"""
600 all_copies = {}
604 all_copies = {}
601 alwaysmatch = match.always()
605 alwaysmatch = match.always()
602 for r in revs:
606 for r in revs:
603 copies = all_copies.pop(r, None)
607 copies = all_copies.pop(r, None)
604 if copies is None:
608 if copies is None:
605 # this is a root
609 # this is a root
606 copies = {}
610 copies = {}
607 for i, c in enumerate(children[r]):
611 for i, c in enumerate(children[r]):
608 p1, p2, p1copies, p2copies, removed, ismerged = revinfo(c)
612 p1, p2, p1copies, p2copies, removed, ismerged = revinfo(c)
609 if r == p1:
613 if r == p1:
610 parent = 1
614 parent = 1
611 childcopies = p1copies
615 childcopies = p1copies
612 else:
616 else:
613 assert r == p2
617 assert r == p2
614 parent = 2
618 parent = 2
615 childcopies = p2copies
619 childcopies = p2copies
616 if not alwaysmatch:
620 if not alwaysmatch:
617 childcopies = {
621 childcopies = {
618 dst: src for dst, src in childcopies.items() if match(dst)
622 dst: src for dst, src in childcopies.items() if match(dst)
619 }
623 }
620 newcopies = copies
624 newcopies = copies
621 if childcopies:
625 if childcopies:
622 newcopies = copies.copy()
626 newcopies = copies.copy()
623 for dest, source in pycompat.iteritems(childcopies):
627 for dest, source in pycompat.iteritems(childcopies):
624 prev = copies.get(source)
628 prev = copies.get(source)
625 if prev is not None and prev[1] is not None:
629 if prev is not None and prev[1] is not None:
626 source = prev[1]
630 source = prev[1]
627 newcopies[dest] = (c, source)
631 newcopies[dest] = (c, source)
628 assert newcopies is not copies
632 assert newcopies is not copies
629 for f in removed:
633 for f in removed:
630 if f in newcopies:
634 if f in newcopies:
631 if newcopies is copies:
635 if newcopies is copies:
632 # copy on write to avoid affecting potential other
636 # copy on write to avoid affecting potential other
633 # branches. when there are no other branches, this
637 # branches. when there are no other branches, this
634 # could be avoided.
638 # could be avoided.
635 newcopies = copies.copy()
639 newcopies = copies.copy()
636 newcopies[f] = (c, None)
640 newcopies[f] = (c, None)
637 othercopies = all_copies.get(c)
641 othercopies = all_copies.get(c)
638 if othercopies is None:
642 if othercopies is None:
639 all_copies[c] = newcopies
643 all_copies[c] = newcopies
640 else:
644 else:
641 # we are the second parent to work on c, we need to merge our
645 # we are the second parent to work on c, we need to merge our
642 # work with the other.
646 # work with the other.
643 #
647 #
644 # In case of conflict, parent 1 take precedence over parent 2.
648 # In case of conflict, parent 1 take precedence over parent 2.
645 # This is an arbitrary choice made anew when implementing
649 # This is an arbitrary choice made anew when implementing
646 # changeset based copies. It was made without regards with
650 # changeset based copies. It was made without regards with
647 # potential filelog related behavior.
651 # potential filelog related behavior.
648 if parent == 1:
652 if parent == 1:
649 _merge_copies_dict_extra(
653 _merge_copies_dict_extra(
650 othercopies, newcopies, isancestor, ismerged
654 othercopies, newcopies, isancestor, ismerged
651 )
655 )
652 else:
656 else:
653 _merge_copies_dict_extra(
657 _merge_copies_dict_extra(
654 newcopies, othercopies, isancestor, ismerged
658 newcopies, othercopies, isancestor, ismerged
655 )
659 )
656 all_copies[c] = newcopies
660 all_copies[c] = newcopies
657
661
658 final_copies = {}
662 final_copies = {}
659 for dest, (tt, source) in all_copies[targetrev].items():
663 for dest, (tt, source) in all_copies[targetrev].items():
660 if source is not None:
664 if source is not None:
661 final_copies[dest] = source
665 final_copies[dest] = source
662 return final_copies
666 return final_copies
663
667
664
668
665 def _merge_copies_dict_extra(minor, major, isancestor, ismerged):
669 def _merge_copies_dict_extra(minor, major, isancestor, ismerged):
666 """version of `_merge_copies_dict` that works with the Google
670 """version of `_merge_copies_dict` that works with the Google
667 specific "extra" based storage for copy information"""
671 specific "extra" based storage for copy information"""
668 for dest, value in major.items():
672 for dest, value in major.items():
669 other = minor.get(dest)
673 other = minor.get(dest)
670 if other is None:
674 if other is None:
671 minor[dest] = value
675 minor[dest] = value
672 else:
676 else:
673 new_tt = value[0]
677 new_tt = value[0]
674 other_tt = other[0]
678 other_tt = other[0]
675 if value[1] == other[1]:
679 if value[1] == other[1]:
676 continue
680 continue
677 # content from "major" wins, unless it is older
681 # content from "major" wins, unless it is older
678 # than the branch point or there is a merge
682 # than the branch point or there is a merge
679 if (
683 if (
680 new_tt == other_tt
684 new_tt == other_tt
681 or not isancestor(new_tt, other_tt)
685 or not isancestor(new_tt, other_tt)
682 or ismerged(dest)
686 or ismerged(dest)
683 ):
687 ):
684 minor[dest] = value
688 minor[dest] = value
685
689
686
690
def _forwardcopies(a, b, base=None, match=None):
    """find {dst@b: src@a} copy mapping where a is an ancestor of b"""

    if base is None:
        base = a
    match = a.repo().narrowmatch(match)
    if b.rev() is not None:
        # both endpoints are committed changesets
        return _committedforwardcopies(a, b, base, match)
    # b is the working copy: trace up to its first parent, then layer the
    # dirstate copies on top
    committed = _committedforwardcopies(a, b.p1(), base, match)
    return _chain(committed, _dirstatecopies(b._repo, match))
701
705
702
706
def _backwardrenames(a, b, match):
    """find renames from a to b"""
    if a._repo.ui.config(b'experimental', b'copytrace') == b'off':
        return {}

    # We don't want to pass in "match" here, since that would filter
    # the destination by it. Since we're reversing the copies, we want
    # to filter the source instead.
    forward = _forwardcopies(b, a)
    return _reverse_renames(forward, a, match)
713
717
714
718
715 def _reverse_renames(copies, dst, match):
719 def _reverse_renames(copies, dst, match):
716 """given copies to context 'dst', finds renames from that context"""
720 """given copies to context 'dst', finds renames from that context"""
717 # Even though we're not taking copies into account, 1:n rename situations
721 # Even though we're not taking copies into account, 1:n rename situations
718 # can still exist (e.g. hg cp a b; hg mv a c). In those cases we
722 # can still exist (e.g. hg cp a b; hg mv a c). In those cases we
719 # arbitrarily pick one of the renames.
723 # arbitrarily pick one of the renames.
720 r = {}
724 r = {}
721 for k, v in sorted(pycompat.iteritems(copies)):
725 for k, v in sorted(pycompat.iteritems(copies)):
722 if match and not match(v):
726 if match and not match(v):
723 continue
727 continue
724 # remove copies
728 # remove copies
725 if v in dst:
729 if v in dst:
726 continue
730 continue
727 r[v] = k
731 r[v] = k
728 return r
732 return r
729
733
730
734
def pathcopies(x, y, match=None):
    """find {dst@y: src@x} copy mapping for directed compare"""
    repo = x._repo
    debug = repo.ui.debugflag and repo.ui.configbool(b'devel', b'debug.copies')
    if debug:
        repo.ui.debug(
            b'debug.copies: searching copies from %s to %s\n' % (x, y)
        )
    if x == y or not x or not y:
        # identical or empty endpoints: nothing can have been copied
        return {}
    if y.rev() is None and x == y.p1():
        if debug:
            repo.ui.debug(b'debug.copies: search mode: dirstate\n')
        # short-circuit to avoid issues with merge states
        return _dirstatecopies(repo, match)
    anc = y.ancestor(x)
    if anc == x:
        # x is an ancestor of y: walk copies forward
        if debug:
            repo.ui.debug(b'debug.copies: search mode: forward\n')
        copies = _forwardcopies(x, y, match=match)
    elif anc == y:
        # y is an ancestor of x: find renames going backward
        if debug:
            repo.ui.debug(b'debug.copies: search mode: backward\n')
        copies = _backwardrenames(x, y, match=match)
    else:
        # unrelated sides: combine a backward walk up to the common
        # ancestor with a forward walk down to y
        if debug:
            repo.ui.debug(b'debug.copies: search mode: combined\n')
        base = x if anc.rev() != nullrev else None
        x_copies = _forwardcopies(anc, x)
        y_copies = _forwardcopies(anc, y, base, match=match)
        # identical copies on both sides cancel out
        for k in set(x_copies) & set(y_copies):
            if x_copies.get(k) == y_copies.get(k):
                del x_copies[k]
                del y_copies[k]
        x_backward_renames = _reverse_renames(x_copies, x, match)
        copies = _chain(x_backward_renames, y_copies)
    _filter(x, y, copies)
    return copies
775
779
776
780
def mergecopies(repo, c1, c2, base):
    """
    Finds moves and copies between context c1 and c2 that are relevant for
    merging. 'base' will be used as the merge base.

    Copytracing is used in commands like rebase, merge, unshelve, etc to merge
    files that were moved/ copied in one merge parent and modified in another.
    For example:

    o          ---> 4 another commit
    |
    |   o      ---> 3 commit that modifies a.txt
    |  /
    o /        ---> 2 commit that moves a.txt to b.txt
    |/
    o          ---> 1 merge base

    If we try to rebase revision 3 on revision 4, since there is no a.txt in
    revision 4, and if user have copytrace disabled, we prints the following
    message:

    ```other changed <file> which local deleted```

    Returns a tuple where:

    "branch_copies" an instance of branch_copies.

    "diverge" is a mapping of source name -> list of destination names
    for divergent renames.

    This function calls different copytracing algorithms based on config.
    """
    # avoid silly behavior for update from empty dir
    if not c1 or not c2 or c1 == c2:
        return branch_copies(), branch_copies(), {}

    narrowmatch = c1.repo().narrowmatch()

    # avoid silly behavior for parent -> working dir
    if c2.node() is None and c1.node() == repo.dirstate.p1():
        local_copies = branch_copies(_dirstatecopies(repo, narrowmatch))
        return local_copies, branch_copies(), {}

    copytracing = repo.ui.config(b'experimental', b'copytrace')
    if stringutil.parsebool(copytracing) is False:
        # stringutil.parsebool() returns None when it is unable to parse the
        # value, so we should rely on making sure copytracing is on such cases
        return branch_copies(), branch_copies(), {}

    if usechangesetcentricalgo(repo):
        # The heuristics don't make sense when we need changeset-centric algos
        return _fullcopytracing(repo, c1, c2, base)

    # Copy trace disabling is explicitly below the node == p1 logic above
    # because the logic above is required for a simple copy to be kept across a
    # rebase.
    if copytracing != b'heuristics':
        return _fullcopytracing(repo, c1, c2, base)

    # Do full copytracing if only non-public revisions are involved as
    # that will be fast enough and will also cover the copies which could
    # be missed by heuristics
    if _isfullcopytraceable(repo, c1, base):
        return _fullcopytracing(repo, c1, c2, base)
    return _heuristicscopytracing(repo, c1, c2, base)
845
849
846
850
def _isfullcopytraceable(repo, c1, base):
    """Checks that if base, source and destination are all no-public branches,
    if yes let's use the full copytrace algorithm for increased capabilities
    since it will be fast enough.

    `experimental.copytrace.sourcecommitlimit` can be used to set a limit for
    number of changesets from c1 to base such that if number of changesets are
    more than the limit, full copytracing algorithm won't be used.
    """
    if c1.rev() is None:
        # working copy: evaluate its first parent instead
        c1 = c1.p1()
    if not (c1.mutable() and base.mutable()):
        # a public (immutable) endpoint disqualifies full copytracing
        return False
    sourcecommitlimit = repo.ui.configint(
        b'experimental', b'copytrace.sourcecommitlimit'
    )
    commits = len(repo.revs(b'%d::%d', base.rev(), c1.rev()))
    return commits < sourcecommitlimit
865
869
866
870
def _checksinglesidecopies(
    src, dsts1, m1, m2, mb, c2, base, copy, renamedelete
):
    """record copies/rename-deletes for a source touched on only one side"""
    if src not in m2:
        # deleted on side 2
        if src not in m1:
            # renamed on side 1, deleted on side 2
            renamedelete[src] = dsts1
        return
    if src not in mb:
        # Work around the "short-circuit to avoid issues with merge states"
        # thing in pathcopies(): pathcopies(x, y) can return a copy where the
        # destination doesn't exist in y.
        return
    if mb[src] != m2[src] and not _related(c2[src], base[src]):
        return
    if mb[src] != m2[src] or mb.flags(src) != m2.flags(src):
        # modified on side 2
        for dst in dsts1:
            copy[dst] = src
886
890
887
891
class branch_copies(object):
    """Information about copies made on one side of a merge/graft.

    "copy" is a mapping from destination name -> source name,
    where source is in c1 and destination is in c2 or vice-versa.

    "movewithdir" is a mapping from source name -> destination name,
    where the file at source present in one context but not the other
    needs to be moved to destination by the merge process, because the
    other context moved the directory it is in.

    "renamedelete" is a mapping of source name -> list of destination
    names for files deleted in c1 that were renamed in c2 or vice-versa.

    "dirmove" is a mapping of detected source dir -> destination dir renames.
    This is needed for handling changes to new files previously grafted into
    renamed directories.
    """

    def __init__(
        self, copy=None, renamedelete=None, dirmove=None, movewithdir=None
    ):
        # each mapping defaults to a fresh empty dict when not supplied
        self.copy = copy if copy is not None else {}
        self.renamedelete = renamedelete if renamedelete is not None else {}
        self.dirmove = dirmove if dirmove is not None else {}
        self.movewithdir = movewithdir if movewithdir is not None else {}

    def __repr__(self):
        template = (
            '<branch_copies\n copy=%r\n renamedelete=%r\n dirmove=%r\n'
            ' movewithdir=%r\n>'
        )
        return template % (
            self.copy,
            self.renamedelete,
            self.dirmove,
            self.movewithdir,
        )
922
926
923
927
924 def _fullcopytracing(repo, c1, c2, base):
928 def _fullcopytracing(repo, c1, c2, base):
925 """The full copytracing algorithm which finds all the new files that were
929 """The full copytracing algorithm which finds all the new files that were
926 added from merge base up to the top commit and for each file it checks if
930 added from merge base up to the top commit and for each file it checks if
927 this file was copied from another file.
931 this file was copied from another file.
928
932
929 This is pretty slow when a lot of changesets are involved but will track all
933 This is pretty slow when a lot of changesets are involved but will track all
930 the copies.
934 the copies.
931 """
935 """
932 m1 = c1.manifest()
936 m1 = c1.manifest()
933 m2 = c2.manifest()
937 m2 = c2.manifest()
934 mb = base.manifest()
938 mb = base.manifest()
935
939
936 copies1 = pathcopies(base, c1)
940 copies1 = pathcopies(base, c1)
937 copies2 = pathcopies(base, c2)
941 copies2 = pathcopies(base, c2)
938
942
939 if not (copies1 or copies2):
943 if not (copies1 or copies2):
940 return branch_copies(), branch_copies(), {}
944 return branch_copies(), branch_copies(), {}
941
945
942 inversecopies1 = {}
946 inversecopies1 = {}
943 inversecopies2 = {}
947 inversecopies2 = {}
944 for dst, src in copies1.items():
948 for dst, src in copies1.items():
945 inversecopies1.setdefault(src, []).append(dst)
949 inversecopies1.setdefault(src, []).append(dst)
946 for dst, src in copies2.items():
950 for dst, src in copies2.items():
947 inversecopies2.setdefault(src, []).append(dst)
951 inversecopies2.setdefault(src, []).append(dst)
948
952
949 copy1 = {}
953 copy1 = {}
950 copy2 = {}
954 copy2 = {}
951 diverge = {}
955 diverge = {}
952 renamedelete1 = {}
956 renamedelete1 = {}
953 renamedelete2 = {}
957 renamedelete2 = {}
954 allsources = set(inversecopies1) | set(inversecopies2)
958 allsources = set(inversecopies1) | set(inversecopies2)
955 for src in allsources:
959 for src in allsources:
956 dsts1 = inversecopies1.get(src)
960 dsts1 = inversecopies1.get(src)
957 dsts2 = inversecopies2.get(src)
961 dsts2 = inversecopies2.get(src)
958 if dsts1 and dsts2:
962 if dsts1 and dsts2:
959 # copied/renamed on both sides
963 # copied/renamed on both sides
960 if src not in m1 and src not in m2:
964 if src not in m1 and src not in m2:
961 # renamed on both sides
965 # renamed on both sides
962 dsts1 = set(dsts1)
966 dsts1 = set(dsts1)
963 dsts2 = set(dsts2)
967 dsts2 = set(dsts2)
964 # If there's some overlap in the rename destinations, we
968 # If there's some overlap in the rename destinations, we
965 # consider it not divergent. For example, if side 1 copies 'a'
969 # consider it not divergent. For example, if side 1 copies 'a'
966 # to 'b' and 'c' and deletes 'a', and side 2 copies 'a' to 'c'
970 # to 'b' and 'c' and deletes 'a', and side 2 copies 'a' to 'c'
967 # and 'd' and deletes 'a'.
971 # and 'd' and deletes 'a'.
968 if dsts1 & dsts2:
972 if dsts1 & dsts2:
969 for dst in dsts1 & dsts2:
973 for dst in dsts1 & dsts2:
970 copy1[dst] = src
974 copy1[dst] = src
971 copy2[dst] = src
975 copy2[dst] = src
972 else:
976 else:
973 diverge[src] = sorted(dsts1 | dsts2)
977 diverge[src] = sorted(dsts1 | dsts2)
974 elif src in m1 and src in m2:
978 elif src in m1 and src in m2:
975 # copied on both sides
979 # copied on both sides
976 dsts1 = set(dsts1)
980 dsts1 = set(dsts1)
977 dsts2 = set(dsts2)
981 dsts2 = set(dsts2)
978 for dst in dsts1 & dsts2:
982 for dst in dsts1 & dsts2:
979 copy1[dst] = src
983 copy1[dst] = src
980 copy2[dst] = src
984 copy2[dst] = src
981 # TODO: Handle cases where it was renamed on one side and copied
985 # TODO: Handle cases where it was renamed on one side and copied
982 # on the other side
986 # on the other side
983 elif dsts1:
987 elif dsts1:
984 # copied/renamed only on side 1
988 # copied/renamed only on side 1
985 _checksinglesidecopies(
989 _checksinglesidecopies(
986 src, dsts1, m1, m2, mb, c2, base, copy1, renamedelete1
990 src, dsts1, m1, m2, mb, c2, base, copy1, renamedelete1
987 )
991 )
988 elif dsts2:
992 elif dsts2:
989 # copied/renamed only on side 2
993 # copied/renamed only on side 2
990 _checksinglesidecopies(
994 _checksinglesidecopies(
991 src, dsts2, m2, m1, mb, c1, base, copy2, renamedelete2
995 src, dsts2, m2, m1, mb, c1, base, copy2, renamedelete2
992 )
996 )
993
997
994 # find interesting file sets from manifests
998 # find interesting file sets from manifests
995 cache = []
999 cache = []
996
1000
997 def _get_addedfiles(idx):
1001 def _get_addedfiles(idx):
998 if not cache:
1002 if not cache:
999 addedinm1 = m1.filesnotin(mb, repo.narrowmatch())
1003 addedinm1 = m1.filesnotin(mb, repo.narrowmatch())
1000 addedinm2 = m2.filesnotin(mb, repo.narrowmatch())
1004 addedinm2 = m2.filesnotin(mb, repo.narrowmatch())
1001 u1 = sorted(addedinm1 - addedinm2)
1005 u1 = sorted(addedinm1 - addedinm2)
1002 u2 = sorted(addedinm2 - addedinm1)
1006 u2 = sorted(addedinm2 - addedinm1)
1003 cache.extend((u1, u2))
1007 cache.extend((u1, u2))
1004 return cache[idx]
1008 return cache[idx]
1005
1009
1006 u1fn = lambda: _get_addedfiles(0)
1010 u1fn = lambda: _get_addedfiles(0)
1007 u2fn = lambda: _get_addedfiles(1)
1011 u2fn = lambda: _get_addedfiles(1)
1008 if repo.ui.debugflag:
1012 if repo.ui.debugflag:
1009 u1 = u1fn()
1013 u1 = u1fn()
1010 u2 = u2fn()
1014 u2 = u2fn()
1011
1015
1012 header = b" unmatched files in %s"
1016 header = b" unmatched files in %s"
1013 if u1:
1017 if u1:
1014 repo.ui.debug(
1018 repo.ui.debug(
1015 b"%s:\n %s\n" % (header % b'local', b"\n ".join(u1))
1019 b"%s:\n %s\n" % (header % b'local', b"\n ".join(u1))
1016 )
1020 )
1017 if u2:
1021 if u2:
1018 repo.ui.debug(
1022 repo.ui.debug(
1019 b"%s:\n %s\n" % (header % b'other', b"\n ".join(u2))
1023 b"%s:\n %s\n" % (header % b'other', b"\n ".join(u2))
1020 )
1024 )
1021
1025
1022 renamedeleteset = set()
1026 renamedeleteset = set()
1023 divergeset = set()
1027 divergeset = set()
1024 for dsts in diverge.values():
1028 for dsts in diverge.values():
1025 divergeset.update(dsts)
1029 divergeset.update(dsts)
1026 for dsts in renamedelete1.values():
1030 for dsts in renamedelete1.values():
1027 renamedeleteset.update(dsts)
1031 renamedeleteset.update(dsts)
1028 for dsts in renamedelete2.values():
1032 for dsts in renamedelete2.values():
1029 renamedeleteset.update(dsts)
1033 renamedeleteset.update(dsts)
1030
1034
1031 repo.ui.debug(
1035 repo.ui.debug(
1032 b" all copies found (* = to merge, ! = divergent, "
1036 b" all copies found (* = to merge, ! = divergent, "
1033 b"% = renamed and deleted):\n"
1037 b"% = renamed and deleted):\n"
1034 )
1038 )
1035 for side, copies in ((b"local", copies1), (b"remote", copies2)):
1039 for side, copies in ((b"local", copies1), (b"remote", copies2)):
1036 if not copies:
1040 if not copies:
1037 continue
1041 continue
1038 repo.ui.debug(b" on %s side:\n" % side)
1042 repo.ui.debug(b" on %s side:\n" % side)
1039 for f in sorted(copies):
1043 for f in sorted(copies):
1040 note = b""
1044 note = b""
1041 if f in copy1 or f in copy2:
1045 if f in copy1 or f in copy2:
1042 note += b"*"
1046 note += b"*"
1043 if f in divergeset:
1047 if f in divergeset:
1044 note += b"!"
1048 note += b"!"
1045 if f in renamedeleteset:
1049 if f in renamedeleteset:
1046 note += b"%"
1050 note += b"%"
1047 repo.ui.debug(
1051 repo.ui.debug(
1048 b" src: '%s' -> dst: '%s' %s\n" % (copies[f], f, note)
1052 b" src: '%s' -> dst: '%s' %s\n" % (copies[f], f, note)
1049 )
1053 )
1050 del renamedeleteset
1054 del renamedeleteset
1051 del divergeset
1055 del divergeset
1052
1056
1053 repo.ui.debug(b" checking for directory renames\n")
1057 repo.ui.debug(b" checking for directory renames\n")
1054
1058
1055 dirmove1, movewithdir2 = _dir_renames(repo, c1, copy1, copies1, u2fn)
1059 dirmove1, movewithdir2 = _dir_renames(repo, c1, copy1, copies1, u2fn)
1056 dirmove2, movewithdir1 = _dir_renames(repo, c2, copy2, copies2, u1fn)
1060 dirmove2, movewithdir1 = _dir_renames(repo, c2, copy2, copies2, u1fn)
1057
1061
1058 branch_copies1 = branch_copies(copy1, renamedelete1, dirmove1, movewithdir1)
1062 branch_copies1 = branch_copies(copy1, renamedelete1, dirmove1, movewithdir1)
1059 branch_copies2 = branch_copies(copy2, renamedelete2, dirmove2, movewithdir2)
1063 branch_copies2 = branch_copies(copy2, renamedelete2, dirmove2, movewithdir2)
1060
1064
1061 return branch_copies1, branch_copies2, diverge
1065 return branch_copies1, branch_copies2, diverge
1062
1066
1063
1067
1064 def _dir_renames(repo, ctx, copy, fullcopy, addedfilesfn):
1068 def _dir_renames(repo, ctx, copy, fullcopy, addedfilesfn):
1065 """Finds moved directories and files that should move with them.
1069 """Finds moved directories and files that should move with them.
1066
1070
1067 ctx: the context for one of the sides
1071 ctx: the context for one of the sides
1068 copy: files copied on the same side (as ctx)
1072 copy: files copied on the same side (as ctx)
1069 fullcopy: files copied on the same side (as ctx), including those that
1073 fullcopy: files copied on the same side (as ctx), including those that
1070 merge.manifestmerge() won't care about
1074 merge.manifestmerge() won't care about
1071 addedfilesfn: function returning added files on the other side (compared to
1075 addedfilesfn: function returning added files on the other side (compared to
1072 ctx)
1076 ctx)
1073 """
1077 """
1074 # generate a directory move map
1078 # generate a directory move map
1075 invalid = set()
1079 invalid = set()
1076 dirmove = {}
1080 dirmove = {}
1077
1081
1078 # examine each file copy for a potential directory move, which is
1082 # examine each file copy for a potential directory move, which is
1079 # when all the files in a directory are moved to a new directory
1083 # when all the files in a directory are moved to a new directory
1080 for dst, src in pycompat.iteritems(fullcopy):
1084 for dst, src in pycompat.iteritems(fullcopy):
1081 dsrc, ddst = pathutil.dirname(src), pathutil.dirname(dst)
1085 dsrc, ddst = pathutil.dirname(src), pathutil.dirname(dst)
1082 if dsrc in invalid:
1086 if dsrc in invalid:
1083 # already seen to be uninteresting
1087 # already seen to be uninteresting
1084 continue
1088 continue
1085 elif ctx.hasdir(dsrc) and ctx.hasdir(ddst):
1089 elif ctx.hasdir(dsrc) and ctx.hasdir(ddst):
1086 # directory wasn't entirely moved locally
1090 # directory wasn't entirely moved locally
1087 invalid.add(dsrc)
1091 invalid.add(dsrc)
1088 elif dsrc in dirmove and dirmove[dsrc] != ddst:
1092 elif dsrc in dirmove and dirmove[dsrc] != ddst:
1089 # files from the same directory moved to two different places
1093 # files from the same directory moved to two different places
1090 invalid.add(dsrc)
1094 invalid.add(dsrc)
1091 else:
1095 else:
1092 # looks good so far
1096 # looks good so far
1093 dirmove[dsrc] = ddst
1097 dirmove[dsrc] = ddst
1094
1098
1095 for i in invalid:
1099 for i in invalid:
1096 if i in dirmove:
1100 if i in dirmove:
1097 del dirmove[i]
1101 del dirmove[i]
1098 del invalid
1102 del invalid
1099
1103
1100 if not dirmove:
1104 if not dirmove:
1101 return {}, {}
1105 return {}, {}
1102
1106
1103 dirmove = {k + b"/": v + b"/" for k, v in pycompat.iteritems(dirmove)}
1107 dirmove = {k + b"/": v + b"/" for k, v in pycompat.iteritems(dirmove)}
1104
1108
1105 for d in dirmove:
1109 for d in dirmove:
1106 repo.ui.debug(
1110 repo.ui.debug(
1107 b" discovered dir src: '%s' -> dst: '%s'\n" % (d, dirmove[d])
1111 b" discovered dir src: '%s' -> dst: '%s'\n" % (d, dirmove[d])
1108 )
1112 )
1109
1113
1110 # Sort the directories in reverse order, so we find children first
1114 # Sort the directories in reverse order, so we find children first
1111 # For example, if dir1/ was renamed to dir2/, and dir1/subdir1/
1115 # For example, if dir1/ was renamed to dir2/, and dir1/subdir1/
1112 # was renamed to dir2/subdir2/, we want to move dir1/subdir1/file
1116 # was renamed to dir2/subdir2/, we want to move dir1/subdir1/file
1113 # to dir2/subdir2/file (not dir2/subdir1/file)
1117 # to dir2/subdir2/file (not dir2/subdir1/file)
1114 dirmove_children_first = sorted(dirmove, reverse=True)
1118 dirmove_children_first = sorted(dirmove, reverse=True)
1115
1119
1116 movewithdir = {}
1120 movewithdir = {}
1117 # check unaccounted nonoverlapping files against directory moves
1121 # check unaccounted nonoverlapping files against directory moves
1118 for f in addedfilesfn():
1122 for f in addedfilesfn():
1119 if f not in fullcopy:
1123 if f not in fullcopy:
1120 for d in dirmove_children_first:
1124 for d in dirmove_children_first:
1121 if f.startswith(d):
1125 if f.startswith(d):
1122 # new file added in a directory that was moved, move it
1126 # new file added in a directory that was moved, move it
1123 df = dirmove[d] + f[len(d) :]
1127 df = dirmove[d] + f[len(d) :]
1124 if df not in copy:
1128 if df not in copy:
1125 movewithdir[f] = df
1129 movewithdir[f] = df
1126 repo.ui.debug(
1130 repo.ui.debug(
1127 b" pending file src: '%s' -> dst: '%s'\n"
1131 b" pending file src: '%s' -> dst: '%s'\n"
1128 % (f, df)
1132 % (f, df)
1129 )
1133 )
1130 break
1134 break
1131
1135
1132 return dirmove, movewithdir
1136 return dirmove, movewithdir
1133
1137
1134
1138
1135 def _heuristicscopytracing(repo, c1, c2, base):
1139 def _heuristicscopytracing(repo, c1, c2, base):
1136 """Fast copytracing using filename heuristics
1140 """Fast copytracing using filename heuristics
1137
1141
1138 Assumes that moves or renames are of following two types:
1142 Assumes that moves or renames are of following two types:
1139
1143
1140 1) Inside a directory only (same directory name but different filenames)
1144 1) Inside a directory only (same directory name but different filenames)
1141 2) Move from one directory to another
1145 2) Move from one directory to another
1142 (same filenames but different directory names)
1146 (same filenames but different directory names)
1143
1147
1144 Works only when there are no merge commits in the "source branch".
1148 Works only when there are no merge commits in the "source branch".
1145 Source branch is commits from base up to c2 not including base.
1149 Source branch is commits from base up to c2 not including base.
1146
1150
1147 If merge is involved it fallbacks to _fullcopytracing().
1151 If merge is involved it fallbacks to _fullcopytracing().
1148
1152
1149 Can be used by setting the following config:
1153 Can be used by setting the following config:
1150
1154
1151 [experimental]
1155 [experimental]
1152 copytrace = heuristics
1156 copytrace = heuristics
1153
1157
1154 In some cases the copy/move candidates found by heuristics can be very large
1158 In some cases the copy/move candidates found by heuristics can be very large
1155 in number and that will make the algorithm slow. The number of possible
1159 in number and that will make the algorithm slow. The number of possible
1156 candidates to check can be limited by using the config
1160 candidates to check can be limited by using the config
1157 `experimental.copytrace.movecandidateslimit` which defaults to 100.
1161 `experimental.copytrace.movecandidateslimit` which defaults to 100.
1158 """
1162 """
1159
1163
1160 if c1.rev() is None:
1164 if c1.rev() is None:
1161 c1 = c1.p1()
1165 c1 = c1.p1()
1162 if c2.rev() is None:
1166 if c2.rev() is None:
1163 c2 = c2.p1()
1167 c2 = c2.p1()
1164
1168
1165 changedfiles = set()
1169 changedfiles = set()
1166 m1 = c1.manifest()
1170 m1 = c1.manifest()
1167 if not repo.revs(b'%d::%d', base.rev(), c2.rev()):
1171 if not repo.revs(b'%d::%d', base.rev(), c2.rev()):
1168 # If base is not in c2 branch, we switch to fullcopytracing
1172 # If base is not in c2 branch, we switch to fullcopytracing
1169 repo.ui.debug(
1173 repo.ui.debug(
1170 b"switching to full copytracing as base is not "
1174 b"switching to full copytracing as base is not "
1171 b"an ancestor of c2\n"
1175 b"an ancestor of c2\n"
1172 )
1176 )
1173 return _fullcopytracing(repo, c1, c2, base)
1177 return _fullcopytracing(repo, c1, c2, base)
1174
1178
1175 ctx = c2
1179 ctx = c2
1176 while ctx != base:
1180 while ctx != base:
1177 if len(ctx.parents()) == 2:
1181 if len(ctx.parents()) == 2:
1178 # To keep things simple let's not handle merges
1182 # To keep things simple let's not handle merges
1179 repo.ui.debug(b"switching to full copytracing because of merges\n")
1183 repo.ui.debug(b"switching to full copytracing because of merges\n")
1180 return _fullcopytracing(repo, c1, c2, base)
1184 return _fullcopytracing(repo, c1, c2, base)
1181 changedfiles.update(ctx.files())
1185 changedfiles.update(ctx.files())
1182 ctx = ctx.p1()
1186 ctx = ctx.p1()
1183
1187
1184 copies2 = {}
1188 copies2 = {}
1185 cp = _forwardcopies(base, c2)
1189 cp = _forwardcopies(base, c2)
1186 for dst, src in pycompat.iteritems(cp):
1190 for dst, src in pycompat.iteritems(cp):
1187 if src in m1:
1191 if src in m1:
1188 copies2[dst] = src
1192 copies2[dst] = src
1189
1193
1190 # file is missing if it isn't present in the destination, but is present in
1194 # file is missing if it isn't present in the destination, but is present in
1191 # the base and present in the source.
1195 # the base and present in the source.
1192 # Presence in the base is important to exclude added files, presence in the
1196 # Presence in the base is important to exclude added files, presence in the
1193 # source is important to exclude removed files.
1197 # source is important to exclude removed files.
1194 filt = lambda f: f not in m1 and f in base and f in c2
1198 filt = lambda f: f not in m1 and f in base and f in c2
1195 missingfiles = [f for f in changedfiles if filt(f)]
1199 missingfiles = [f for f in changedfiles if filt(f)]
1196
1200
1197 copies1 = {}
1201 copies1 = {}
1198 if missingfiles:
1202 if missingfiles:
1199 basenametofilename = collections.defaultdict(list)
1203 basenametofilename = collections.defaultdict(list)
1200 dirnametofilename = collections.defaultdict(list)
1204 dirnametofilename = collections.defaultdict(list)
1201
1205
1202 for f in m1.filesnotin(base.manifest()):
1206 for f in m1.filesnotin(base.manifest()):
1203 basename = os.path.basename(f)
1207 basename = os.path.basename(f)
1204 dirname = os.path.dirname(f)
1208 dirname = os.path.dirname(f)
1205 basenametofilename[basename].append(f)
1209 basenametofilename[basename].append(f)
1206 dirnametofilename[dirname].append(f)
1210 dirnametofilename[dirname].append(f)
1207
1211
1208 for f in missingfiles:
1212 for f in missingfiles:
1209 basename = os.path.basename(f)
1213 basename = os.path.basename(f)
1210 dirname = os.path.dirname(f)
1214 dirname = os.path.dirname(f)
1211 samebasename = basenametofilename[basename]
1215 samebasename = basenametofilename[basename]
1212 samedirname = dirnametofilename[dirname]
1216 samedirname = dirnametofilename[dirname]
1213 movecandidates = samebasename + samedirname
1217 movecandidates = samebasename + samedirname
1214 # f is guaranteed to be present in c2, that's why
1218 # f is guaranteed to be present in c2, that's why
1215 # c2.filectx(f) won't fail
1219 # c2.filectx(f) won't fail
1216 f2 = c2.filectx(f)
1220 f2 = c2.filectx(f)
1217 # we can have a lot of candidates which can slow down the heuristics
1221 # we can have a lot of candidates which can slow down the heuristics
1218 # config value to limit the number of candidates moves to check
1222 # config value to limit the number of candidates moves to check
1219 maxcandidates = repo.ui.configint(
1223 maxcandidates = repo.ui.configint(
1220 b'experimental', b'copytrace.movecandidateslimit'
1224 b'experimental', b'copytrace.movecandidateslimit'
1221 )
1225 )
1222
1226
1223 if len(movecandidates) > maxcandidates:
1227 if len(movecandidates) > maxcandidates:
1224 repo.ui.status(
1228 repo.ui.status(
1225 _(
1229 _(
1226 b"skipping copytracing for '%s', more "
1230 b"skipping copytracing for '%s', more "
1227 b"candidates than the limit: %d\n"
1231 b"candidates than the limit: %d\n"
1228 )
1232 )
1229 % (f, len(movecandidates))
1233 % (f, len(movecandidates))
1230 )
1234 )
1231 continue
1235 continue
1232
1236
1233 for candidate in movecandidates:
1237 for candidate in movecandidates:
1234 f1 = c1.filectx(candidate)
1238 f1 = c1.filectx(candidate)
1235 if _related(f1, f2):
1239 if _related(f1, f2):
1236 # if there are a few related copies then we'll merge
1240 # if there are a few related copies then we'll merge
1237 # changes into all of them. This matches the behaviour
1241 # changes into all of them. This matches the behaviour
1238 # of upstream copytracing
1242 # of upstream copytracing
1239 copies1[candidate] = f
1243 copies1[candidate] = f
1240
1244
1241 return branch_copies(copies1), branch_copies(copies2), {}
1245 return branch_copies(copies1), branch_copies(copies2), {}
1242
1246
1243
1247
1244 def _related(f1, f2):
1248 def _related(f1, f2):
1245 """return True if f1 and f2 filectx have a common ancestor
1249 """return True if f1 and f2 filectx have a common ancestor
1246
1250
1247 Walk back to common ancestor to see if the two files originate
1251 Walk back to common ancestor to see if the two files originate
1248 from the same file. Since workingfilectx's rev() is None it messes
1252 from the same file. Since workingfilectx's rev() is None it messes
1249 up the integer comparison logic, hence the pre-step check for
1253 up the integer comparison logic, hence the pre-step check for
1250 None (f1 and f2 can only be workingfilectx's initially).
1254 None (f1 and f2 can only be workingfilectx's initially).
1251 """
1255 """
1252
1256
1253 if f1 == f2:
1257 if f1 == f2:
1254 return True # a match
1258 return True # a match
1255
1259
1256 g1, g2 = f1.ancestors(), f2.ancestors()
1260 g1, g2 = f1.ancestors(), f2.ancestors()
1257 try:
1261 try:
1258 f1r, f2r = f1.linkrev(), f2.linkrev()
1262 f1r, f2r = f1.linkrev(), f2.linkrev()
1259
1263
1260 if f1r is None:
1264 if f1r is None:
1261 f1 = next(g1)
1265 f1 = next(g1)
1262 if f2r is None:
1266 if f2r is None:
1263 f2 = next(g2)
1267 f2 = next(g2)
1264
1268
1265 while True:
1269 while True:
1266 f1r, f2r = f1.linkrev(), f2.linkrev()
1270 f1r, f2r = f1.linkrev(), f2.linkrev()
1267 if f1r > f2r:
1271 if f1r > f2r:
1268 f1 = next(g1)
1272 f1 = next(g1)
1269 elif f2r > f1r:
1273 elif f2r > f1r:
1270 f2 = next(g2)
1274 f2 = next(g2)
1271 else: # f1 and f2 point to files in the same linkrev
1275 else: # f1 and f2 point to files in the same linkrev
1272 return f1 == f2 # true if they point to the same file
1276 return f1 == f2 # true if they point to the same file
1273 except StopIteration:
1277 except StopIteration:
1274 return False
1278 return False
1275
1279
1276
1280
1277 def graftcopies(wctx, ctx, base):
1281 def graftcopies(wctx, ctx, base):
1278 """reproduce copies between base and ctx in the wctx
1282 """reproduce copies between base and ctx in the wctx
1279
1283
1280 Unlike mergecopies(), this function will only consider copies between base
1284 Unlike mergecopies(), this function will only consider copies between base
1281 and ctx; it will ignore copies between base and wctx. Also unlike
1285 and ctx; it will ignore copies between base and wctx. Also unlike
1282 mergecopies(), this function will apply copies to the working copy (instead
1286 mergecopies(), this function will apply copies to the working copy (instead
1283 of just returning information about the copies). That makes it cheaper
1287 of just returning information about the copies). That makes it cheaper
1284 (especially in the common case of base==ctx.p1()) and useful also when
1288 (especially in the common case of base==ctx.p1()) and useful also when
1285 experimental.copytrace=off.
1289 experimental.copytrace=off.
1286
1290
1287 merge.update() will have already marked most copies, but it will only
1291 merge.update() will have already marked most copies, but it will only
1288 mark copies if it thinks the source files are related (see
1292 mark copies if it thinks the source files are related (see
1289 merge._related()). It will also not mark copies if the file wasn't modified
1293 merge._related()). It will also not mark copies if the file wasn't modified
1290 on the local side. This function adds the copies that were "missed"
1294 on the local side. This function adds the copies that were "missed"
1291 by merge.update().
1295 by merge.update().
1292 """
1296 """
1293 new_copies = pathcopies(base, ctx)
1297 new_copies = pathcopies(base, ctx)
1294 parent = wctx.p1()
1298 parent = wctx.p1()
1295 _filter(parent, wctx, new_copies)
1299 _filter(parent, wctx, new_copies)
1296 # Extra filtering to drop copy information for files that existed before
1300 # Extra filtering to drop copy information for files that existed before
1297 # the graft. This is to handle the case of grafting a rename onto a commit
1301 # the graft. This is to handle the case of grafting a rename onto a commit
1298 # that already has the rename. Otherwise the presence of copy information
1302 # that already has the rename. Otherwise the presence of copy information
1299 # would result in the creation of an empty commit where we would prefer to
1303 # would result in the creation of an empty commit where we would prefer to
1300 # not create one.
1304 # not create one.
1301 for dest, __ in list(new_copies.items()):
1305 for dest, __ in list(new_copies.items()):
1302 if dest in parent:
1306 if dest in parent:
1303 del new_copies[dest]
1307 del new_copies[dest]
1304 for dst, src in pycompat.iteritems(new_copies):
1308 for dst, src in pycompat.iteritems(new_copies):
1305 wctx[dst].markcopied(src)
1309 wctx[dst].markcopied(src)
@@ -1,98 +1,96 b''
1 #require pytype py3 slow
1 #require pytype py3 slow
2
2
3 $ cd $RUNTESTDIR/..
3 $ cd $RUNTESTDIR/..
4
4
5 Many of the individual files that are excluded here confuse pytype
5 Many of the individual files that are excluded here confuse pytype
6 because they do a mix of Python 2 and Python 3 things
6 because they do a mix of Python 2 and Python 3 things
7 conditionally. There's no good way to help it out with that as far as
7 conditionally. There's no good way to help it out with that as far as
8 I can tell, so let's just hide those files from it for now. We should
8 I can tell, so let's just hide those files from it for now. We should
9 endeavor to empty this list out over time, as some of these are
9 endeavor to empty this list out over time, as some of these are
10 probably hiding real problems.
10 probably hiding real problems.
11
11
12 mercurial/bundlerepo.py # no vfs and ui attrs on bundlerepo
12 mercurial/bundlerepo.py # no vfs and ui attrs on bundlerepo
13 mercurial/chgserver.py # [attribute-error]
13 mercurial/chgserver.py # [attribute-error]
14 mercurial/cmdutil.py # No attribute 'markcopied' on mercurial.context.filectx [attribute-error]
14 mercurial/cmdutil.py # No attribute 'markcopied' on mercurial.context.filectx [attribute-error]
15 mercurial/context.py # many [attribute-error]
15 mercurial/context.py # many [attribute-error]
16 mercurial/copies.py # No attribute 'items' on None [attribute-error]
17 mercurial/crecord.py # tons of [attribute-error], [module-attr]
16 mercurial/crecord.py # tons of [attribute-error], [module-attr]
18 mercurial/debugcommands.py # [wrong-arg-types]
17 mercurial/debugcommands.py # [wrong-arg-types]
19 mercurial/dispatch.py # initstdio: No attribute ... on TextIO [attribute-error]
18 mercurial/dispatch.py # initstdio: No attribute ... on TextIO [attribute-error]
20 mercurial/exchange.py # [attribute-error]
19 mercurial/exchange.py # [attribute-error]
21 mercurial/hgweb/hgweb_mod.py # [attribute-error], [name-error], [wrong-arg-types]
20 mercurial/hgweb/hgweb_mod.py # [attribute-error], [name-error], [wrong-arg-types]
22 mercurial/hgweb/server.py # [attribute-error], [name-error], [module-attr]
21 mercurial/hgweb/server.py # [attribute-error], [name-error], [module-attr]
23 mercurial/hgweb/webcommands.py # [missing-parameter]
22 mercurial/hgweb/webcommands.py # [missing-parameter]
24 mercurial/hgweb/wsgicgi.py # confused values in os.environ
23 mercurial/hgweb/wsgicgi.py # confused values in os.environ
25 mercurial/httppeer.py # [attribute-error], [wrong-arg-types]
24 mercurial/httppeer.py # [attribute-error], [wrong-arg-types]
26 mercurial/interfaces # No attribute 'capabilities' on peer [attribute-error]
25 mercurial/interfaces # No attribute 'capabilities' on peer [attribute-error]
27 mercurial/keepalive.py # [attribute-error]
26 mercurial/keepalive.py # [attribute-error]
28 mercurial/localrepo.py # [attribute-error]
27 mercurial/localrepo.py # [attribute-error]
29 mercurial/manifest.py # [unsupported-operands], [wrong-arg-types]
28 mercurial/manifest.py # [unsupported-operands], [wrong-arg-types]
30 mercurial/minirst.py # [unsupported-operands], [attribute-error]
29 mercurial/minirst.py # [unsupported-operands], [attribute-error]
31 mercurial/patch.py # [wrong-arg-types]
30 mercurial/patch.py # [wrong-arg-types]
32 mercurial/pure/osutil.py # [invalid-typevar], [not-callable]
31 mercurial/pure/osutil.py # [invalid-typevar], [not-callable]
33 mercurial/pure/parsers.py # [attribute-error]
32 mercurial/pure/parsers.py # [attribute-error]
34 mercurial/pycompat.py # bytes vs str issues
33 mercurial/pycompat.py # bytes vs str issues
35 mercurial/repoview.py # [attribute-error]
34 mercurial/repoview.py # [attribute-error]
36 mercurial/sslutil.py # [attribute-error]
35 mercurial/sslutil.py # [attribute-error]
37 mercurial/statprof.py # bytes vs str on TextIO.write() [wrong-arg-types]
36 mercurial/statprof.py # bytes vs str on TextIO.write() [wrong-arg-types]
38 mercurial/testing/storage.py # tons of [attribute-error]
37 mercurial/testing/storage.py # tons of [attribute-error]
39 mercurial/ui.py # [attribute-error], [wrong-arg-types]
38 mercurial/ui.py # [attribute-error], [wrong-arg-types]
40 mercurial/unionrepo.py # ui, svfs, unfiltered [attribute-error]
39 mercurial/unionrepo.py # ui, svfs, unfiltered [attribute-error]
41 mercurial/util.py # [attribute-error], [wrong-arg-count]
40 mercurial/util.py # [attribute-error], [wrong-arg-count]
42 mercurial/utils/procutil.py # [attribute-error], [module-attr], [bad-return-type]
41 mercurial/utils/procutil.py # [attribute-error], [module-attr], [bad-return-type]
43 mercurial/utils/stringutil.py # [module-attr], [wrong-arg-count]
42 mercurial/utils/stringutil.py # [module-attr], [wrong-arg-count]
44 mercurial/utils/memorytop.py # not 3.6 compatible
43 mercurial/utils/memorytop.py # not 3.6 compatible
45 mercurial/win32.py # [not-callable]
44 mercurial/win32.py # [not-callable]
46 mercurial/wireprotoframing.py # [unsupported-operands], [attribute-error], [import-error]
45 mercurial/wireprotoframing.py # [unsupported-operands], [attribute-error], [import-error]
47 mercurial/wireprotoserver.py # line 253, in _availableapis: No attribute '__iter__' on Callable[[Any, Any], Any] [attribute-error]
46 mercurial/wireprotoserver.py # line 253, in _availableapis: No attribute '__iter__' on Callable[[Any, Any], Any] [attribute-error]
48 mercurial/wireprotov1peer.py # [attribute-error]
47 mercurial/wireprotov1peer.py # [attribute-error]
49 mercurial/wireprotov1server.py # BUG?: BundleValueError handler accesses subclass's attrs
48 mercurial/wireprotov1server.py # BUG?: BundleValueError handler accesses subclass's attrs
50 mercurial/wireprotov2server.py # [unsupported-operands], [attribute-error]
49 mercurial/wireprotov2server.py # [unsupported-operands], [attribute-error]
51
50
52 TODO: use --no-cache on test server? Caching the files locally helps during
51 TODO: use --no-cache on test server? Caching the files locally helps during
53 development, but may be a hinderance for CI testing.
52 development, but may be a hinderance for CI testing.
54
53
55 $ pytype -V 3.6 --keep-going --jobs auto mercurial \
54 $ pytype -V 3.6 --keep-going --jobs auto mercurial \
56 > -x mercurial/bundlerepo.py \
55 > -x mercurial/bundlerepo.py \
57 > -x mercurial/chgserver.py \
56 > -x mercurial/chgserver.py \
58 > -x mercurial/cmdutil.py \
57 > -x mercurial/cmdutil.py \
59 > -x mercurial/context.py \
58 > -x mercurial/context.py \
60 > -x mercurial/copies.py \
61 > -x mercurial/crecord.py \
59 > -x mercurial/crecord.py \
62 > -x mercurial/debugcommands.py \
60 > -x mercurial/debugcommands.py \
63 > -x mercurial/dispatch.py \
61 > -x mercurial/dispatch.py \
64 > -x mercurial/exchange.py \
62 > -x mercurial/exchange.py \
65 > -x mercurial/hgweb/hgweb_mod.py \
63 > -x mercurial/hgweb/hgweb_mod.py \
66 > -x mercurial/hgweb/server.py \
64 > -x mercurial/hgweb/server.py \
67 > -x mercurial/hgweb/webcommands.py \
65 > -x mercurial/hgweb/webcommands.py \
68 > -x mercurial/hgweb/wsgicgi.py \
66 > -x mercurial/hgweb/wsgicgi.py \
69 > -x mercurial/httppeer.py \
67 > -x mercurial/httppeer.py \
70 > -x mercurial/interfaces \
68 > -x mercurial/interfaces \
71 > -x mercurial/keepalive.py \
69 > -x mercurial/keepalive.py \
72 > -x mercurial/localrepo.py \
70 > -x mercurial/localrepo.py \
73 > -x mercurial/manifest.py \
71 > -x mercurial/manifest.py \
74 > -x mercurial/minirst.py \
72 > -x mercurial/minirst.py \
75 > -x mercurial/patch.py \
73 > -x mercurial/patch.py \
76 > -x mercurial/pure/osutil.py \
74 > -x mercurial/pure/osutil.py \
77 > -x mercurial/pure/parsers.py \
75 > -x mercurial/pure/parsers.py \
78 > -x mercurial/pycompat.py \
76 > -x mercurial/pycompat.py \
79 > -x mercurial/repoview.py \
77 > -x mercurial/repoview.py \
80 > -x mercurial/sslutil.py \
78 > -x mercurial/sslutil.py \
81 > -x mercurial/statprof.py \
79 > -x mercurial/statprof.py \
82 > -x mercurial/testing/storage.py \
80 > -x mercurial/testing/storage.py \
83 > -x mercurial/thirdparty \
81 > -x mercurial/thirdparty \
84 > -x mercurial/ui.py \
82 > -x mercurial/ui.py \
85 > -x mercurial/unionrepo.py \
83 > -x mercurial/unionrepo.py \
86 > -x mercurial/utils/procutil.py \
84 > -x mercurial/utils/procutil.py \
87 > -x mercurial/utils/stringutil.py \
85 > -x mercurial/utils/stringutil.py \
88 > -x mercurial/utils/memorytop.py \
86 > -x mercurial/utils/memorytop.py \
89 > -x mercurial/win32.py \
87 > -x mercurial/win32.py \
90 > -x mercurial/wireprotoframing.py \
88 > -x mercurial/wireprotoframing.py \
91 > -x mercurial/wireprotoserver.py \
89 > -x mercurial/wireprotoserver.py \
92 > -x mercurial/wireprotov1peer.py \
90 > -x mercurial/wireprotov1peer.py \
93 > -x mercurial/wireprotov1server.py \
91 > -x mercurial/wireprotov1server.py \
94 > -x mercurial/wireprotov2server.py \
92 > -x mercurial/wireprotov2server.py \
95 > > $TESTTMP/pytype-output.txt || cat $TESTTMP/pytype-output.txt
93 > > $TESTTMP/pytype-output.txt || cat $TESTTMP/pytype-output.txt
96
94
97 Only show the results on a failure, because the output on success is also
95 Only show the results on a failure, because the output on success is also
98 voluminous and variable.
96 voluminous and variable.
General Comments 0
You need to be logged in to leave comments. Login now