ancestor: use heapreplace() in place of heappop/heappush()...
Yuya Nishihara
r39574:238a1480 default
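heapq.heapreplace(heap, item) pops the smallest element and pushes the new item with a single sift, so it can stand in for a heappop()/heappush() pair whenever the replacement is pushed unconditionally. A minimal standalone sketch of that equivalence (toy values, not part of the changeset):

# Illustrative sketch (not part of ancestor.py): heapreplace vs heappop + heappush.
import heapq

a = [3, 5, 9, 12]
b = list(a)
heapq.heapify(a)
heapq.heapify(b)

popped = heapq.heapreplace(a, 7)   # one sift: pop the minimum, push 7
assert popped == heapq.heappop(b)  # both pop the old minimum, 3
heapq.heappush(b, 7)               # two sifts the old way
assert sorted(a) == sorted(b)      # same heap contents either way

In _lazyancestorsiter below the popped value is discarded, so the substitution only reduces the number of heap operations; the revisions yielded are unchanged.
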
@@ -1,382 +1,382 @@
# ancestor.py - generic DAG ancestor algorithm for mercurial
#
# Copyright 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import heapq

from .node import nullrev
from . import (
    pycompat,
)

def commonancestorsheads(pfunc, *nodes):
    """Returns a set with the heads of all common ancestors of all nodes,
    heads(::nodes[0] and ::nodes[1] and ...) .

    pfunc must return a list of parent vertices for a given vertex.
    """
    if not isinstance(nodes, set):
        nodes = set(nodes)
    if nullrev in nodes:
        return set()
    if len(nodes) <= 1:
        return nodes

    allseen = (1 << len(nodes)) - 1
    seen = [0] * (max(nodes) + 1)
    for i, n in enumerate(nodes):
        seen[n] = 1 << i
        poison = 1 << (i + 1)

    gca = set()
    interesting = len(nodes)
    nv = len(seen) - 1
    while nv >= 0 and interesting:
        v = nv
        nv -= 1
        if not seen[v]:
            continue
        sv = seen[v]
        if sv < poison:
            interesting -= 1
            if sv == allseen:
                gca.add(v)
                sv |= poison
                if v in nodes:
                    # history is linear
                    return {v}
        if sv < poison:
            for p in pfunc(v):
                sp = seen[p]
                if p == nullrev:
                    continue
                if sp == 0:
                    seen[p] = sv
                    interesting += 1
                elif sp != sv:
                    seen[p] |= sv
        else:
            for p in pfunc(v):
                if p == nullrev:
                    continue
                sp = seen[p]
                if sp and sp < poison:
                    interesting -= 1
                seen[p] = sv
    return gca

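For orientation, a small sketch of how commonancestorsheads() can be driven; the parent map and revision numbers below are made up and are not part of the file:

# Illustrative sketch (not part of ancestor.py): a toy DAG, revision -> parents,
# with -1 standing in for nullrev.
toy_parents = {
    0: [-1],      # root
    1: [0],
    2: [0],
    3: [1, 2],    # merge of 1 and 2
    4: [3],
}

def toy_pfunc(rev):
    return toy_parents[rev]

# Heads of the common ancestors of 1 and 2, i.e. heads(::1 and ::2): just the root.
assert commonancestorsheads(toy_pfunc, 1, 2) == {0}
# When one revision is an ancestor of the other, it is itself the single head.
assert commonancestorsheads(toy_pfunc, 1, 3) == {1}
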
def ancestors(pfunc, *orignodes):
    """
    Returns the common ancestors of a and b that are furthest from a
    root (as measured by longest path).

    pfunc must return a list of parent vertices for a given vertex.
    """
    def deepest(nodes):
        interesting = {}
        count = max(nodes) + 1
        depth = [0] * count
        seen = [0] * count
        mapping = []
        for (i, n) in enumerate(sorted(nodes)):
            depth[n] = 1
            b = 1 << i
            seen[n] = b
            interesting[b] = 1
            mapping.append((b, n))
        nv = count - 1
        while nv >= 0 and len(interesting) > 1:
            v = nv
            nv -= 1
            dv = depth[v]
            if dv == 0:
                continue
            sv = seen[v]
            for p in pfunc(v):
                if p == nullrev:
                    continue
                dp = depth[p]
                nsp = sp = seen[p]
                if dp <= dv:
                    depth[p] = dv + 1
                    if sp != sv:
                        interesting[sv] += 1
                        nsp = seen[p] = sv
                        if sp:
                            interesting[sp] -= 1
                            if interesting[sp] == 0:
                                del interesting[sp]
                elif dv == dp - 1:
                    nsp = sp | sv
                    if nsp == sp:
                        continue
                    seen[p] = nsp
                    interesting.setdefault(nsp, 0)
                    interesting[nsp] += 1
                    interesting[sp] -= 1
                    if interesting[sp] == 0:
                        del interesting[sp]
            interesting[sv] -= 1
            if interesting[sv] == 0:
                del interesting[sv]

        if len(interesting) != 1:
            return []

        k = 0
        for i in interesting:
            k |= i
        return set(n for (i, n) in mapping if k & i)

    gca = commonancestorsheads(pfunc, *orignodes)

    if len(gca) <= 1:
        return gca
    return deepest(gca)

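A sketch of where ancestors() differs from commonancestorsheads(): with a made-up criss-cross shaped DAG there are two heads of the common ancestors, and ancestors() keeps only the one(s) furthest from the root:

# Illustrative sketch (not part of ancestor.py): revisions 4 and 5 both merge
# the branches headed by 2 and 3.
crisscross = {
    0: [-1],
    1: [0],
    2: [1],
    3: [0],
    4: [2, 3],
    5: [2, 3],
}

def crisscross_pfunc(rev):
    return crisscross[rev]

# Both 2 and 3 are heads of the common ancestors of 4 and 5 ...
assert commonancestorsheads(crisscross_pfunc, 4, 5) == {2, 3}
# ... but 2 is further from the root (longest path 0-1-2), so it wins.
assert ancestors(crisscross_pfunc, 4, 5) == {2}
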
class incrementalmissingancestors(object):
    '''persistent state used to calculate missing ancestors incrementally

    Although similar in spirit to lazyancestors below, this is a separate class
    because trying to support contains and missingancestors operations with the
    same internal data structures adds needless complexity.'''
    def __init__(self, pfunc, bases):
        self.bases = set(bases)
        if not self.bases:
            self.bases.add(nullrev)
        self.pfunc = pfunc

    def hasbases(self):
        '''whether the common set has any non-trivial bases'''
        return self.bases and self.bases != {nullrev}

    def addbases(self, newbases):
        '''grow the ancestor set by adding new bases'''
        self.bases.update(newbases)

    def removeancestorsfrom(self, revs):
        '''remove all ancestors of bases from the set revs (in place)'''
        bases = self.bases
        pfunc = self.pfunc
        revs.difference_update(bases)
        # nullrev is always an ancestor
        revs.discard(nullrev)
        if not revs:
            return
        # anything in revs > start is definitely not an ancestor of bases
        # revs <= start needs to be investigated
        start = max(bases)
        keepcount = sum(1 for r in revs if r > start)
        if len(revs) == keepcount:
            # no revs to consider
            return

        for curr in pycompat.xrange(start, min(revs) - 1, -1):
            if curr not in bases:
                continue
            revs.discard(curr)
            bases.update(pfunc(curr))
            if len(revs) == keepcount:
                # no more potential revs to discard
                break

    def missingancestors(self, revs):
        '''return all the ancestors of revs that are not ancestors of self.bases

        This may include elements from revs.

        Equivalent to the revset (::revs - ::self.bases). Revs are returned in
        revision number order, which is a topological order.'''
        revsvisit = set(revs)
        basesvisit = self.bases
        pfunc = self.pfunc
        bothvisit = revsvisit.intersection(basesvisit)
        revsvisit.difference_update(bothvisit)
        if not revsvisit:
            return []

        start = max(max(revsvisit), max(basesvisit))
        # At this point, we hold the invariants that:
        # - revsvisit is the set of nodes we know are an ancestor of at least
        #   one of the nodes in revs
        # - basesvisit is the same for bases
        # - bothvisit is the set of nodes we know are ancestors of at least one
        #   of the nodes in revs and one of the nodes in bases. bothvisit and
        #   revsvisit are mutually exclusive, but bothvisit is a subset of
        #   basesvisit.
        # Now we walk down in reverse topo order, adding parents of nodes
        # already visited to the sets while maintaining the invariants. When a
        # node is found in both revsvisit and basesvisit, it is removed from
        # revsvisit and added to bothvisit. When revsvisit becomes empty, there
        # are no more ancestors of revs that aren't also ancestors of bases, so
        # exit.

        missing = []
        for curr in pycompat.xrange(start, nullrev, -1):
            if not revsvisit:
                break

            if curr in bothvisit:
                bothvisit.remove(curr)
                # curr's parents might have made it into revsvisit through
                # another path
                for p in pfunc(curr):
                    revsvisit.discard(p)
                    basesvisit.add(p)
                    bothvisit.add(p)
                continue

            if curr in revsvisit:
                missing.append(curr)
                revsvisit.remove(curr)
                thisvisit = revsvisit
                othervisit = basesvisit
            elif curr in basesvisit:
                thisvisit = basesvisit
                othervisit = revsvisit
            else:
                # not an ancestor of revs or bases: ignore
                continue

            for p in pfunc(curr):
                if p == nullrev:
                    pass
                elif p in othervisit or p in bothvisit:
                    # p is implicitly in thisvisit. This means p is or should be
                    # in bothvisit
                    revsvisit.discard(p)
                    basesvisit.add(p)
                    bothvisit.add(p)
                else:
                    # visit later
                    thisvisit.add(p)

        missing.reverse()
        return missing

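A sketch of typical incrementalmissingancestors usage, again over a made-up DAG: as bases grow, the set of "missing" ancestors shrinks, and removeancestorsfrom() prunes a working set in place:

# Illustrative sketch (not part of ancestor.py): revision -> list of parents.
dag = {
    0: [-1],
    1: [0],
    2: [1],
    3: [1],
    4: [2, 3],
}

def dag_pfunc(rev):
    return dag[rev]

inc = incrementalmissingancestors(dag_pfunc, [1])
assert inc.hasbases()
# ::4 - ::1, returned in revision-number (topological) order.
assert inc.missingancestors([4]) == [2, 3, 4]

# Growing the bases shrinks what is still missing.
inc.addbases([3])
assert inc.missingancestors([4]) == [2, 4]

# removeancestorsfrom() drops members of revs that are ancestors of the bases.
revs = {0, 2, 3, 4}
inc.removeancestorsfrom(revs)
assert revs == {2, 4}
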
# Extracted from lazyancestors.__iter__ to avoid a reference cycle
def _lazyancestorsiter(parentrevs, initrevs, stoprev, inclusive):
    seen = {nullrev}
    heappush = heapq.heappush
    heappop = heapq.heappop
+    heapreplace = heapq.heapreplace
    see = seen.add

    if inclusive:
        visit = [-r for r in initrevs]
        seen.update(initrevs)
        heapq.heapify(visit)
    else:
        visit = []
        heapq.heapify(visit)
        for r in initrevs:
            p1, p2 = parentrevs(r)
            if p1 not in seen:
                heappush(visit, -p1)
                see(p1)
            if p2 not in seen:
                heappush(visit, -p2)
                see(p2)

    while visit:
        current = -visit[0]
        if current < stoprev:
            break
        yield current
        # optimize out heapq operation if p1 is known to be the next highest
        # revision, which is quite common in linear history.
        p1, p2 = parentrevs(current)
        if p1 not in seen:
            if current - p1 == 1:
                visit[0] = -p1
            else:
-                heappop(visit)
-                heappush(visit, -p1)
+                heapreplace(visit, -p1)
            see(p1)
        else:
            heappop(visit)
        if p2 not in seen:
            heappush(visit, -p2)
            see(p2)

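The generator keeps a heap of negated revision numbers, so visit[0] is always the negative of the highest revision still to be emitted; that is what makes both the visit[0] = -p1 shortcut and the heapreplace() call valid. A standalone sketch with a made-up parentrevs() returning (p1, p2) pairs, -1 for a missing parent:

# Illustrative sketch (not part of ancestor.py).
toy_parentrevs = {
    0: (-1, -1),
    1: (0, -1),
    2: (1, -1),
    3: (1, -1),
    4: (2, 3),
}

def parentrevs(rev):
    return toy_parentrevs[rev]

# Ancestors of rev 4, excluding 4 itself, highest revision first.
assert list(_lazyancestorsiter(parentrevs, [4], 0, False)) == [3, 2, 1, 0]
# inclusive=True also yields the starting revision(s).
assert list(_lazyancestorsiter(parentrevs, [4], 0, True)) == [4, 3, 2, 1, 0]
# stoprev cuts the walk off below the given revision.
assert list(_lazyancestorsiter(parentrevs, [4], 2, False)) == [3, 2]
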
class lazyancestors(object):
    def __init__(self, pfunc, revs, stoprev=0, inclusive=False):
        """Create a new object generating ancestors for the given revs. Does
        not generate revs lower than stoprev.

        This is computed lazily starting from revs. The object supports
        iteration and membership.

        cl should be a changelog and revs should be an iterable. inclusive is
        a boolean that indicates whether revs should be included. Revs lower
        than stoprev will not be generated.

        Result does not include the null revision."""
        self._parentrevs = pfunc
        self._initrevs = revs = [r for r in revs if r >= stoprev]
        self._stoprev = stoprev
        self._inclusive = inclusive

        self._containsseen = set()
        self._containsiter = _lazyancestorsiter(self._parentrevs,
                                                self._initrevs,
                                                self._stoprev,
                                                self._inclusive)

    def __nonzero__(self):
        """False if the set is empty, True otherwise."""
        try:
            next(iter(self))
            return True
        except StopIteration:
            return False

    __bool__ = __nonzero__

    def __iter__(self):
        """Generate the ancestors of _initrevs in reverse topological order.

        If inclusive is False, yield a sequence of revision numbers starting
        with the parents of each revision in revs, i.e., each revision is
        *not* considered an ancestor of itself. Results are emitted in reverse
        revision number order. That order is also topological: a child is
        always emitted before its parent.

        If inclusive is True, the source revisions are also yielded. The
        reverse revision number order is still enforced."""
        for rev in _lazyancestorsiter(self._parentrevs, self._initrevs,
                                      self._stoprev, self._inclusive):
            yield rev

    def __contains__(self, target):
        """Test whether target is an ancestor of self._initrevs."""
        seen = self._containsseen
        if target in seen:
            return True
        iter = self._containsiter
        if iter is None:
            # Iterator exhausted
            return False
        # Only integer target is valid, but some callers expect 'None in self'
        # to be False. So we explicitly allow it.
        if target is None:
            return False

        see = seen.add
        try:
            while True:
                rev = next(iter)
                see(rev)
                if rev == target:
                    return True
                if rev < target:
                    return False
        except StopIteration:
            # Set to None to indicate fast-path can be used next time, and to
            # free up memory.
            self._containsiter = None
            return False
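
Finally, a sketch of the lazyancestors object itself, exercising iteration, truthiness and membership over a made-up linear history where each revision's only parent is the previous one:

# Illustrative sketch (not part of ancestor.py).
def linear_parentrevs(rev):
    return (rev - 1, -1)

la = lazyancestors(linear_parentrevs, [3], stoprev=1, inclusive=True)
assert bool(la)                 # the ancestor set is non-empty
assert list(la) == [3, 2, 1]    # revs below stoprev are not generated
assert 2 in la                  # membership consumes the cached iterator lazily
assert 0 not in la              # below stoprev, so never yielded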