ancestor: return early from _lazyancestorsiter() when reached to stoprev...
Yuya Nishihara
r39570:fd9029d3 default
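The diff below replaces the old `if current >= stoprev:` filter inside the iteration loop with an early `break`: the visit heap stores negated revision numbers, so revisions are popped in decreasing order, and once one drops below stoprev nothing left on the heap can be at or above it. A minimal standalone sketch of that new loop shape on a toy DAG (toyancestors, NULLREV and the parents dict are illustrative stand-ins, not part of the module):

# Sketch (not part of the change): a toy replica of the new
# _lazyancestorsiter() loop, run against a hand-built DAG to show that
# iteration now stops as soon as a popped revision drops below stoprev.
import heapq

NULLREV = -1  # stand-in for mercurial.node.nullrev

def toyancestors(parentrevs, initrevs, stoprev, inclusive):
    seen = {NULLREV}
    if inclusive:
        visit = [-r for r in initrevs]
        seen.update(initrevs)
        heapq.heapify(visit)
    else:
        visit = []
        for r in initrevs:
            for p in parentrevs(r):
                if p not in seen:
                    heapq.heappush(visit, -p)
                    seen.add(p)
    while visit:
        current = -heapq.heappop(visit)
        if current < stoprev:
            # the heap pops revisions in decreasing order, so nothing
            # remaining can be >= stoprev: stop instead of filtering on
            break
        yield current
        for p in parentrevs(current):
            if p not in seen:
                heapq.heappush(visit, -p)
                seen.add(p)

# toy linear-ish DAG: 0 <- 1 <- 2 <- {3, 4}
parents = {0: [NULLREV], 1: [0], 2: [1], 3: [2], 4: [2]}
print(list(toyancestors(parents.get, [3, 4], stoprev=2, inclusive=True)))
# [4, 3, 2] -- revision 1 is never yielded and the walk ends there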
@@ -1,367 +1,368 @@
1 1 # ancestor.py - generic DAG ancestor algorithm for mercurial
2 2 #
3 3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import heapq
11 11
12 12 from .node import nullrev
13 13 from . import (
14 14 pycompat,
15 15 )
16 16
17 17 def commonancestorsheads(pfunc, *nodes):
18 18 """Returns a set with the heads of all common ancestors of all nodes,
19 19 heads(::nodes[0] and ::nodes[1] and ...) .
20 20
21 21 pfunc must return a list of parent vertices for a given vertex.
22 22 """
23 23 if not isinstance(nodes, set):
24 24 nodes = set(nodes)
25 25 if nullrev in nodes:
26 26 return set()
27 27 if len(nodes) <= 1:
28 28 return nodes
29 29
30 30 allseen = (1 << len(nodes)) - 1
31 31 seen = [0] * (max(nodes) + 1)
32 32 for i, n in enumerate(nodes):
33 33 seen[n] = 1 << i
34 34 poison = 1 << (i + 1)
35 35
36 36 gca = set()
37 37 interesting = len(nodes)
38 38 nv = len(seen) - 1
39 39 while nv >= 0 and interesting:
40 40 v = nv
41 41 nv -= 1
42 42 if not seen[v]:
43 43 continue
44 44 sv = seen[v]
45 45 if sv < poison:
46 46 interesting -= 1
47 47 if sv == allseen:
48 48 gca.add(v)
49 49 sv |= poison
50 50 if v in nodes:
51 51 # history is linear
52 52 return {v}
53 53 if sv < poison:
54 54 for p in pfunc(v):
55 55 sp = seen[p]
56 56 if p == nullrev:
57 57 continue
58 58 if sp == 0:
59 59 seen[p] = sv
60 60 interesting += 1
61 61 elif sp != sv:
62 62 seen[p] |= sv
63 63 else:
64 64 for p in pfunc(v):
65 65 if p == nullrev:
66 66 continue
67 67 sp = seen[p]
68 68 if sp and sp < poison:
69 69 interesting -= 1
70 70 seen[p] = sv
71 71 return gca
72 72
73 73 def ancestors(pfunc, *orignodes):
74 74 """
75 75 Returns the common ancestors of a and b that are furthest from a
76 76 root (as measured by longest path).
77 77
78 78 pfunc must return a list of parent vertices for a given vertex.
79 79 """
80 80 def deepest(nodes):
81 81 interesting = {}
82 82 count = max(nodes) + 1
83 83 depth = [0] * count
84 84 seen = [0] * count
85 85 mapping = []
86 86 for (i, n) in enumerate(sorted(nodes)):
87 87 depth[n] = 1
88 88 b = 1 << i
89 89 seen[n] = b
90 90 interesting[b] = 1
91 91 mapping.append((b, n))
92 92 nv = count - 1
93 93 while nv >= 0 and len(interesting) > 1:
94 94 v = nv
95 95 nv -= 1
96 96 dv = depth[v]
97 97 if dv == 0:
98 98 continue
99 99 sv = seen[v]
100 100 for p in pfunc(v):
101 101 if p == nullrev:
102 102 continue
103 103 dp = depth[p]
104 104 nsp = sp = seen[p]
105 105 if dp <= dv:
106 106 depth[p] = dv + 1
107 107 if sp != sv:
108 108 interesting[sv] += 1
109 109 nsp = seen[p] = sv
110 110 if sp:
111 111 interesting[sp] -= 1
112 112 if interesting[sp] == 0:
113 113 del interesting[sp]
114 114 elif dv == dp - 1:
115 115 nsp = sp | sv
116 116 if nsp == sp:
117 117 continue
118 118 seen[p] = nsp
119 119 interesting.setdefault(nsp, 0)
120 120 interesting[nsp] += 1
121 121 interesting[sp] -= 1
122 122 if interesting[sp] == 0:
123 123 del interesting[sp]
124 124 interesting[sv] -= 1
125 125 if interesting[sv] == 0:
126 126 del interesting[sv]
127 127
128 128 if len(interesting) != 1:
129 129 return []
130 130
131 131 k = 0
132 132 for i in interesting:
133 133 k |= i
134 134 return set(n for (i, n) in mapping if k & i)
135 135
136 136 gca = commonancestorsheads(pfunc, *orignodes)
137 137
138 138 if len(gca) <= 1:
139 139 return gca
140 140 return deepest(gca)
141 141
142 142 class incrementalmissingancestors(object):
143 143 '''persistent state used to calculate missing ancestors incrementally
144 144
145 145 Although similar in spirit to lazyancestors below, this is a separate class
146 146 because trying to support contains and missingancestors operations with the
147 147 same internal data structures adds needless complexity.'''
148 148 def __init__(self, pfunc, bases):
149 149 self.bases = set(bases)
150 150 if not self.bases:
151 151 self.bases.add(nullrev)
152 152 self.pfunc = pfunc
153 153
154 154 def hasbases(self):
155 155 '''whether the common set has any non-trivial bases'''
156 156 return self.bases and self.bases != {nullrev}
157 157
158 158 def addbases(self, newbases):
159 159 '''grow the ancestor set by adding new bases'''
160 160 self.bases.update(newbases)
161 161
162 162 def removeancestorsfrom(self, revs):
163 163 '''remove all ancestors of bases from the set revs (in place)'''
164 164 bases = self.bases
165 165 pfunc = self.pfunc
166 166 revs.difference_update(bases)
167 167 # nullrev is always an ancestor
168 168 revs.discard(nullrev)
169 169 if not revs:
170 170 return
171 171 # anything in revs > start is definitely not an ancestor of bases
172 172 # revs <= start needs to be investigated
173 173 start = max(bases)
174 174 keepcount = sum(1 for r in revs if r > start)
175 175 if len(revs) == keepcount:
176 176 # no revs to consider
177 177 return
178 178
179 179 for curr in pycompat.xrange(start, min(revs) - 1, -1):
180 180 if curr not in bases:
181 181 continue
182 182 revs.discard(curr)
183 183 bases.update(pfunc(curr))
184 184 if len(revs) == keepcount:
185 185 # no more potential revs to discard
186 186 break
187 187
188 188 def missingancestors(self, revs):
189 189 '''return all the ancestors of revs that are not ancestors of self.bases
190 190
191 191 This may include elements from revs.
192 192
193 193 Equivalent to the revset (::revs - ::self.bases). Revs are returned in
194 194 revision number order, which is a topological order.'''
195 195 revsvisit = set(revs)
196 196 basesvisit = self.bases
197 197 pfunc = self.pfunc
198 198 bothvisit = revsvisit.intersection(basesvisit)
199 199 revsvisit.difference_update(bothvisit)
200 200 if not revsvisit:
201 201 return []
202 202
203 203 start = max(max(revsvisit), max(basesvisit))
204 204 # At this point, we hold the invariants that:
205 205 # - revsvisit is the set of nodes we know are an ancestor of at least
206 206 # one of the nodes in revs
207 207 # - basesvisit is the same for bases
208 208 # - bothvisit is the set of nodes we know are ancestors of at least one
209 209 # of the nodes in revs and one of the nodes in bases. bothvisit and
210 210 # revsvisit are mutually exclusive, but bothvisit is a subset of
211 211 # basesvisit.
212 212 # Now we walk down in reverse topo order, adding parents of nodes
213 213 # already visited to the sets while maintaining the invariants. When a
214 214 # node is found in both revsvisit and basesvisit, it is removed from
215 215 # revsvisit and added to bothvisit. When revsvisit becomes empty, there
216 216 # are no more ancestors of revs that aren't also ancestors of bases, so
217 217 # exit.
218 218
219 219 missing = []
220 220 for curr in pycompat.xrange(start, nullrev, -1):
221 221 if not revsvisit:
222 222 break
223 223
224 224 if curr in bothvisit:
225 225 bothvisit.remove(curr)
226 226 # curr's parents might have made it into revsvisit through
227 227 # another path
228 228 for p in pfunc(curr):
229 229 revsvisit.discard(p)
230 230 basesvisit.add(p)
231 231 bothvisit.add(p)
232 232 continue
233 233
234 234 if curr in revsvisit:
235 235 missing.append(curr)
236 236 revsvisit.remove(curr)
237 237 thisvisit = revsvisit
238 238 othervisit = basesvisit
239 239 elif curr in basesvisit:
240 240 thisvisit = basesvisit
241 241 othervisit = revsvisit
242 242 else:
243 243 # not an ancestor of revs or bases: ignore
244 244 continue
245 245
246 246 for p in pfunc(curr):
247 247 if p == nullrev:
248 248 pass
249 249 elif p in othervisit or p in bothvisit:
250 250 # p is implicitly in thisvisit. This means p is or should be
251 251 # in bothvisit
252 252 revsvisit.discard(p)
253 253 basesvisit.add(p)
254 254 bothvisit.add(p)
255 255 else:
256 256 # visit later
257 257 thisvisit.add(p)
258 258
259 259 missing.reverse()
260 260 return missing
261 261
262 262 # Extracted from lazyancestors.__iter__ to avoid a reference cycle
263 263 def _lazyancestorsiter(parentrevs, initrevs, stoprev, inclusive):
264 264 seen = {nullrev}
265 265 schedule = heapq.heappush
266 266 nextitem = heapq.heappop
267 267 see = seen.add
268 268
269 269 if inclusive:
270 270 visit = [-r for r in initrevs]
271 271 seen.update(initrevs)
272 272 heapq.heapify(visit)
273 273 else:
274 274 visit = []
275 275 heapq.heapify(visit)
276 276 for r in initrevs:
277 277 for parent in parentrevs(r):
278 278 if parent not in seen:
279 279 schedule(visit, -parent)
280 280 see(parent)
281 281
282 282 while visit:
283 283 current = -nextitem(visit)
284 if current >= stoprev:
285 yield current
286 for parent in parentrevs(current):
287 if parent not in seen:
288 schedule(visit, -parent)
289 see(parent)
284 if current < stoprev:
285 break
286 yield current
287 for parent in parentrevs(current):
288 if parent not in seen:
289 schedule(visit, -parent)
290 see(parent)
290 291
291 292 class lazyancestors(object):
292 293 def __init__(self, pfunc, revs, stoprev=0, inclusive=False):
293 294 """Create a new object generating ancestors for the given revs. Does
294 295 not generate revs lower than stoprev.
295 296
296 297 This is computed lazily starting from revs. The object supports
297 298 iteration and membership.
298 299
299 300 pfunc must return a list of parent revisions for a given revision, and revs should be an iterable. inclusive is
300 301 a boolean that indicates whether revs should be included. Revs lower
301 302 than stoprev will not be generated.
302 303
303 304 Result does not include the null revision."""
304 305 self._parentrevs = pfunc
305 306 self._initrevs = revs = [r for r in revs if r >= stoprev]
306 307 self._stoprev = stoprev
307 308 self._inclusive = inclusive
308 309
309 310 self._containsseen = set()
310 311 self._containsiter = _lazyancestorsiter(self._parentrevs,
311 312 self._initrevs,
312 313 self._stoprev,
313 314 self._inclusive)
314 315
315 316 def __nonzero__(self):
316 317 """False if the set is empty, True otherwise."""
317 318 try:
318 319 next(iter(self))
319 320 return True
320 321 except StopIteration:
321 322 return False
322 323
323 324 __bool__ = __nonzero__
324 325
325 326 def __iter__(self):
326 327 """Generate the ancestors of _initrevs in reverse topological order.
327 328
328 329 If inclusive is False, yield a sequence of revision numbers starting
329 330 with the parents of each revision in revs, i.e., each revision is
330 331 *not* considered an ancestor of itself. Results are emitted in reverse
331 332 revision number order. That order is also topological: a child is
332 333 always emitted before its parent.
333 334
334 335 If inclusive is True, the source revisions are also yielded. The
335 336 reverse revision number order is still enforced."""
336 337 for rev in _lazyancestorsiter(self._parentrevs, self._initrevs,
337 338 self._stoprev, self._inclusive):
338 339 yield rev
339 340
340 341 def __contains__(self, target):
341 342 """Test whether target is an ancestor of self._initrevs."""
342 343 seen = self._containsseen
343 344 if target in seen:
344 345 return True
345 346 iter = self._containsiter
346 347 if iter is None:
347 348 # Iterator exhausted
348 349 return False
349 350 # Only integer target is valid, but some callers expect 'None in self'
350 351 # to be False. So we explicitly allow it.
351 352 if target is None:
352 353 return False
353 354
354 355 see = seen.add
355 356 try:
356 357 while True:
357 358 rev = next(iter)
358 359 see(rev)
359 360 if rev == target:
360 361 return True
361 362 if rev < target:
362 363 return False
363 364 except StopIteration:
364 365 # Set to None to indicate fast-path can be used next time, and to
365 366 # free up memory.
366 367 self._containsiter = None
367 368 return False
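As a quick illustration of the missingancestors() docstring above, which states the result equals the revset (::revs - ::bases) in revision-number order, here is a hedged sketch that checks the class against a naive reachability walk on a toy DAG. It assumes this revision of the mercurial package is importable; the DAG, pfunc and the toyreachable helper are illustrative only:

# Sketch: check missingancestors() against a naive "::revs - ::bases"
# computation on a toy DAG. The helper names below are hypothetical.
from mercurial import ancestor
from mercurial.node import nullrev

#       0
#      / \
#     1   2
#      \ / \
#       3   4
parents = {0: [nullrev, nullrev], 1: [0, nullrev], 2: [0, nullrev],
           3: [1, 2], 4: [2, nullrev]}
pfunc = parents.__getitem__

def toyreachable(revs):
    """All of ::revs, computed naively by walking parents."""
    out, stack = set(), list(revs)
    while stack:
        r = stack.pop()
        if r != nullrev and r not in out:
            out.add(r)
            stack.extend(pfunc(r))
    return out

inc = ancestor.incrementalmissingancestors(pfunc, [1])
missing = inc.missingancestors([3, 4])
assert missing == sorted(toyreachable([3, 4]) - toyreachable([1]))
print(missing)  # [2, 3, 4] -- ancestors of 3 and 4 that 1 does not have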