##// END OF EJS Templates
ancestor: incrementalmissingancestors.basesheads()...
Georges Racinet -
r41280:4856c9b8 default
parent child Browse files
Show More
@@ -1,420 +1,424 b''
1 1 # ancestor.py - generic DAG ancestor algorithm for mercurial
2 2 #
3 3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import heapq
11 11
12 12 from .node import nullrev
13 13 from . import (
14 dagop,
14 15 policy,
15 16 pycompat,
16 17 )
17 18
18 19 parsers = policy.importmod(r'parsers')
19 20
def commonancestorsheads(pfunc, *nodes):
    """Return a set holding the heads of all common ancestors of *nodes*,
    i.e. heads(::nodes[0] and ::nodes[1] and ...).

    pfunc must return a list of parent vertices for a given vertex.
    """
    if not isinstance(nodes, set):
        nodes = set(nodes)
    if nullrev in nodes:
        return set()
    if len(nodes) <= 1:
        return nodes

    # Each input node owns one bit.  A vertex whose mark accumulates every
    # input bit (fullmask) is a common ancestor; 'dead' is one bit higher
    # and poisons vertices known to sit below an already-found head.
    fullmask = (1 << len(nodes)) - 1
    dead = 1 << len(nodes)
    marks = [0] * (max(nodes) + 1)
    for idx, node in enumerate(nodes):
        marks[node] = 1 << idx

    heads = set()
    # number of marked, not-yet-poisoned vertices left to examine
    pending = len(nodes)
    for rev in pycompat.xrange(len(marks) - 1, -1, -1):
        if not pending:
            break
        mark = marks[rev]
        if not mark:
            continue
        if mark < dead:
            pending -= 1
            if mark == fullmask:
                heads.add(rev)
                mark |= dead
                if rev in nodes:
                    # history is linear
                    return {rev}
            if mark < dead:
                # still live: propagate this vertex's mark to its parents
                for parent in pfunc(rev):
                    if parent == nullrev:
                        continue
                    pmark = marks[parent]
                    if pmark == 0:
                        marks[parent] = mark
                        pending += 1
                    elif pmark != mark:
                        marks[parent] |= mark
        else:
            # poisoned: poison every parent as well
            for parent in pfunc(rev):
                if parent == nullrev:
                    continue
                pmark = marks[parent]
                if pmark and pmark < dead:
                    pending -= 1
                marks[parent] = mark
    return heads
75 76
def ancestors(pfunc, *orignodes):
    """
    Returns the common ancestors of a and b that are furthest from a
    root (as measured by longest path).

    pfunc must return a list of parent vertices for a given vertex.
    """
    def deepest(nodes):
        # Among the candidate heads in 'nodes', keep only those at maximal
        # depth (longest path from a root).  Each candidate owns one bit;
        # seen[v] records which candidates v lies under, depth[v] the
        # longest-path depth found so far, and interesting counts, per
        # bitmask, how many vertices currently carry that mask.
        interesting = {}
        count = max(nodes) + 1
        depth = [0] * count
        seen = [0] * count
        mapping = []
        for (i, n) in enumerate(sorted(nodes)):
            depth[n] = 1
            b = 1 << i
            seen[n] = b
            interesting[b] = 1
            mapping.append((b, n))
        # walk from the highest revision down; stop when a single mask
        # remains, i.e. all live vertices agree on the same candidate set
        nv = count - 1
        while nv >= 0 and len(interesting) > 1:
            v = nv
            nv -= 1
            dv = depth[v]
            if dv == 0:
                # never reached from any candidate: skip
                continue
            sv = seen[v]
            for p in pfunc(v):
                if p == nullrev:
                    continue
                dp = depth[p]
                nsp = sp = seen[p]
                if dp <= dv:
                    # found a (strictly) longer path to p: p inherits v's
                    # depth + 1 and v's mask outright
                    depth[p] = dv + 1
                    if sp != sv:
                        interesting[sv] += 1
                        nsp = seen[p] = sv
                        if sp:
                            interesting[sp] -= 1
                            if interesting[sp] == 0:
                                del interesting[sp]
                elif dv == dp - 1:
                    # p already exactly one level deeper: merge the masks
                    nsp = sp | sv
                    if nsp == sp:
                        continue
                    seen[p] = nsp
                    interesting.setdefault(nsp, 0)
                    interesting[nsp] += 1
                    interesting[sp] -= 1
                    if interesting[sp] == 0:
                        del interesting[sp]
            # v has been fully processed; retire its mask contribution
            interesting[sv] -= 1
            if interesting[sv] == 0:
                del interesting[sv]

        # exactly one mask must survive; otherwise there is no unique
        # deepest answer and we return nothing
        if len(interesting) != 1:
            return []

        k = 0
        for i in interesting:
            k |= i
        return set(n for (i, n) in mapping if k & i)

    gca = commonancestorsheads(pfunc, *orignodes)

    if len(gca) <= 1:
        return gca
    return deepest(gca)
144 145
class incrementalmissingancestors(object):
    '''persistent state used to calculate missing ancestors incrementally

    Although similar in spirit to lazyancestors below, this is a separate class
    because trying to support contains and missingancestors operations with the
    same internal data structures adds needless complexity.'''
    def __init__(self, pfunc, bases):
        # pfunc(rev) must return the list of parent revisions of rev
        self.bases = set(bases)
        if not self.bases:
            # normalize: an empty base set is represented as {nullrev}
            self.bases.add(nullrev)
        self.pfunc = pfunc

    def hasbases(self):
        '''whether the common set has any non-trivial bases'''
        return self.bases and self.bases != {nullrev}

    def addbases(self, newbases):
        '''grow the ancestor set by adding new bases'''
        self.bases.update(newbases)

    def basesheads(self):
        '''return the heads of the current set of bases

        Computed by dagop.headrevs with this object's parent function.'''
        return dagop.headrevs(self.bases, self.pfunc)

    def removeancestorsfrom(self, revs):
        '''remove all ancestors of bases from the set revs (in place)'''
        bases = self.bases
        pfunc = self.pfunc
        revs.difference_update(bases)
        # nullrev is always an ancestor
        revs.discard(nullrev)
        if not revs:
            return
        # anything in revs > start is definitely not an ancestor of bases
        # revs <= start needs to be investigated
        start = max(bases)
        keepcount = sum(1 for r in revs if r > start)
        if len(revs) == keepcount:
            # no revs to consider
            return

        # walk down from start, growing 'bases' through parent links so that
        # any rev encountered inside it is a base ancestor and can be dropped.
        # NOTE: this also mutates self.bases (bases is an alias, not a copy).
        for curr in pycompat.xrange(start, min(revs) - 1, -1):
            if curr not in bases:
                continue
            revs.discard(curr)
            bases.update(pfunc(curr))
            if len(revs) == keepcount:
                # no more potential revs to discard
                break

    def missingancestors(self, revs):
        '''return all the ancestors of revs that are not ancestors of self.bases

        This may include elements from revs.

        Equivalent to the revset (::revs - ::self.bases). Revs are returned in
        revision number order, which is a topological order.'''
        revsvisit = set(revs)
        # basesvisit aliases (and thus incrementally extends) self.bases
        basesvisit = self.bases
        pfunc = self.pfunc
        bothvisit = revsvisit.intersection(basesvisit)
        revsvisit.difference_update(bothvisit)
        if not revsvisit:
            return []

        start = max(max(revsvisit), max(basesvisit))
        # At this point, we hold the invariants that:
        # - revsvisit is the set of nodes we know are an ancestor of at least
        #   one of the nodes in revs
        # - basesvisit is the same for bases
        # - bothvisit is the set of nodes we know are ancestors of at least one
        #   of the nodes in revs and one of the nodes in bases. bothvisit and
        #   revsvisit are mutually exclusive, but bothvisit is a subset of
        #   basesvisit.
        # Now we walk down in reverse topo order, adding parents of nodes
        # already visited to the sets while maintaining the invariants. When a
        # node is found in both revsvisit and basesvisit, it is removed from
        # revsvisit and added to bothvisit. When revsvisit becomes empty, there
        # are no more ancestors of revs that aren't also ancestors of bases, so
        # exit.

        missing = []
        for curr in pycompat.xrange(start, nullrev, -1):
            if not revsvisit:
                break

            if curr in bothvisit:
                bothvisit.remove(curr)
                # curr's parents might have made it into revsvisit through
                # another path
                for p in pfunc(curr):
                    revsvisit.discard(p)
                    basesvisit.add(p)
                    bothvisit.add(p)
                continue

            if curr in revsvisit:
                missing.append(curr)
                revsvisit.remove(curr)
                thisvisit = revsvisit
                othervisit = basesvisit
            elif curr in basesvisit:
                thisvisit = basesvisit
                othervisit = revsvisit
            else:
                # not an ancestor of revs or bases: ignore
                continue

            for p in pfunc(curr):
                if p == nullrev:
                    pass
                elif p in othervisit or p in bothvisit:
                    # p is implicitly in thisvisit. This means p is or should be
                    # in bothvisit
                    revsvisit.discard(p)
                    basesvisit.add(p)
                    bothvisit.add(p)
                else:
                    # visit later
                    thisvisit.add(p)

        # collected in descending order above; reverse to get ascending
        # revision number order (a topological order)
        missing.reverse()
        return missing
264 268
265 269 # Extracted from lazyancestors.__iter__ to avoid a reference cycle
def _lazyancestorsiter(parentrevs, initrevs, stoprev, inclusive):
    """Iterate ancestors of initrevs in reverse revision-number order.

    parentrevs(rev) must return the pair (p1, p2) of parents of rev.
    Revisions below stoprev are not generated; initrevs themselves are
    yielded only if inclusive is true.  Assumes parent revisions are
    numerically smaller than their children (as in a revlog) -- the
    ordering guarantee relies on it.
    """
    # nullrev is pre-seeded so parent traversal never enqueues it
    seen = {nullrev}
    # bind hot helpers to locals for the loop below
    heappush = heapq.heappush
    heappop = heapq.heappop
    heapreplace = heapq.heapreplace
    see = seen.add

    if inclusive:
        # revisions are negated so the min-heap pops the highest rev first
        visit = [-r for r in initrevs]
        seen.update(initrevs)
        heapq.heapify(visit)
    else:
        visit = []
        heapq.heapify(visit)
        # seed the heap with the (deduplicated) parents of each init rev
        for r in initrevs:
            p1, p2 = parentrevs(r)
            if p1 not in seen:
                heappush(visit, -p1)
                see(p1)
            if p2 not in seen:
                heappush(visit, -p2)
                see(p2)

    while visit:
        current = -visit[0]
        if current < stoprev:
            # heap top is the maximum, so everything left is below stoprev
            break
        yield current
        # optimize out heapq operation if p1 is known to be the next highest
        # revision, which is quite common in linear history.
        p1, p2 = parentrevs(current)
        if p1 not in seen:
            if current - p1 == 1:
                # p1 is still the largest entry: overwrite the top in place
                visit[0] = -p1
            else:
                heapreplace(visit, -p1)
            see(p1)
        else:
            heappop(visit)
        if p2 not in seen:
            heappush(visit, -p2)
            see(p2)
308 312
class lazyancestors(object):
    """Lazily computed set of ancestors of a group of revisions.

    Supports iteration (in reverse revision-number order, which is also a
    topological order) and membership testing.  The null revision is never
    part of the result, and neither is anything below the stop revision.
    """

    def __init__(self, pfunc, revs, stoprev=0, inclusive=False):
        """Initialize from an iterable ``revs`` of revision numbers.

        pfunc(rev) must return the (p1, p2) parent pair of rev.  When
        ``inclusive`` is true the revisions in ``revs`` themselves belong
        to the set; in any case nothing lower than ``stoprev`` is
        generated."""
        self._parentrevs = pfunc
        # revisions below stoprev can never contribute to the result
        self._initrevs = revs = [r for r in revs if r >= stoprev]
        self._stoprev = stoprev
        self._inclusive = inclusive

        # shared state for successive __contains__ calls: revisions already
        # produced, plus the single iterator used to produce more on demand
        self._containsseen = set()
        self._containsiter = _lazyancestorsiter(self._parentrevs,
                                                self._initrevs,
                                                self._stoprev,
                                                self._inclusive)

    def __nonzero__(self):
        """False if the set is empty, True otherwise."""
        for _ in self:
            return True
        return False

    __bool__ = __nonzero__

    def __iter__(self):
        """Generate the ancestors of _initrevs in reverse topological order.

        If inclusive is False, yield a sequence of revision numbers starting
        with the parents of each revision in revs, i.e., each revision is
        *not* considered an ancestor of itself.  Results come out in reverse
        revision number order, which is also topological: a child is always
        emitted before its parent.

        If inclusive is True, the source revisions are yielded as well; the
        reverse revision number order is still enforced."""
        return _lazyancestorsiter(self._parentrevs, self._initrevs,
                                  self._stoprev, self._inclusive)

    def __contains__(self, target):
        """Test whether target is an ancestor of self._initrevs."""
        seen = self._containsseen
        if target in seen:
            return True
        # Only integer targets are valid, but some callers expect
        # 'None in self' to be False, so allow it explicitly.
        if target is None:
            return False
        walker = self._containsiter
        if walker is None:
            # iterator already exhausted; everything reachable is in 'seen'
            return False

        see = seen.add
        try:
            while True:
                rev = next(walker)
                see(rev)
                if rev == target:
                    return True
                if rev < target:
                    # emitted in decreasing order: target can no longer appear
                    return False
        except StopIteration:
            # Drop the iterator to free memory and enable the fast path on
            # subsequent calls.
            self._containsiter = None
            return False
385 389
class rustlazyancestors(object):
    """Lazy ancestor set backed by the Rust implementation in ``parsers``.

    Mirrors the interface of ``lazyancestors`` (iteration, truth value,
    membership) while delegating the traversal itself to
    ``parsers.rustlazyancestors``.
    """

    def __init__(self, index, revs, stoprev=0, inclusive=False):
        self._index = index
        self._stoprev = stoprev
        self._inclusive = inclusive
        # no need to prefilter out init revs that are smaller than stoprev,
        # it's done by rustlazyancestors constructor.
        # we need to convert to a list, because our ruslazyancestors
        # constructor (from C code) doesn't understand anything else yet
        self._initrevs = initrevs = list(revs)

        self._containsiter = parsers.rustlazyancestors(
            index, initrevs, stoprev, inclusive)

    def __nonzero__(self):
        """False if the set is empty, True otherwise.

        It's better to duplicate this essentially trivial method than
        to subclass lazyancestors
        """
        for _ in self:
            return True
        return False

    def __iter__(self):
        # a fresh underlying iterator per call, like lazyancestors.__iter__
        return parsers.rustlazyancestors(self._index,
                                         self._initrevs,
                                         self._stoprev,
                                         self._inclusive)

    def __contains__(self, target):
        # membership is delegated to the persistent Rust-side iterator
        return target in self._containsiter
General Comments 0
You need to be logged in to leave comments. Login now