ancestor: drop another unused variable assignment...
Matt Harbison
r44421:68b09ebf default
@@ -1,431 +1,431 @@
1 1 # ancestor.py - generic DAG ancestor algorithm for mercurial
2 2 #
3 3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import heapq
11 11
12 12 from .node import nullrev
13 13 from . import (
14 14 dagop,
15 15 policy,
16 16 pycompat,
17 17 )
18 18
19 19 parsers = policy.importmod('parsers')
20 20
21 21
22 22 def commonancestorsheads(pfunc, *nodes):
23 23 """Returns a set with the heads of all common ancestors of all nodes,
24 24 heads(::nodes[0] and ::nodes[1] and ...) .
25 25
26 26 pfunc must return a list of parent vertices for a given vertex.
27 27 """
28 28 if not isinstance(nodes, set):
29 29 nodes = set(nodes)
30 30 if nullrev in nodes:
31 31 return set()
32 32 if len(nodes) <= 1:
33 33 return nodes
34 34
35 35 allseen = (1 << len(nodes)) - 1
36 36 seen = [0] * (max(nodes) + 1)
37 37 for i, n in enumerate(nodes):
38 38 seen[n] = 1 << i
39 39 poison = 1 << (i + 1)
40 40
41 41 gca = set()
42 42 interesting = len(nodes)
43 43 nv = len(seen) - 1
44 44 while nv >= 0 and interesting:
45 45 v = nv
46 46 nv -= 1
47 47 if not seen[v]:
48 48 continue
49 49 sv = seen[v]
50 50 if sv < poison:
51 51 interesting -= 1
52 52 if sv == allseen:
53 53 gca.add(v)
54 54 sv |= poison
55 55 if v in nodes:
56 56 # history is linear
57 57 return {v}
58 58 if sv < poison:
59 59 for p in pfunc(v):
60 60 sp = seen[p]
61 61 if p == nullrev:
62 62 continue
63 63 if sp == 0:
64 64 seen[p] = sv
65 65 interesting += 1
66 66 elif sp != sv:
67 67 seen[p] |= sv
68 68 else:
69 69 for p in pfunc(v):
70 70 if p == nullrev:
71 71 continue
72 72 sp = seen[p]
73 73 if sp and sp < poison:
74 74 interesting -= 1
75 75 seen[p] = sv
76 76 return gca
77 77
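Editor's note, not part of this changeset: commonancestorsheads only needs a parent-lookup callable and integer revisions, so it can be exercised against a small hand-built DAG (hypothetical, for illustration only):

    # toy DAG; parents always carry smaller revision numbers than their children
    #
    #   0 -- 1 -- 3
    #    \
    #     2 -- 4
    #
    parents = {0: [-1], 1: [0], 2: [0], 3: [1], 4: [2]}   # -1 is nullrev
    pfunc = parents.__getitem__
    commonancestorsheads(pfunc, 3, 4)   # -> {0}
    commonancestorsheads(pfunc, 1, 3)   # -> {1}  (linear history fast path)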
78 78
79 79 def ancestors(pfunc, *orignodes):
80 80 """
81 81 Returns the common ancestors of a and b that are furthest from a
82 82 root (as measured by longest path).
83 83
84 84 pfunc must return a list of parent vertices for a given vertex.
85 85 """
86 86
87 87 def deepest(nodes):
88 88 interesting = {}
89 89 count = max(nodes) + 1
90 90 depth = [0] * count
91 91 seen = [0] * count
92 92 mapping = []
93 93 for (i, n) in enumerate(sorted(nodes)):
94 94 depth[n] = 1
95 95 b = 1 << i
96 96 seen[n] = b
97 97 interesting[b] = 1
98 98 mapping.append((b, n))
99 99 nv = count - 1
100 100 while nv >= 0 and len(interesting) > 1:
101 101 v = nv
102 102 nv -= 1
103 103 dv = depth[v]
104 104 if dv == 0:
105 105 continue
106 106 sv = seen[v]
107 107 for p in pfunc(v):
108 108 if p == nullrev:
109 109 continue
110 110 dp = depth[p]
111 nsp = sp = seen[p]
111 sp = seen[p]
112 112 if dp <= dv:
113 113 depth[p] = dv + 1
114 114 if sp != sv:
115 115 interesting[sv] += 1
116 nsp = seen[p] = sv
116 seen[p] = sv
117 117 if sp:
118 118 interesting[sp] -= 1
119 119 if interesting[sp] == 0:
120 120 del interesting[sp]
121 121 elif dv == dp - 1:
122 122 nsp = sp | sv
123 123 if nsp == sp:
124 124 continue
125 125 seen[p] = nsp
126 126 interesting.setdefault(nsp, 0)
127 127 interesting[nsp] += 1
128 128 interesting[sp] -= 1
129 129 if interesting[sp] == 0:
130 130 del interesting[sp]
131 131 interesting[sv] -= 1
132 132 if interesting[sv] == 0:
133 133 del interesting[sv]
134 134
135 135 if len(interesting) != 1:
136 136 return []
137 137
138 138 k = 0
139 139 for i in interesting:
140 140 k |= i
141 141 return set(n for (i, n) in mapping if k & i)
142 142
143 143 gca = commonancestorsheads(pfunc, *orignodes)
144 144
145 145 if len(gca) <= 1:
146 146 return gca
147 147 return deepest(gca)
148 148
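Editor's note, not part of this changeset: the difference from commonancestorsheads is that ancestors() keeps only the candidate head(s) furthest from the root. A hypothetical criss-cross DAG shows the distinction:

    # revs 4 and 5 are both merges of 2 and 3; 2 sits deeper in history than 3
    parents = {0: [-1], 1: [0], 2: [1], 3: [0], 4: [2, 3], 5: [2, 3]}
    pfunc = parents.__getitem__
    commonancestorsheads(pfunc, 4, 5)   # -> {2, 3}  (two candidate heads)
    ancestors(pfunc, 4, 5)              # -> {2}     (rev 2 is furthest from the root)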
149 149
150 150 class incrementalmissingancestors(object):
151 151 '''persistent state used to calculate missing ancestors incrementally
152 152
153 153 Although similar in spirit to lazyancestors below, this is a separate class
154 154 because trying to support contains and missingancestors operations with the
155 155 same internal data structures adds needless complexity.'''
156 156
157 157 def __init__(self, pfunc, bases):
158 158 self.bases = set(bases)
159 159 if not self.bases:
160 160 self.bases.add(nullrev)
161 161 self.pfunc = pfunc
162 162
163 163 def hasbases(self):
164 164 '''whether the common set has any non-trivial bases'''
165 165 return self.bases and self.bases != {nullrev}
166 166
167 167 def addbases(self, newbases):
168 168 '''grow the ancestor set by adding new bases'''
169 169 self.bases.update(newbases)
170 170
171 171 def basesheads(self):
172 172 return dagop.headrevs(self.bases, self.pfunc)
173 173
174 174 def removeancestorsfrom(self, revs):
175 175 '''remove all ancestors of bases from the set revs (in place)'''
176 176 bases = self.bases
177 177 pfunc = self.pfunc
178 178 revs.difference_update(bases)
179 179 # nullrev is always an ancestor
180 180 revs.discard(nullrev)
181 181 if not revs:
182 182 return
183 183 # anything in revs > start is definitely not an ancestor of bases
184 184 # revs <= start needs to be investigated
185 185 start = max(bases)
186 186 keepcount = sum(1 for r in revs if r > start)
187 187 if len(revs) == keepcount:
188 188 # no revs to consider
189 189 return
190 190
191 191 for curr in pycompat.xrange(start, min(revs) - 1, -1):
192 192 if curr not in bases:
193 193 continue
194 194 revs.discard(curr)
195 195 bases.update(pfunc(curr))
196 196 if len(revs) == keepcount:
197 197 # no more potential revs to discard
198 198 break
199 199
200 200 def missingancestors(self, revs):
201 201 '''return all the ancestors of revs that are not ancestors of self.bases
202 202
203 203 This may include elements from revs.
204 204
205 205 Equivalent to the revset (::revs - ::self.bases). Revs are returned in
206 206 revision number order, which is a topological order.'''
207 207 revsvisit = set(revs)
208 208 basesvisit = self.bases
209 209 pfunc = self.pfunc
210 210 bothvisit = revsvisit.intersection(basesvisit)
211 211 revsvisit.difference_update(bothvisit)
212 212 if not revsvisit:
213 213 return []
214 214
215 215 start = max(max(revsvisit), max(basesvisit))
216 216 # At this point, we hold the invariants that:
217 217 # - revsvisit is the set of nodes we know are an ancestor of at least
218 218 # one of the nodes in revs
219 219 # - basesvisit is the same for bases
220 220 # - bothvisit is the set of nodes we know are ancestors of at least one
221 221 # of the nodes in revs and one of the nodes in bases. bothvisit and
222 222 # revsvisit are mutually exclusive, but bothvisit is a subset of
223 223 # basesvisit.
224 224 # Now we walk down in reverse topo order, adding parents of nodes
225 225 # already visited to the sets while maintaining the invariants. When a
226 226 # node is found in both revsvisit and basesvisit, it is removed from
227 227 # revsvisit and added to bothvisit. When revsvisit becomes empty, there
228 228 # are no more ancestors of revs that aren't also ancestors of bases, so
229 229 # exit.
230 230
231 231 missing = []
232 232 for curr in pycompat.xrange(start, nullrev, -1):
233 233 if not revsvisit:
234 234 break
235 235
236 236 if curr in bothvisit:
237 237 bothvisit.remove(curr)
238 238 # curr's parents might have made it into revsvisit through
239 239 # another path
240 240 for p in pfunc(curr):
241 241 revsvisit.discard(p)
242 242 basesvisit.add(p)
243 243 bothvisit.add(p)
244 244 continue
245 245
246 246 if curr in revsvisit:
247 247 missing.append(curr)
248 248 revsvisit.remove(curr)
249 249 thisvisit = revsvisit
250 250 othervisit = basesvisit
251 251 elif curr in basesvisit:
252 252 thisvisit = basesvisit
253 253 othervisit = revsvisit
254 254 else:
255 255 # not an ancestor of revs or bases: ignore
256 256 continue
257 257
258 258 for p in pfunc(curr):
259 259 if p == nullrev:
260 260 pass
261 261 elif p in othervisit or p in bothvisit:
262 262 # p is implicitly in thisvisit. This means p is or should be
263 263 # in bothvisit
264 264 revsvisit.discard(p)
265 265 basesvisit.add(p)
266 266 bothvisit.add(p)
267 267 else:
268 268 # visit later
269 269 thisvisit.add(p)
270 270
271 271 missing.reverse()
272 272 return missing
273 273
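Editor's note, not part of this changeset: a sketch of how the incremental object is used, reusing the hypothetical DAG from the note above; the bases are supplied up front and may keep growing:

    parents = {0: [-1], 1: [0], 2: [1], 3: [0], 4: [2, 3], 5: [2, 3]}
    inc = incrementalmissingancestors(parents.__getitem__, [1])
    inc.missingancestors([4])      # -> [2, 3, 4]  i.e. ::4 - ::1, in revision order
    revs = {0, 3, 5}
    inc.removeancestorsfrom(revs)  # revs becomes {3, 5}; 0 is an ancestor of base 1
    inc.addbases([3])
    inc.missingancestors([5])      # -> [2, 5]     rev 3 is now covered by the bases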
274 274
275 275 # Extracted from lazyancestors.__iter__ to avoid a reference cycle
276 276 def _lazyancestorsiter(parentrevs, initrevs, stoprev, inclusive):
277 277 seen = {nullrev}
278 278 heappush = heapq.heappush
279 279 heappop = heapq.heappop
280 280 heapreplace = heapq.heapreplace
281 281 see = seen.add
282 282
283 283 if inclusive:
284 284 visit = [-r for r in initrevs]
285 285 seen.update(initrevs)
286 286 heapq.heapify(visit)
287 287 else:
288 288 visit = []
289 289 heapq.heapify(visit)
290 290 for r in initrevs:
291 291 p1, p2 = parentrevs(r)
292 292 if p1 not in seen:
293 293 heappush(visit, -p1)
294 294 see(p1)
295 295 if p2 not in seen:
296 296 heappush(visit, -p2)
297 297 see(p2)
298 298
299 299 while visit:
300 300 current = -visit[0]
301 301 if current < stoprev:
302 302 break
303 303 yield current
304 304 # optimize out heapq operation if p1 is known to be the next highest
305 305 # revision, which is quite common in linear history.
306 306 p1, p2 = parentrevs(current)
307 307 if p1 not in seen:
308 308 if current - p1 == 1:
309 309 visit[0] = -p1
310 310 else:
311 311 heapreplace(visit, -p1)
312 312 see(p1)
313 313 else:
314 314 heappop(visit)
315 315 if p2 not in seen:
316 316 heappush(visit, -p2)
317 317 see(p2)
318 318
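Editor's note, not part of this changeset: heapq only provides a min-heap, so the iterator above stores negated revision numbers to pop the highest remaining revision first, and overwrites visit[0] in place when the next revision is simply current - 1 (the common linear-history case). The trick in isolation:

    import heapq
    revs = [3, 7, 5]
    heap = [-r for r in revs]         # negate so the largest revision surfaces first
    heapq.heapify(heap)
    while heap:
        print(-heapq.heappop(heap))   # prints 7, 5, 3: reverse revision order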
319 319
320 320 class lazyancestors(object):
321 321 def __init__(self, pfunc, revs, stoprev=0, inclusive=False):
322 322 """Create a new object generating ancestors for the given revs. Does
323 323 not generate revs lower than stoprev.
324 324
325 325 This is computed lazily starting from revs. The object supports
326 326 iteration and membership.
327 327
328 328 pfunc must return the (p1, p2) parents of a given revision and revs should be an iterable. inclusive is
329 329 a boolean that indicates whether revs should be included. Revs lower
330 330 than stoprev will not be generated.
331 331
332 332 Result does not include the null revision."""
333 333 self._parentrevs = pfunc
334 334 self._initrevs = [r for r in revs if r >= stoprev]
335 335 self._stoprev = stoprev
336 336 self._inclusive = inclusive
337 337
338 338 self._containsseen = set()
339 339 self._containsiter = _lazyancestorsiter(
340 340 self._parentrevs, self._initrevs, self._stoprev, self._inclusive
341 341 )
342 342
343 343 def __nonzero__(self):
344 344 """False if the set is empty, True otherwise."""
345 345 try:
346 346 next(iter(self))
347 347 return True
348 348 except StopIteration:
349 349 return False
350 350
351 351 __bool__ = __nonzero__
352 352
353 353 def __iter__(self):
354 354 """Generate the ancestors of _initrevs in reverse topological order.
355 355
356 356 If inclusive is False, yield a sequence of revision numbers starting
357 357 with the parents of each revision in revs, i.e., each revision is
358 358 *not* considered an ancestor of itself. Results are emitted in reverse
359 359 revision number order. That order is also topological: a child is
360 360 always emitted before its parent.
361 361
362 362 If inclusive is True, the source revisions are also yielded. The
363 363 reverse revision number order is still enforced."""
364 364 return _lazyancestorsiter(
365 365 self._parentrevs, self._initrevs, self._stoprev, self._inclusive
366 366 )
367 367
368 368 def __contains__(self, target):
369 369 """Test whether target is an ancestor of self._initrevs."""
370 370 seen = self._containsseen
371 371 if target in seen:
372 372 return True
373 373 iter = self._containsiter
374 374 if iter is None:
375 375 # Iterator exhausted
376 376 return False
377 377 # Only integer target is valid, but some callers expect 'None in self'
378 378 # to be False. So we explicitly allow it.
379 379 if target is None:
380 380 return False
381 381
382 382 see = seen.add
383 383 try:
384 384 while True:
385 385 rev = next(iter)
386 386 see(rev)
387 387 if rev == target:
388 388 return True
389 389 if rev < target:
390 390 return False
391 391 except StopIteration:
392 392 # Set to None to indicate fast-path can be used next time, and to
393 393 # free up memory.
394 394 self._containsiter = None
395 395 return False
396 396
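Editor's note, not part of this changeset: a hedged usage sketch of lazyancestors on a hypothetical DAG. Unlike the pfunc used by the functions above, the parentrevs callable here must return exactly two parents per revision, with nullrev (-1) padding the second slot:

    parentrevs = {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (0, -1),
                  4: (2, 3), 5: (2, 3)}.__getitem__
    la = lazyancestors(parentrevs, [4])
    list(la)    # -> [3, 2, 1, 0]  ancestors of 4, highest revision first
    3 in la     # -> True          membership walks and caches _containsiter
    la2 = lazyancestors(parentrevs, [4], stoprev=2, inclusive=True)
    list(la2)   # -> [4, 3, 2]     revisions below stoprev are never generated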
397 397
398 398 class rustlazyancestors(object):
399 399 def __init__(self, index, revs, stoprev=0, inclusive=False):
400 400 self._index = index
401 401 self._stoprev = stoprev
402 402 self._inclusive = inclusive
403 403 # no need to prefilter out init revs that are smaller than stoprev,
404 404 # it's done by the rustlazyancestors constructor.
405 405 # we need to convert to a list, because our rustlazyancestors
406 406 # constructor (from C code) doesn't understand anything else yet
407 407 self._initrevs = initrevs = list(revs)
408 408
409 409 self._containsiter = parsers.rustlazyancestors(
410 410 index, initrevs, stoprev, inclusive
411 411 )
412 412
413 413 def __nonzero__(self):
414 414 """False if the set is empty, True otherwise.
415 415
416 416 It's better to duplicate this essentially trivial method than
417 417 to subclass lazyancestors
418 418 """
419 419 try:
420 420 next(iter(self))
421 421 return True
422 422 except StopIteration:
423 423 return False
424 424
425 425 def __iter__(self):
426 426 return parsers.rustlazyancestors(
427 427 self._index, self._initrevs, self._stoprev, self._inclusive
428 428 )
429 429
430 430 def __contains__(self, target):
431 431 return target in self._containsiter