ancestor: incrementalmissingancestors.basesheads()...
Georges Racinet
r41280:4856c9b8 default
@@ -1,420 +1,424 @@
# ancestor.py - generic DAG ancestor algorithm for mercurial
#
# Copyright 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import heapq

from .node import nullrev
from . import (
    dagop,
    policy,
    pycompat,
)

parsers = policy.importmod(r'parsers')

def commonancestorsheads(pfunc, *nodes):
    """Returns a set with the heads of all common ancestors of all nodes,
    heads(::nodes[0] and ::nodes[1] and ...) .

    pfunc must return a list of parent vertices for a given vertex.
    """
    if not isinstance(nodes, set):
        nodes = set(nodes)
    if nullrev in nodes:
        return set()
    if len(nodes) <= 1:
        return nodes

    allseen = (1 << len(nodes)) - 1
    seen = [0] * (max(nodes) + 1)
    for i, n in enumerate(nodes):
        seen[n] = 1 << i
        poison = 1 << (i + 1)

    gca = set()
    interesting = len(nodes)
    nv = len(seen) - 1
    while nv >= 0 and interesting:
        v = nv
        nv -= 1
        if not seen[v]:
            continue
        sv = seen[v]
        if sv < poison:
            interesting -= 1
            if sv == allseen:
                gca.add(v)
                sv |= poison
                if v in nodes:
                    # history is linear
                    return {v}
        if sv < poison:
            for p in pfunc(v):
                sp = seen[p]
                if p == nullrev:
                    continue
                if sp == 0:
                    seen[p] = sv
                    interesting += 1
                elif sp != sv:
                    seen[p] |= sv
        else:
            for p in pfunc(v):
                if p == nullrev:
                    continue
                sp = seen[p]
                if sp and sp < poison:
                    interesting -= 1
                seen[p] = sv
    return gca

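# Illustrative sketch (editorial addition, not part of this changeset): how
# commonancestorsheads() can be driven by a plain dict-backed parent function.
# The tiny DAG and the helper name below are hypothetical.
def _example_commonancestorsheads():
    # revision -> list of parent revisions; nullrev (-1) marks a root
    dag = {0: [nullrev], 1: [0], 2: [0], 3: [1, 2]}
    pfunc = dag.__getitem__
    # 1 and 2 are siblings whose only common ancestor is 0, so the heads of
    # their common ancestors should be {0}
    return commonancestorsheads(pfunc, 1, 2)
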
def ancestors(pfunc, *orignodes):
    """
    Returns the common ancestors of a and b that are furthest from a
    root (as measured by longest path).

    pfunc must return a list of parent vertices for a given vertex.
    """
    def deepest(nodes):
        interesting = {}
        count = max(nodes) + 1
        depth = [0] * count
        seen = [0] * count
        mapping = []
        for (i, n) in enumerate(sorted(nodes)):
            depth[n] = 1
            b = 1 << i
            seen[n] = b
            interesting[b] = 1
            mapping.append((b, n))
        nv = count - 1
        while nv >= 0 and len(interesting) > 1:
            v = nv
            nv -= 1
            dv = depth[v]
            if dv == 0:
                continue
            sv = seen[v]
            for p in pfunc(v):
                if p == nullrev:
                    continue
                dp = depth[p]
                nsp = sp = seen[p]
                if dp <= dv:
                    depth[p] = dv + 1
                    if sp != sv:
                        interesting[sv] += 1
                        nsp = seen[p] = sv
                        if sp:
                            interesting[sp] -= 1
                            if interesting[sp] == 0:
                                del interesting[sp]
                elif dv == dp - 1:
                    nsp = sp | sv
                    if nsp == sp:
                        continue
                    seen[p] = nsp
                    interesting.setdefault(nsp, 0)
                    interesting[nsp] += 1
                    interesting[sp] -= 1
                    if interesting[sp] == 0:
                        del interesting[sp]
            interesting[sv] -= 1
            if interesting[sv] == 0:
                del interesting[sv]

        if len(interesting) != 1:
            return []

        k = 0
        for i in interesting:
            k |= i
        return set(n for (i, n) in mapping if k & i)

    gca = commonancestorsheads(pfunc, *orignodes)

    if len(gca) <= 1:
        return gca
    return deepest(gca)

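# Illustrative sketch (editorial addition, not part of this changeset): on a
# criss-cross style DAG, commonancestorsheads() can return several heads and
# ancestors() keeps only the ones furthest from the root. The DAG and helper
# name below are hypothetical.
def _example_ancestors():
    # 4 and 5 both descend from 2 (depth 2 from the root) and 3 (depth 1)
    dag = {0: [nullrev], 1: [0], 2: [1], 3: [0], 4: [2, 3], 5: [2, 3]}
    pfunc = dag.__getitem__
    assert commonancestorsheads(pfunc, 4, 5) == {2, 3}
    # only the deeper head should survive
    return ancestors(pfunc, 4, 5)  # expected: {2}
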
class incrementalmissingancestors(object):
    '''persistent state used to calculate missing ancestors incrementally

    Although similar in spirit to lazyancestors below, this is a separate class
    because trying to support contains and missingancestors operations with the
    same internal data structures adds needless complexity.'''
    def __init__(self, pfunc, bases):
        self.bases = set(bases)
        if not self.bases:
            self.bases.add(nullrev)
        self.pfunc = pfunc

    def hasbases(self):
        '''whether the common set has any non-trivial bases'''
        return self.bases and self.bases != {nullrev}

    def addbases(self, newbases):
        '''grow the ancestor set by adding new bases'''
        self.bases.update(newbases)

    def basesheads(self):
        return dagop.headrevs(self.bases, self.pfunc)

    def removeancestorsfrom(self, revs):
        '''remove all ancestors of bases from the set revs (in place)'''
        bases = self.bases
        pfunc = self.pfunc
        revs.difference_update(bases)
        # nullrev is always an ancestor
        revs.discard(nullrev)
        if not revs:
            return
        # anything in revs > start is definitely not an ancestor of bases
        # revs <= start needs to be investigated
        start = max(bases)
        keepcount = sum(1 for r in revs if r > start)
        if len(revs) == keepcount:
            # no revs to consider
            return

        for curr in pycompat.xrange(start, min(revs) - 1, -1):
            if curr not in bases:
                continue
            revs.discard(curr)
            bases.update(pfunc(curr))
            if len(revs) == keepcount:
                # no more potential revs to discard
                break

    def missingancestors(self, revs):
        '''return all the ancestors of revs that are not ancestors of self.bases

        This may include elements from revs.

        Equivalent to the revset (::revs - ::self.bases). Revs are returned in
        revision number order, which is a topological order.'''
        revsvisit = set(revs)
        basesvisit = self.bases
        pfunc = self.pfunc
        bothvisit = revsvisit.intersection(basesvisit)
        revsvisit.difference_update(bothvisit)
        if not revsvisit:
            return []

        start = max(max(revsvisit), max(basesvisit))
        # At this point, we hold the invariants that:
        # - revsvisit is the set of nodes we know are an ancestor of at least
        #   one of the nodes in revs
        # - basesvisit is the same for bases
        # - bothvisit is the set of nodes we know are ancestors of at least one
        #   of the nodes in revs and one of the nodes in bases. bothvisit and
        #   revsvisit are mutually exclusive, but bothvisit is a subset of
        #   basesvisit.
        # Now we walk down in reverse topo order, adding parents of nodes
        # already visited to the sets while maintaining the invariants. When a
        # node is found in both revsvisit and basesvisit, it is removed from
        # revsvisit and added to bothvisit. When revsvisit becomes empty, there
        # are no more ancestors of revs that aren't also ancestors of bases, so
        # exit.

        missing = []
        for curr in pycompat.xrange(start, nullrev, -1):
            if not revsvisit:
                break

            if curr in bothvisit:
                bothvisit.remove(curr)
                # curr's parents might have made it into revsvisit through
                # another path
                for p in pfunc(curr):
                    revsvisit.discard(p)
                    basesvisit.add(p)
                    bothvisit.add(p)
                continue

            if curr in revsvisit:
                missing.append(curr)
                revsvisit.remove(curr)
                thisvisit = revsvisit
                othervisit = basesvisit
            elif curr in basesvisit:
                thisvisit = basesvisit
                othervisit = revsvisit
            else:
                # not an ancestor of revs or bases: ignore
                continue

            for p in pfunc(curr):
                if p == nullrev:
                    pass
                elif p in othervisit or p in bothvisit:
                    # p is implicitly in thisvisit. This means p is or should be
                    # in bothvisit
                    revsvisit.discard(p)
                    basesvisit.add(p)
                    bothvisit.add(p)
                else:
                    # visit later
                    thisvisit.add(p)

        missing.reverse()
        return missing

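# Illustrative sketch (editorial addition, not part of this changeset): typical
# incremental use of the class above, including the basesheads() method added
# by this changeset. The DAG and helper name below are hypothetical.
def _example_incrementalmissingancestors():
    dag = {0: [nullrev], 1: [0], 2: [1], 3: [1], 4: [2, 3]}
    pfunc = dag.__getitem__
    inc = incrementalmissingancestors(pfunc, [2])
    # ::4 - ::2 should be [3, 4], in revision number order
    missing = inc.missingancestors([4])
    inc.addbases([3])
    # bases is now {2, 3} plus the ancestors pulled in while walking; their
    # heads should be {2, 3}, assuming dagop.headrevs() keeps only revisions
    # with no child inside the set
    heads = inc.basesheads()
    return missing, heads
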
# Extracted from lazyancestors.__iter__ to avoid a reference cycle
def _lazyancestorsiter(parentrevs, initrevs, stoprev, inclusive):
    seen = {nullrev}
    heappush = heapq.heappush
    heappop = heapq.heappop
    heapreplace = heapq.heapreplace
    see = seen.add

    if inclusive:
        visit = [-r for r in initrevs]
        seen.update(initrevs)
        heapq.heapify(visit)
    else:
        visit = []
        heapq.heapify(visit)
        for r in initrevs:
            p1, p2 = parentrevs(r)
            if p1 not in seen:
                heappush(visit, -p1)
                see(p1)
            if p2 not in seen:
                heappush(visit, -p2)
                see(p2)

    while visit:
        current = -visit[0]
        if current < stoprev:
            break
        yield current
        # optimize out heapq operation if p1 is known to be the next highest
        # revision, which is quite common in linear history.
        p1, p2 = parentrevs(current)
        if p1 not in seen:
            if current - p1 == 1:
                visit[0] = -p1
            else:
                heapreplace(visit, -p1)
            see(p1)
        else:
            heappop(visit)
        if p2 not in seen:
            heappush(visit, -p2)
            see(p2)

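# Illustrative sketch (editorial addition, not part of this changeset): direct
# use of the generator above. parentrevs must return exactly two parents,
# padding with nullrev, which is the changelog convention. The DAG and helper
# name below are hypothetical.
def _example_lazyancestorsiter():
    dag = {0: (nullrev, nullrev), 1: (0, nullrev), 2: (1, nullrev),
           3: (2, nullrev), 4: (3, 1)}
    # exclusive of 4 itself: expected 3, 2, 1 (stoprev=1 cuts off revision 0)
    return list(_lazyancestorsiter(dag.__getitem__, [4], 1, False))
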
class lazyancestors(object):
    def __init__(self, pfunc, revs, stoprev=0, inclusive=False):
        """Create a new object generating ancestors for the given revs. Does
        not generate revs lower than stoprev.

        This is computed lazily starting from revs. The object supports
        iteration and membership.

        cl should be a changelog and revs should be an iterable. inclusive is
        a boolean that indicates whether revs should be included. Revs lower
        than stoprev will not be generated.

        Result does not include the null revision."""
        self._parentrevs = pfunc
        self._initrevs = revs = [r for r in revs if r >= stoprev]
        self._stoprev = stoprev
        self._inclusive = inclusive

        self._containsseen = set()
        self._containsiter = _lazyancestorsiter(self._parentrevs,
                                                self._initrevs,
                                                self._stoprev,
                                                self._inclusive)

    def __nonzero__(self):
        """False if the set is empty, True otherwise."""
        try:
            next(iter(self))
            return True
        except StopIteration:
            return False

    __bool__ = __nonzero__

    def __iter__(self):
        """Generate the ancestors of _initrevs in reverse topological order.

        If inclusive is False, yield a sequence of revision numbers starting
        with the parents of each revision in revs, i.e., each revision is
        *not* considered an ancestor of itself. Results are emitted in reverse
        revision number order. That order is also topological: a child is
        always emitted before its parent.

        If inclusive is True, the source revisions are also yielded. The
        reverse revision number order is still enforced."""
        return _lazyancestorsiter(self._parentrevs, self._initrevs,
                                  self._stoprev, self._inclusive)

    def __contains__(self, target):
        """Test whether target is an ancestor of self._initrevs."""
        seen = self._containsseen
        if target in seen:
            return True
        iter = self._containsiter
        if iter is None:
            # Iterator exhausted
            return False
        # Only integer target is valid, but some callers expect 'None in self'
        # to be False. So we explicitly allow it.
        if target is None:
            return False

        see = seen.add
        try:
            while True:
                rev = next(iter)
                see(rev)
                if rev == target:
                    return True
                if rev < target:
                    return False
        except StopIteration:
            # Set to None to indicate fast-path can be used next time, and to
            # free up memory.
            self._containsiter = None
            return False

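# Illustrative sketch (editorial addition, not part of this changeset): the
# class above wrapped around the same kind of two-parent function, exercising
# iteration and membership. The DAG and helper name below are hypothetical.
def _example_lazyancestors():
    dag = {0: (nullrev, nullrev), 1: (0, nullrev), 2: (0, nullrev),
           3: (1, 2)}
    lazy = lazyancestors(dag.__getitem__, [3], inclusive=True)
    revs = list(lazy)        # expected [3, 2, 1, 0], children before parents
    member = 0 in lazy       # membership caches revs seen by the shared iterator
    missing = None in lazy   # explicitly allowed, expected False
    return revs, member, missing
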
class rustlazyancestors(object):

    def __init__(self, index, revs, stoprev=0, inclusive=False):
        self._index = index
        self._stoprev = stoprev
        self._inclusive = inclusive
        # no need to prefilter out init revs that are smaller than stoprev,
        # it's done by the rustlazyancestors constructor.
        # we need to convert to a list, because our rustlazyancestors
        # constructor (from C code) doesn't understand anything else yet
        self._initrevs = initrevs = list(revs)

        self._containsiter = parsers.rustlazyancestors(
            index, initrevs, stoprev, inclusive)

    def __nonzero__(self):
        """False if the set is empty, True otherwise.

        It's better to duplicate this essentially trivial method than
        to subclass lazyancestors
        """
        try:
            next(iter(self))
            return True
        except StopIteration:
            return False

    def __iter__(self):
        return parsers.rustlazyancestors(self._index,
                                         self._initrevs,
                                         self._stoprev,
                                         self._inclusive)

    def __contains__(self, target):
        return target in self._containsiter