ancestors: actually iterate over ancestors in topological order (issue5979)...
Boris Feld
r39509:b6db2e80 default
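The heart of the change, condensed: in a revlog every parent has a smaller revision number than its child, so popping a max-heap of revision numbers yields ancestors in decreasing numeric order, which is automatically a reverse topological order (a child always comes before its parents). The sketch below mirrors the new lazyancestors.__iter__ logic from the diff, but as a standalone illustration; the toposorted_ancestors name and the toy parents mapping are made up for the example and are not part of the patch.

    import heapq

    def toposorted_ancestors(parentrevs, revs, stoprev=0, inclusive=False):
        # Yield ancestors of revs from highest to lowest revision number.
        # Parents always have smaller numbers than their children, so a
        # child is always yielded before any of its ancestors.
        seen = {-1}                  # -1 stands in for nullrev
        if inclusive:
            for r in revs:
                yield r
            seen.update(revs)
        visit = []                   # min-heap of negated revs == max-heap of revs
        for r in revs:
            for p in parentrevs(r):
                if p not in seen:
                    heapq.heappush(visit, -p)
                    seen.add(p)
        while visit:
            current = -heapq.heappop(visit)
            if current >= stoprev:
                yield current
                for p in parentrevs(current):
                    if p not in seen:
                        heapq.heappush(visit, -p)
                        seen.add(p)

    # Toy parent map shaped like the test graph below (rev 4 merges revs 3 and 2):
    parents = {0: [-1, -1], 1: [0, -1], 2: [1, -1], 3: [0, -1], 4: [3, 2]}
    print(list(toposorted_ancestors(parents.__getitem__, [4], inclusive=True)))
    # [4, 3, 2, 1, 0]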
@@ -0,0 +1,34 @@
+$ hg init r1
+$ cd r1
+$ hg ci --config ui.allowemptycommit=true -m c0
+$ hg ci --config ui.allowemptycommit=true -m c1
+$ hg ci --config ui.allowemptycommit=true -m c2
+$ hg co -q 0
+$ hg ci --config ui.allowemptycommit=true -m c3
+created new head
+$ hg co -q 3
+$ hg merge --quiet
+$ hg ci --config ui.allowemptycommit=true -m c4
+
+$ hg log -G -T'{desc}'
+@ c4
+|\
+| o c3
+| |
+o | c2
+| |
+o | c1
+|/
+o c0
+
+
+>>> from mercurial.ui import ui
+>>> from mercurial.hg import repository
+>>> repo = repository(ui())
+>>> for anc in repo.changelog.ancestors([4], inclusive=True):
+... print(anc)
+4
+3
+2
+1
+0
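This graph is exactly the shape that triggers issue5979: a breadth-first walk like the removed deque-based loop (simplified below for contrast; the bfs_ancestors helper and the parent map are illustrative only) reaches rev 0 through rev 3 before it ever emits rev 1, so a parent is yielded ahead of its child. The new heap-based walk, and the ancestors() call in the test above, emit 4 3 2 1 0 instead.

    from collections import deque

    def bfs_ancestors(parentrevs, revs):
        # Simplified, contrast-only rendering of the old breadth-first walk
        # (inclusive form): the revs first, then parents level by level.
        seen = set(revs)
        for r in revs:
            yield r
        visit = deque(revs)
        while visit:
            for p in parentrevs(visit.popleft()):
                if p >= 0 and p not in seen:
                    visit.append(p)
                    seen.add(p)
                    yield p

    parents = {0: [-1, -1], 1: [0, -1], 2: [1, -1], 3: [0, -1], 4: [3, 2]}
    print(list(bfs_ancestors(parents.__getitem__, [4])))
    # [4, 3, 2, 0, 1] -- rev 0 appears before rev 1, not a topological order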
@@ -1,370 +1,380 @@
 # ancestor.py - generic DAG ancestor algorithm for mercurial
 #
 # Copyright 2006 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.

 from __future__ import absolute_import

-import collections
 import heapq

 from .node import nullrev
 from . import (
     pycompat,
 )

 def commonancestorsheads(pfunc, *nodes):
     """Returns a set with the heads of all common ancestors of all nodes,
     heads(::nodes[0] and ::nodes[1] and ...) .

     pfunc must return a list of parent vertices for a given vertex.
     """
     if not isinstance(nodes, set):
         nodes = set(nodes)
     if nullrev in nodes:
         return set()
     if len(nodes) <= 1:
         return nodes

     allseen = (1 << len(nodes)) - 1
     seen = [0] * (max(nodes) + 1)
     for i, n in enumerate(nodes):
         seen[n] = 1 << i
         poison = 1 << (i + 1)

     gca = set()
     interesting = len(nodes)
     nv = len(seen) - 1
     while nv >= 0 and interesting:
         v = nv
         nv -= 1
         if not seen[v]:
             continue
         sv = seen[v]
         if sv < poison:
             interesting -= 1
             if sv == allseen:
                 gca.add(v)
                 sv |= poison
                 if v in nodes:
                     # history is linear
                     return {v}
         if sv < poison:
             for p in pfunc(v):
                 sp = seen[p]
                 if p == nullrev:
                     continue
                 if sp == 0:
                     seen[p] = sv
                     interesting += 1
                 elif sp != sv:
                     seen[p] |= sv
         else:
             for p in pfunc(v):
                 if p == nullrev:
                     continue
                 sp = seen[p]
                 if sp and sp < poison:
                     interesting -= 1
                 seen[p] = sv
     return gca

 def ancestors(pfunc, *orignodes):
     """
     Returns the common ancestors of a and b that are furthest from a
     root (as measured by longest path).

     pfunc must return a list of parent vertices for a given vertex.
     """
     def deepest(nodes):
         interesting = {}
         count = max(nodes) + 1
         depth = [0] * count
         seen = [0] * count
         mapping = []
         for (i, n) in enumerate(sorted(nodes)):
             depth[n] = 1
             b = 1 << i
             seen[n] = b
             interesting[b] = 1
             mapping.append((b, n))
         nv = count - 1
         while nv >= 0 and len(interesting) > 1:
             v = nv
             nv -= 1
             dv = depth[v]
             if dv == 0:
                 continue
             sv = seen[v]
             for p in pfunc(v):
                 if p == nullrev:
                     continue
                 dp = depth[p]
                 nsp = sp = seen[p]
                 if dp <= dv:
                     depth[p] = dv + 1
                     if sp != sv:
                         interesting[sv] += 1
                         nsp = seen[p] = sv
                         if sp:
                             interesting[sp] -= 1
                             if interesting[sp] == 0:
                                 del interesting[sp]
                 elif dv == dp - 1:
                     nsp = sp | sv
                     if nsp == sp:
                         continue
                     seen[p] = nsp
                     interesting.setdefault(nsp, 0)
                     interesting[nsp] += 1
                     interesting[sp] -= 1
                     if interesting[sp] == 0:
                         del interesting[sp]
             interesting[sv] -= 1
             if interesting[sv] == 0:
                 del interesting[sv]

         if len(interesting) != 1:
             return []

         k = 0
         for i in interesting:
             k |= i
         return set(n for (i, n) in mapping if k & i)

     gca = commonancestorsheads(pfunc, *orignodes)

     if len(gca) <= 1:
         return gca
     return deepest(gca)

 class incrementalmissingancestors(object):
     '''persistent state used to calculate missing ancestors incrementally

     Although similar in spirit to lazyancestors below, this is a separate class
     because trying to support contains and missingancestors operations with the
     same internal data structures adds needless complexity.'''
     def __init__(self, pfunc, bases):
         self.bases = set(bases)
         if not self.bases:
             self.bases.add(nullrev)
         self.pfunc = pfunc

     def hasbases(self):
         '''whether the common set has any non-trivial bases'''
         return self.bases and self.bases != {nullrev}

     def addbases(self, newbases):
         '''grow the ancestor set by adding new bases'''
         self.bases.update(newbases)

     def removeancestorsfrom(self, revs):
         '''remove all ancestors of bases from the set revs (in place)'''
         bases = self.bases
         pfunc = self.pfunc
         revs.difference_update(bases)
         # nullrev is always an ancestor
         revs.discard(nullrev)
         if not revs:
             return
         # anything in revs > start is definitely not an ancestor of bases
         # revs <= start needs to be investigated
         start = max(bases)
         keepcount = sum(1 for r in revs if r > start)
         if len(revs) == keepcount:
             # no revs to consider
             return

         for curr in pycompat.xrange(start, min(revs) - 1, -1):
             if curr not in bases:
                 continue
             revs.discard(curr)
             bases.update(pfunc(curr))
             if len(revs) == keepcount:
                 # no more potential revs to discard
                 break

     def missingancestors(self, revs):
         '''return all the ancestors of revs that are not ancestors of self.bases

         This may include elements from revs.

         Equivalent to the revset (::revs - ::self.bases). Revs are returned in
         revision number order, which is a topological order.'''
         revsvisit = set(revs)
         basesvisit = self.bases
         pfunc = self.pfunc
         bothvisit = revsvisit.intersection(basesvisit)
         revsvisit.difference_update(bothvisit)
         if not revsvisit:
             return []

         start = max(max(revsvisit), max(basesvisit))
         # At this point, we hold the invariants that:
         # - revsvisit is the set of nodes we know are an ancestor of at least
         #   one of the nodes in revs
         # - basesvisit is the same for bases
         # - bothvisit is the set of nodes we know are ancestors of at least one
         #   of the nodes in revs and one of the nodes in bases. bothvisit and
         #   revsvisit are mutually exclusive, but bothvisit is a subset of
         #   basesvisit.
         # Now we walk down in reverse topo order, adding parents of nodes
         # already visited to the sets while maintaining the invariants. When a
         # node is found in both revsvisit and basesvisit, it is removed from
         # revsvisit and added to bothvisit. When revsvisit becomes empty, there
         # are no more ancestors of revs that aren't also ancestors of bases, so
         # exit.

         missing = []
         for curr in pycompat.xrange(start, nullrev, -1):
             if not revsvisit:
                 break

             if curr in bothvisit:
                 bothvisit.remove(curr)
                 # curr's parents might have made it into revsvisit through
                 # another path
                 for p in pfunc(curr):
                     revsvisit.discard(p)
                     basesvisit.add(p)
                     bothvisit.add(p)
                 continue

             if curr in revsvisit:
                 missing.append(curr)
                 revsvisit.remove(curr)
                 thisvisit = revsvisit
                 othervisit = basesvisit
             elif curr in basesvisit:
                 thisvisit = basesvisit
                 othervisit = revsvisit
             else:
                 # not an ancestor of revs or bases: ignore
                 continue

             for p in pfunc(curr):
                 if p == nullrev:
                     pass
                 elif p in othervisit or p in bothvisit:
                     # p is implicitly in thisvisit. This means p is or should be
                     # in bothvisit
                     revsvisit.discard(p)
                     basesvisit.add(p)
                     bothvisit.add(p)
                 else:
                     # visit later
                     thisvisit.add(p)

         missing.reverse()
         return missing

 class lazyancestors(object):
     def __init__(self, pfunc, revs, stoprev=0, inclusive=False):
         """Create a new object generating ancestors for the given revs. Does
         not generate revs lower than stoprev.

         This is computed lazily starting from revs. The object supports
         iteration and membership.

         cl should be a changelog and revs should be an iterable. inclusive is
         a boolean that indicates whether revs should be included. Revs lower
         than stoprev will not be generated.

         Result does not include the null revision."""
         self._parentrevs = pfunc
         self._initrevs = revs
         self._stoprev = stoprev
         self._inclusive = inclusive

         # Initialize data structures for __contains__.
         # For __contains__, we use a heap rather than a deque because
         # (a) it minimizes the number of parentrevs calls made
         # (b) it makes the loop termination condition obvious
         # Python's heap is a min-heap. Multiply all values by -1 to convert it
         # into a max-heap.
         self._containsvisit = [-rev for rev in revs]
         heapq.heapify(self._containsvisit)
         if inclusive:
             self._containsseen = set(revs)
         else:
             self._containsseen = set()

     def __nonzero__(self):
         """False if the set is empty, True otherwise."""
         try:
             next(iter(self))
             return True
         except StopIteration:
             return False

     __bool__ = __nonzero__

     def __iter__(self):
         """Generate the ancestors of _initrevs in reverse topological order.

         If inclusive is False, yield a sequence of revision numbers starting
-        with the parents of each revision in revs, i.e., each revision is *not*
-        considered an ancestor of itself. Results are in breadth-first order:
-        parents of each rev in revs, then parents of those, etc.
+        with the parents of each revision in revs, i.e., each revision is
+        *not* considered an ancestor of itself. Results are emitted in reverse
+        revision number order. That order is also topological: a child is
+        always emitted before its parent.

         If inclusive is True, yield all the revs first (ignoring stoprev),
-        then yield all the ancestors of revs as when inclusive is False.
-        If an element in revs is an ancestor of a different rev it is not
-        yielded again."""
+        then yield all the ancestors of revs as when inclusive is False. If an
+        element in revs is an ancestor of a different rev it is not yielded
+        again."""
         seen = set()
         revs = self._initrevs
         if self._inclusive:
             for rev in revs:
                 yield rev
             seen.update(revs)

         parentrevs = self._parentrevs
         stoprev = self._stoprev
-        visit = collections.deque(revs)
+        visit = []
+        heapq.heapify(visit)
+        schedule = heapq.heappush
+        nextitem = heapq.heappop
+        see = seen.add
+        see(nullrev)

-        see = seen.add
-        schedule = visit.append
+        for r in revs:
+            for parent in parentrevs(r):
+                if parent not in seen:
+                    schedule(visit, -parent)
+                    see(parent)

         while visit:
-            for parent in parentrevs(visit.popleft()):
-                if parent >= stoprev and parent not in seen:
-                    schedule(parent)
-                    see(parent)
-                    yield parent
+            current = -nextitem(visit)
+            if current >= stoprev:
+                yield current
+                for parent in parentrevs(current):
+                    if parent not in seen:
+                        schedule(visit, -parent)
+                        see(parent)

     def __contains__(self, target):
         """Test whether target is an ancestor of self._initrevs."""
         # Trying to do both __iter__ and __contains__ using the same visit
         # heap and seen set is complex enough that it slows down both. Keep
         # them separate.
         seen = self._containsseen
         if target in seen:
             return True
         # Only integer target is valid, but some callers expect 'None in self'
         # to be False. So we explicitly allow it.
         if target is None:
             return False

         parentrevs = self._parentrevs
         visit = self._containsvisit
         stoprev = self._stoprev
         heappop = heapq.heappop
         heappush = heapq.heappush
         see = seen.add

         targetseen = False

         while visit and -visit[0] > target and not targetseen:
             for parent in parentrevs(-heappop(visit)):
                 if parent < stoprev or parent in seen:
                     continue
                 # We need to make sure we push all parents into the heap so
                 # that we leave it in a consistent state for future calls.
                 heappush(visit, -parent)
                 see(parent)
                 if parent == target:
                     targetseen = True

         return targetseen
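Both __iter__ and __contains__ above lean on the same idiom spelled out in the file's own comments: Python's heapq only provides a min-heap, so revisions are pushed negated and negated again on pop, which makes the heap behave as a max-heap over revision numbers. A tiny standalone illustration with toy values only:

    import heapq

    heap = []
    for rev in (3, 7, 8, 4):
        heapq.heappush(heap, -rev)
    print([-heapq.heappop(heap) for _ in range(len(heap))])
    # [8, 7, 4, 3]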
@@ -1,18 +1,18 @@
 % lazy ancestor set for [], stoprev = 0, inclusive = False
 membership: []
 iteration: []
 % lazy ancestor set for [11, 13], stoprev = 0, inclusive = False
 membership: [7, 8, 3, 4, 1, 0]
-iteration: [3, 7, 8, 1, 4, 0, 2]
+iteration: [8, 7, 4, 3, 2, 1, 0]
 % lazy ancestor set for [1, 3], stoprev = 0, inclusive = False
 membership: [1, 0]
-iteration: [0, 1]
+iteration: [1, 0]
 % lazy ancestor set for [11, 13], stoprev = 0, inclusive = True
 membership: [11, 13, 7, 8, 3, 4, 1, 0]
-iteration: [11, 13, 3, 7, 8, 1, 4, 0, 2]
+iteration: [11, 13, 8, 7, 4, 3, 2, 1, 0]
 % lazy ancestor set for [11, 13], stoprev = 6, inclusive = False
 membership: [7, 8]
-iteration: [7, 8]
+iteration: [8, 7]
 % lazy ancestor set for [11, 13], stoprev = 6, inclusive = True
 membership: [11, 13, 7, 8]
-iteration: [11, 13, 7, 8]
+iteration: [11, 13, 8, 7]
@@ -1,19 +1,19 @@
 Ancestors of 5
 4 2 0
 Ancestors of 6 and 5
-3 4 2 1 0
+4 3 2 1 0
 Ancestors of 5 and 4
 4 2 0
 Ancestors of 7, stop at 6
 6
 Ancestors of 7, including revs
-7 6 5 3 4 2 1 0
+7 6 5 4 3 2 1 0
 Ancestors of 7, 5 and 3, including revs
 7 5 3 6 4 2 1 0

 Descendants of 5
 7 8
 Descendants of 5 and 3
 6 7 8
 Descendants of 5 and 4
 5 7 8