Show More
@@ -1,302 +1,307 b'' | |||
|
1 | 1 | # ancestor.py - generic DAG ancestor algorithm for mercurial |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2006 Matt Mackall <mpm@selenic.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | import heapq |
|
9 | 9 | import util |
|
10 | 10 | from node import nullrev |
|
11 | 11 | |
|
def commonancestorsheads(pfunc, *nodes):
    """Returns a set with the heads of all common ancestors of all nodes,
    heads(::nodes[0] and ::nodes[1] and ...) .

    pfunc must return a list of parent vertices for a given vertex.
    """
    # *nodes always arrives as a tuple, so convert unconditionally (the
    # previous "if not isinstance(nodes, set)" guard could never be false
    # and was dead logic).
    nodes = set(nodes)
    if nullrev in nodes:
        return set()
    if len(nodes) <= 1:
        return nodes

    # One bit per input node: seen[v] is the mask of input nodes that v is
    # known to be an ancestor of; allseen means "ancestor of every input".
    allseen = (1 << len(nodes)) - 1
    seen = [0] * (max(nodes) + 1)
    for i, n in enumerate(nodes):
        seen[n] = 1 << i
        poison = 1 << (i + 1)

    gca = set()
    # Number of live (seen but not yet poisoned) vertices still to process.
    interesting = len(nodes)
    nv = len(seen) - 1
    # Walk from the highest revision number downwards (reverse topological
    # order, assuming parents always have lower numbers than children).
    while nv >= 0 and interesting:
        v = nv
        nv -= 1
        if not seen[v]:
            continue
        sv = seen[v]
        if sv < poison:
            interesting -= 1
            if sv == allseen:
                # v is an ancestor of every input: a common-ancestor head.
                # Poison it so its own ancestors are not also reported.
                gca.add(v)
                sv |= poison
                if v in nodes:
                    # history is linear
                    return set([v])
        if sv < poison:
            for p in pfunc(v):
                # Check nullrev before touching seen[]: previously seen[p]
                # was read first, doing a bogus seen[-1] access (aliasing
                # the last element) and diverging from the poisoned branch
                # below, which already checked nullrev first.
                if p == nullrev:
                    continue
                sp = seen[p]
                if sp == 0:
                    # First time p is reached: it inherits v's mask.
                    seen[p] = sv
                    interesting += 1
                elif sp != sv:
                    seen[p] |= sv
        else:
            # v is poisoned: poison all its parents too, they can no
            # longer be heads of the common-ancestor set.
            for p in pfunc(v):
                if p == nullrev:
                    continue
                sp = seen[p]
                if sp and sp < poison:
                    interesting -= 1
                seen[p] = sv
    return gca
|
67 | ||
|
def ancestors(pfunc, *orignodes):
    """
    Returns the common ancestors of a and b that are furthest from a
    root (as measured by longest path).

    pfunc must return a list of parent vertices for a given vertex.
    """
    def deepest(nodes):
        # Of the common-ancestor heads in `nodes`, keep only those that
        # are furthest from a root.  Each candidate gets a distinct bit;
        # seen[v] is the mask of candidates v has been reached from, and
        # `interesting` maps a mask to the number of live revisions
        # currently carrying that mask.
        interesting = {}
        count = max(nodes) + 1
        depth = [0] * count
        seen = [0] * count
        mapping = []
        for (i, n) in enumerate(sorted(nodes)):
            depth[n] = 1
            b = 1 << i
            seen[n] = b
            interesting[b] = 1
            mapping.append((b, n))
        # Walk from the highest revision downwards; stop once all live
        # revisions carry the same single mask.
        nv = count - 1
        while nv >= 0 and len(interesting) > 1:
            v = nv
            nv -= 1
            dv = depth[v]
            if dv == 0:
                # v was never reached from any candidate: skip it.
                continue
            sv = seen[v]
            for p in pfunc(v):
                if p == nullrev:
                    continue
                dp = depth[p]
                nsp = sp = seen[p]
                if dp <= dv:
                    # p is reachable along a longer path through v: v's
                    # mask supersedes whatever mask p carried before.
                    depth[p] = dv + 1
                    if sp != sv:
                        interesting[sv] += 1
                        nsp = seen[p] = sv
                        if sp:
                            interesting[sp] -= 1
                            if interesting[sp] == 0:
                                del interesting[sp]
                elif dv == dp - 1:
                    # p already sits one level deeper via another path:
                    # merge v's mask into p's instead of replacing it.
                    nsp = sp | sv
                    if nsp == sp:
                        continue
                    seen[p] = nsp
                    interesting.setdefault(nsp, 0)
                    interesting[nsp] += 1
                    interesting[sp] -= 1
                    if interesting[sp] == 0:
                        del interesting[sp]
            # v has been fully processed; it is no longer live.
            interesting[sv] -= 1
            if interesting[sv] == 0:
                del interesting[sv]

        if len(interesting) != 1:
            return []

        # The single surviving mask selects the deepest candidates.
        k = 0
        for i in interesting:
            k |= i
        return set(n for (i, n) in mapping if k & i)

    gca = commonancestorsheads(pfunc, *orignodes)

    if len(gca) <= 1:
        return gca
    return deepest(gca)
|
131 | 136 | |
|
def missingancestors(revs, bases, pfunc):
    """Return all the ancestors of revs that are not ancestors of bases.

    This may include elements from revs.

    Equivalent to the revset (::revs - ::bases). Revs are returned in
    revision number order, which is a topological order.

    revs and bases should both be iterables. pfunc must return a list of
    parent revs for a given revs.
    """

    revsvisit = set(revs)
    basesvisit = set(bases)
    if not revsvisit:
        return []
    if not basesvisit:
        basesvisit.add(nullrev)
    # Highest revision we could possibly have to look at; must be taken
    # before the two sets are disjointed below.
    start = max(revsvisit | basesvisit)
    bothvisit = revsvisit & basesvisit
    revsvisit -= bothvisit
    basesvisit -= bothvisit
    # Invariants maintained throughout the walk:
    # - revsvisit holds nodes known to be ancestors of at least one rev
    # - basesvisit holds the analogous nodes for bases
    # - bothvisit holds nodes known to be ancestors of at least one rev
    #   AND at least one base
    # - any node lives in at most one of the three sets at a time
    # We sweep downwards in reverse topological order, pushing parents of
    # visited nodes into the sets.  A node found on both sides migrates to
    # bothvisit.  Once revsvisit drains, nothing else can be missing.

    missing = []
    for rev in xrange(start, nullrev, -1):
        if not revsvisit:
            break

        if rev in bothvisit:
            bothvisit.remove(rev)
            # rev's parents may already have entered revsvisit or
            # basesvisit via some other path; promote them to bothvisit.
            for p in pfunc(rev):
                revsvisit.discard(p)
                basesvisit.discard(p)
                bothvisit.add(p)
            continue

        # rev cannot be in both revsvisit and basesvisit here — that
        # combination would already have moved it into bothvisit.
        if rev in revsvisit:
            missing.append(rev)
            ownvisit, othervisit = revsvisit, basesvisit
        elif rev in basesvisit:
            ownvisit, othervisit = basesvisit, revsvisit
        else:
            # rev is not an ancestor of either side: ignore it.
            continue

        ownvisit.remove(rev)
        for p in pfunc(rev):
            if p == nullrev:
                continue
            if p in othervisit or p in bothvisit:
                # p is implicitly on our side as well, so it belongs (or
                # already is) in bothvisit.
                revsvisit.discard(p)
                basesvisit.discard(p)
                bothvisit.add(p)
            else:
                # Defer p to a later iteration of the sweep.
                ownvisit.add(p)

    missing.reverse()
    return missing
|
212 | 217 | |
|
class lazyancestors(object):
    def __init__(self, cl, revs, stoprev=0, inclusive=False):
        """Create a new object generating ancestors for the given revs. Does
        not generate revs lower than stoprev.

        This is computed lazily starting from revs. The object supports
        iteration and membership.

        cl should be a changelog and revs should be an iterable. inclusive is
        a boolean that indicates whether revs should be included. Revs lower
        than stoprev will not be generated.

        Result does not include the null revision."""
        # Materialize revs up front: it may be a one-shot iterator, and we
        # traverse it several times (the heap below, optionally the seen
        # set, and again on every __iter__ call).
        revs = list(revs)
        self._parentrevs = cl.parentrevs
        self._initrevs = revs
        self._stoprev = stoprev
        self._inclusive = inclusive

        # Initialize data structures for __contains__.
        # For __contains__, we use a heap rather than a deque because
        # (a) it minimizes the number of parentrevs calls made
        # (b) it makes the loop termination condition obvious
        # Python's heap is a min-heap. Multiply all values by -1 to convert it
        # into a max-heap.
        self._containsvisit = [-rev for rev in revs]
        heapq.heapify(self._containsvisit)
        if inclusive:
            self._containsseen = set(revs)
        else:
            self._containsseen = set()

    def __iter__(self):
        """Generate the ancestors of _initrevs in reverse topological order.

        If inclusive is False, yield a sequence of revision numbers starting
        with the parents of each revision in revs, i.e., each revision is *not*
        considered an ancestor of itself. Results are in breadth-first order:
        parents of each rev in revs, then parents of those, etc.

        If inclusive is True, yield all the revs first (ignoring stoprev),
        then yield all the ancestors of revs as when inclusive is False.
        If an element in revs is an ancestor of a different rev it is not
        yielded again."""
        seen = set()
        revs = self._initrevs
        if self._inclusive:
            for rev in revs:
                yield rev
            seen.update(revs)

        parentrevs = self._parentrevs
        stoprev = self._stoprev
        visit = util.deque(revs)

        # Breadth-first walk: pop from the left, append newly discovered
        # parents on the right, yielding each parent exactly once.
        while visit:
            for parent in parentrevs(visit.popleft()):
                if parent >= stoprev and parent not in seen:
                    visit.append(parent)
                    seen.add(parent)
                    yield parent

    def __contains__(self, target):
        """Test whether target is an ancestor of self._initrevs."""
        # Trying to do both __iter__ and __contains__ using the same visit
        # heap and seen set is complex enough that it slows down both. Keep
        # them separate.
        seen = self._containsseen
        if target in seen:
            return True

        parentrevs = self._parentrevs
        visit = self._containsvisit
        stoprev = self._stoprev
        heappop = heapq.heappop
        heappush = heapq.heappush

        targetseen = False

        # Pop revisions (max-heap of negated revs) until everything left
        # is <= target; any ancestor of target must have been seen by then.
        while visit and -visit[0] > target and not targetseen:
            for parent in parentrevs(-heappop(visit)):
                if parent < stoprev or parent in seen:
                    continue
                # We need to make sure we push all parents into the heap so
                # that we leave it in a consistent state for future calls.
                heappush(visit, -parent)
                seen.add(parent)
                if parent == target:
                    targetseen = True

        return targetseen
General Comments 0
You need to be logged in to leave comments.
Login now