@@ -1,255 +1,645 @@
1 | // ancestors.rs |
|
1 | // ancestors.rs | |
2 | // |
|
2 | // | |
3 | // Copyright 2018 Georges Racinet <gracinet@anybox.fr> |
|
3 | // Copyright 2018 Georges Racinet <gracinet@anybox.fr> | |
4 | // |
|
4 | // | |
5 | // This software may be used and distributed according to the terms of the |
|
5 | // This software may be used and distributed according to the terms of the | |
6 | // GNU General Public License version 2 or any later version. |
|
6 | // GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | //! Rust versions of generic DAG ancestors algorithms for Mercurial |
|
8 | //! Rust versions of generic DAG ancestors algorithms for Mercurial | |
9 |
|
9 | |||
10 | use super::{Graph, GraphError, Revision, NULL_REVISION}; |
|
10 | use super::{Graph, GraphError, Revision, NULL_REVISION}; | |
|
11 | use std::cmp::max; | |||
11 | use std::collections::{BinaryHeap, HashSet}; |
|
12 | use std::collections::{BinaryHeap, HashSet}; | |
12 |
|
13 | |||
13 | /// Iterator over the ancestors of a given list of revisions |
|
14 | /// Iterator over the ancestors of a given list of revisions | |
14 | /// This is a generic type, defined and implemented for any Graph, so that |
|
15 | /// This is a generic type, defined and implemented for any Graph, so that | |
15 | /// it's easy to |
|
16 | /// it's easy to | |
16 | /// |
|
17 | /// | |
17 | /// - unit test in pure Rust |
|
18 | /// - unit test in pure Rust | |
18 | /// - bind to main Mercurial code, potentially in several ways and have these |
|
19 | /// - bind to main Mercurial code, potentially in several ways and have these | |
19 | /// bindings evolve over time |
|
20 | /// bindings evolve over time | |
20 | pub struct AncestorsIterator<G: Graph> { |
|
21 | pub struct AncestorsIterator<G: Graph> { | |
21 | graph: G, |
|
22 | graph: G, | |
22 | visit: BinaryHeap<Revision>, |
|
23 | visit: BinaryHeap<Revision>, | |
23 | seen: HashSet<Revision>, |
|
24 | seen: HashSet<Revision>, | |
24 | stoprev: Revision, |
|
25 | stoprev: Revision, | |
25 | } |
|
26 | } | |
26 |
|
27 | |||
|
28 | pub struct MissingAncestors<G: Graph> { | |||
|
29 | graph: G, | |||
|
30 | bases: HashSet<Revision>, | |||
|
31 | } | |||
|
32 | ||||
27 | impl<G: Graph> AncestorsIterator<G> { |
|
33 | impl<G: Graph> AncestorsIterator<G> { | |
28 | /// Constructor. |
|
34 | /// Constructor. | |
29 | /// |
|
35 | /// | |
30 | /// if `inclusive` is true, then the init revisions are emitted in |
|
36 | /// if `inclusive` is true, then the init revisions are emitted in | |
31 | /// particular, otherwise iteration starts from their parents. |
|
37 | /// particular, otherwise iteration starts from their parents. | |
32 | pub fn new<I>( |
|
38 | pub fn new<I>( | |
33 | graph: G, |
|
39 | graph: G, | |
34 | initrevs: I, |
|
40 | initrevs: I, | |
35 | stoprev: Revision, |
|
41 | stoprev: Revision, | |
36 | inclusive: bool, |
|
42 | inclusive: bool, | |
37 | ) -> Result<Self, GraphError> |
|
43 | ) -> Result<Self, GraphError> | |
38 | where |
|
44 | where | |
39 | I: IntoIterator<Item = Revision>, |
|
45 | I: IntoIterator<Item = Revision>, | |
40 | { |
|
46 | { | |
41 | let filtered_initrevs = initrevs.into_iter().filter(|&r| r >= stoprev); |
|
47 | let filtered_initrevs = initrevs.into_iter().filter(|&r| r >= stoprev); | |
42 | if inclusive { |
|
48 | if inclusive { | |
43 | let visit: BinaryHeap<Revision> = filtered_initrevs.collect(); |
|
49 | let visit: BinaryHeap<Revision> = filtered_initrevs.collect(); | |
44 | let seen = visit.iter().map(|&x| x).collect(); |
|
50 | let seen = visit.iter().map(|&x| x).collect(); | |
45 | return Ok(AncestorsIterator { |
|
51 | return Ok(AncestorsIterator { | |
46 | visit: visit, |
|
52 | visit: visit, | |
47 | seen: seen, |
|
53 | seen: seen, | |
48 | stoprev: stoprev, |
|
54 | stoprev: stoprev, | |
49 | graph: graph, |
|
55 | graph: graph, | |
50 | }); |
|
56 | }); | |
51 | } |
|
57 | } | |
52 | let mut this = AncestorsIterator { |
|
58 | let mut this = AncestorsIterator { | |
53 | visit: BinaryHeap::new(), |
|
59 | visit: BinaryHeap::new(), | |
54 | seen: HashSet::new(), |
|
60 | seen: HashSet::new(), | |
55 | stoprev: stoprev, |
|
61 | stoprev: stoprev, | |
56 | graph: graph, |
|
62 | graph: graph, | |
57 | }; |
|
63 | }; | |
58 | this.seen.insert(NULL_REVISION); |
|
64 | this.seen.insert(NULL_REVISION); | |
59 | for rev in filtered_initrevs { |
|
65 | for rev in filtered_initrevs { | |
60 | for parent in this.graph.parents(rev)?.iter().cloned() { |
|
66 | for parent in this.graph.parents(rev)?.iter().cloned() { | |
61 | this.conditionally_push_rev(parent); |
|
67 | this.conditionally_push_rev(parent); | |
62 | } |
|
68 | } | |
63 | } |
|
69 | } | |
64 | Ok(this) |
|
70 | Ok(this) | |
65 | } |
|
71 | } | |
66 |
|
72 | |||
67 | #[inline] |
|
73 | #[inline] | |
68 | fn conditionally_push_rev(&mut self, rev: Revision) { |
|
74 | fn conditionally_push_rev(&mut self, rev: Revision) { | |
69 | if self.stoprev <= rev && !self.seen.contains(&rev) { |
|
75 | if self.stoprev <= rev && !self.seen.contains(&rev) { | |
70 | self.seen.insert(rev); |
|
76 | self.seen.insert(rev); | |
71 | self.visit.push(rev); |
|
77 | self.visit.push(rev); | |
72 | } |
|
78 | } | |
73 | } |
|
79 | } | |
74 |
|
80 | |||
75 | /// Consumes partially the iterator to tell if the given target |
|
81 | /// Consumes partially the iterator to tell if the given target | |
76 | /// revision |
|
82 | /// revision | |
77 | /// is in the ancestors it emits. |
|
83 | /// is in the ancestors it emits. | |
78 | /// This is meant for iterators actually dedicated to that kind of |
|
84 | /// This is meant for iterators actually dedicated to that kind of | |
79 | /// purpose |
|
85 | /// purpose | |
80 | pub fn contains(&mut self, target: Revision) -> Result<bool, GraphError> { |
|
86 | pub fn contains(&mut self, target: Revision) -> Result<bool, GraphError> { | |
81 | if self.seen.contains(&target) && target != NULL_REVISION { |
|
87 | if self.seen.contains(&target) && target != NULL_REVISION { | |
82 | return Ok(true); |
|
88 | return Ok(true); | |
83 | } |
|
89 | } | |
84 | for item in self { |
|
90 | for item in self { | |
85 | let rev = item?; |
|
91 | let rev = item?; | |
86 | if rev == target { |
|
92 | if rev == target { | |
87 | return Ok(true); |
|
93 | return Ok(true); | |
88 | } |
|
94 | } | |
89 | if rev < target { |
|
95 | if rev < target { | |
90 | return Ok(false); |
|
96 | return Ok(false); | |
91 | } |
|
97 | } | |
92 | } |
|
98 | } | |
93 | Ok(false) |
|
99 | Ok(false) | |
94 | } |
|
100 | } | |
95 | } |
|
101 | } | |
96 |
|
102 | |||
97 | /// Main implementation. |
|
103 | /// Main implementation. | |
98 | /// |
|
104 | /// | |
99 | /// The algorithm is the same as in `_lazyancestorsiter()` from `ancestors.py` |
|
105 | /// The algorithm is the same as in `_lazyancestorsiter()` from `ancestors.py` | |
100 | /// with a few non crucial differences: |
|
106 | /// with a few non crucial differences: | |
101 | /// |
|
107 | /// | |
102 | /// - there's no filtering of invalid parent revisions. Actually, it should be |
|
108 | /// - there's no filtering of invalid parent revisions. Actually, it should be | |
103 | /// consistent and more efficient to filter them from the end caller. |
|
109 | /// consistent and more efficient to filter them from the end caller. | |
104 | /// - we don't have the optimization for adjacent revisions (i.e., the case |
|
110 | /// - we don't have the optimization for adjacent revisions (i.e., the case | |
105 | /// where `p1 == rev - 1`), because it amounts to updating the first element of |
|
111 | /// where `p1 == rev - 1`), because it amounts to updating the first element of | |
106 | /// the heap without sifting, which Rust's BinaryHeap doesn't let us do. |
|
112 | /// the heap without sifting, which Rust's BinaryHeap doesn't let us do. | |
107 | /// - we save a few pushes by comparing with `stoprev` before pushing |
|
113 | /// - we save a few pushes by comparing with `stoprev` before pushing | |
108 | impl<G: Graph> Iterator for AncestorsIterator<G> { |
|
114 | impl<G: Graph> Iterator for AncestorsIterator<G> { | |
109 | type Item = Result<Revision, GraphError>; |
|
115 | type Item = Result<Revision, GraphError>; | |
110 |
|
116 | |||
111 | fn next(&mut self) -> Option<Self::Item> { |
|
117 | fn next(&mut self) -> Option<Self::Item> { | |
112 | let current = match self.visit.peek() { |
|
118 | let current = match self.visit.peek() { | |
113 | None => { |
|
119 | None => { | |
114 | return None; |
|
120 | return None; | |
115 | } |
|
121 | } | |
116 | Some(c) => *c, |
|
122 | Some(c) => *c, | |
117 | }; |
|
123 | }; | |
118 | let [p1, p2] = match self.graph.parents(current) { |
|
124 | let [p1, p2] = match self.graph.parents(current) { | |
119 | Ok(ps) => ps, |
|
125 | Ok(ps) => ps, | |
120 | Err(e) => return Some(Err(e)), |
|
126 | Err(e) => return Some(Err(e)), | |
121 | }; |
|
127 | }; | |
122 | if p1 < self.stoprev || self.seen.contains(&p1) { |
|
128 | if p1 < self.stoprev || self.seen.contains(&p1) { | |
123 | self.visit.pop(); |
|
129 | self.visit.pop(); | |
124 | } else { |
|
130 | } else { | |
125 | *(self.visit.peek_mut().unwrap()) = p1; |
|
131 | *(self.visit.peek_mut().unwrap()) = p1; | |
126 | self.seen.insert(p1); |
|
132 | self.seen.insert(p1); | |
127 | }; |
|
133 | }; | |
128 |
|
134 | |||
129 | self.conditionally_push_rev(p2); |
|
135 | self.conditionally_push_rev(p2); | |
130 | Some(Ok(current)) |
|
136 | Some(Ok(current)) | |
131 | } |
|
137 | } | |
132 | } |
|
138 | } | |
133 |
|
139 | |||
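To make the iteration contract above concrete (items are Result<Revision, GraphError>, emitted in decreasing revision number order, and `inclusive` decides whether the initial revisions themselves are yielded), here is a minimal usage sketch. It assumes a Graph implementation such as the Stub defined in the test module further down in this file; it is an illustration only, not part of the change:

    // Sketch only: `Stub` is the test Graph defined further down in this
    // file, whose DAG mirrors the one from test-ancestor.py.
    fn print_ancestors() -> Result<(), GraphError> {
        // inclusive = true, so 11 and 13 are themselves emitted first;
        // stoprev = 0 iterates all the way down to revision 0.
        let iter = AncestorsIterator::new(Stub, vec![11, 13], 0, true)?;
        for item in iter {
            let rev: Revision = item?; // parent lookups can fail
            println!("{}", rev); // 13, 11, 8, 7, 4, 3, 2, 1, 0
        }
        Ok(())
    }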
|
140 | impl<G: Graph> MissingAncestors<G> { | |||
|
141 | pub fn new(graph: G, bases: impl IntoIterator<Item = Revision>) -> Self { | |||
|
142 | let mut bases: HashSet<Revision> = bases.into_iter().collect(); | |||
|
143 | if bases.is_empty() { | |||
|
144 | bases.insert(NULL_REVISION); | |||
|
145 | } | |||
|
146 | MissingAncestors { graph, bases } | |||
|
147 | } | |||
|
148 | ||||
|
149 | pub fn has_bases(&self) -> bool { | |||
|
150 | self.bases.iter().any(|&b| b != NULL_REVISION) | |||
|
151 | } | |||
|
152 | ||||
|
153 | /// Return a reference to current bases. | |||
|
154 | /// | |||
|
155 | /// This is useful in unit tests, but setdiscovery.py also | |||
|
156 | /// reads the bases attribute of an ancestor.missingancestors instance. | |||
|
157 | pub fn get_bases<'a>(&'a self) -> &'a HashSet<Revision> { | |||
|
158 | &self.bases | |||
|
159 | } | |||
|
160 | ||||
|
161 | pub fn add_bases( | |||
|
162 | &mut self, | |||
|
163 | new_bases: impl IntoIterator<Item = Revision>, | |||
|
164 | ) { | |||
|
165 | self.bases.extend(new_bases); | |||
|
166 | } | |||
|
167 | ||||
|
168 | /// Remove all ancestors of self.bases from the revs set (in place) | |||
|
169 | pub fn remove_ancestors_from( | |||
|
170 | &mut self, | |||
|
171 | revs: &mut HashSet<Revision>, | |||
|
172 | ) -> Result<(), GraphError> { | |||
|
173 | revs.retain(|r| !self.bases.contains(r)); | |||
|
174 | // the null revision is always an ancestor | |||
|
175 | revs.remove(&NULL_REVISION); | |||
|
176 | if revs.is_empty() { | |||
|
177 | return Ok(()); | |||
|
178 | } | |||
|
179 | // anything in revs > start is definitely not an ancestor of bases | |||
|
180 | // revs <= start need to be investigated | |||
|
181 | // TODO optim: if a missingancestors is to be used several times, | |||
|
182 | // we shouldn't need to iterate each time on bases | |||
|
183 | let start = match self.bases.iter().cloned().max() { | |||
|
184 | Some(m) => m, | |||
|
185 | None => { | |||
|
186 | // bases is empty (shouldn't happen, but let's be safe) | |||
|
187 | return Ok(()); | |||
|
188 | } | |||
|
189 | }; | |||
|
190 | // whatever happens, we'll keep at least keepcount of them | |||
|
191 | // knowing this gives us an earlier stop condition than | |||
|
192 | // going all the way to the root | |||
|
193 | let keepcount = revs.iter().filter(|r| **r > start).count(); | |||
|
194 | ||||
|
195 | let mut curr = start; | |||
|
196 | while curr != NULL_REVISION && revs.len() > keepcount { | |||
|
197 | if self.bases.contains(&curr) { | |||
|
198 | revs.remove(&curr); | |||
|
199 | self.add_parents(curr)?; | |||
|
200 | } | |||
|
201 | curr -= 1; | |||
|
202 | } | |||
|
203 | Ok(()) | |||
|
204 | } | |||
|
205 | ||||
|
206 | /// Add rev's parents to self.bases | |||
|
207 | #[inline] | |||
|
208 | fn add_parents(&mut self, rev: Revision) -> Result<(), GraphError> { | |||
|
209 | // No need to bother the set with inserting NULL_REVISION over and | |||
|
210 | // over | |||
|
211 | for p in self.graph.parents(rev)?.iter().cloned() { | |||
|
212 | if p != NULL_REVISION { | |||
|
213 | self.bases.insert(p); | |||
|
214 | } | |||
|
215 | } | |||
|
216 | Ok(()) | |||
|
217 | } | |||
|
218 | ||||
|
219 | /// Return all the ancestors of revs that are not ancestors of self.bases | |||
|
220 | /// | |||
|
221 | /// This may include elements from revs. | |||
|
222 | /// | |||
|
223 | /// Equivalent to the revset (::revs - ::self.bases). Revs are returned in | |||
|
224 | /// revision number order, which is a topological order. | |||
|
225 | pub fn missing_ancestors( | |||
|
226 | &mut self, | |||
|
227 | revs: impl IntoIterator<Item = Revision>, | |||
|
228 | ) -> Result<Vec<Revision>, GraphError> { | |||
|
229 | // just for convenience and comparison with Python version | |||
|
230 | let bases_visit = &mut self.bases; | |||
|
231 | let mut revs: HashSet<Revision> = revs | |||
|
232 | .into_iter() | |||
|
233 | .filter(|r| !bases_visit.contains(r)) | |||
|
234 | .collect(); | |||
|
235 | let revs_visit = &mut revs; | |||
|
236 | let mut both_visit: HashSet<Revision> = | |||
|
237 | revs_visit.intersection(&bases_visit).cloned().collect(); | |||
|
238 | if revs_visit.is_empty() { | |||
|
239 | return Ok(Vec::new()); | |||
|
240 | } | |||
|
241 | ||||
|
242 | let max_bases = | |||
|
243 | bases_visit.iter().cloned().max().unwrap_or(NULL_REVISION); | |||
|
244 | let max_revs = | |||
|
245 | revs_visit.iter().cloned().max().unwrap_or(NULL_REVISION); | |||
|
246 | let start = max(max_bases, max_revs); | |||
|
247 | ||||
|
248 | // TODO heuristics for with_capacity()? | |||
|
249 | let mut missing: Vec<Revision> = Vec::new(); | |||
|
250 | for curr in (0..=start).map(|i| start - i) { | |||
|
251 | if revs_visit.is_empty() { | |||
|
252 | break; | |||
|
253 | } | |||
|
254 | if both_visit.contains(&curr) { | |||
|
255 | // curr's parents might have made it into revs_visit through | |||
|
256 | // another path | |||
|
257 | // TODO optim: Rust's HashSet.remove returns a boolean telling | |||
|
258 | // if it happened. This will spare us one set lookup | |||
|
259 | both_visit.remove(&curr); | |||
|
260 | for p in self.graph.parents(curr)?.iter().cloned() { | |||
|
261 | if p == NULL_REVISION { | |||
|
262 | continue; | |||
|
263 | } | |||
|
264 | revs_visit.remove(&p); | |||
|
265 | bases_visit.insert(p); | |||
|
266 | both_visit.insert(p); | |||
|
267 | } | |||
|
268 | continue; | |||
|
269 | } | |||
|
270 | // In Rust we can't simply alias the two sets through mutable | |||
|
271 | // variables the way the Python version does with thisvisit and | |||
|
272 | // othervisit, so we differentiate the two cases with a boolean. | |||
|
273 | let this_visit_is_revs = { | |||
|
274 | if revs_visit.remove(&curr) { | |||
|
275 | missing.push(curr); | |||
|
276 | true | |||
|
277 | } else if bases_visit.contains(&curr) { | |||
|
278 | false | |||
|
279 | } else { | |||
|
280 | // not an ancestor of revs or bases: ignore | |||
|
281 | continue; | |||
|
282 | } | |||
|
283 | }; | |||
|
284 | ||||
|
285 | for p in self.graph.parents(curr)?.iter().cloned() { | |||
|
286 | if p == NULL_REVISION { | |||
|
287 | continue; | |||
|
288 | } | |||
|
289 | let in_other_visit = if this_visit_is_revs { | |||
|
290 | bases_visit.contains(&p) | |||
|
291 | } else { | |||
|
292 | revs_visit.contains(&p) | |||
|
293 | }; | |||
|
294 | if in_other_visit || both_visit.contains(&p) { | |||
|
295 | // p is implicitly in this_visit. | |||
|
296 | // This means p is or should be in bothvisit | |||
|
297 | // TODO optim: hence if bothvisit, we look up twice | |||
|
298 | revs_visit.remove(&p); | |||
|
299 | bases_visit.insert(p); | |||
|
300 | both_visit.insert(p); | |||
|
301 | } else { | |||
|
302 | // visit later | |||
|
303 | if this_visit_is_revs { | |||
|
304 | revs_visit.insert(p); | |||
|
305 | } else { | |||
|
306 | bases_visit.insert(p); | |||
|
307 | } | |||
|
308 | } | |||
|
309 | } | |||
|
310 | } | |||
|
311 | missing.reverse(); | |||
|
312 | Ok(missing) | |||
|
313 | } | |||
|
314 | } | |||
|
315 | ||||
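As a companion to the doc comments above: missing_ancestors() computes the revset ::revs - ::bases in increasing revision number order, and remove_ancestors_from() prunes a set in place. A short sketch, with revision numbers and expected values taken from the Stub test graph and the unit tests further down in this file (an illustration only, not part of the change):

    // Sketch only: `Stub` is the test Graph defined in the test module below.
    fn sketch_missing_ancestors() -> Result<(), GraphError> {
        // ::[9, 11] - ::[7], returned in increasing revision number order
        let mut missing = MissingAncestors::new(Stub, vec![7]);
        assert_eq!(missing.missing_ancestors(vec![9, 11])?, vec![3, 6, 9, 11]);

        // remove_ancestors_from() strips, in place, everything that is an
        // ancestor of one of the bases: 1, 2 and 4 are ancestors of 7, so
        // only 3 and 5 remain (cf. assert_missing_remove() below).
        let mut cleaner = MissingAncestors::new(Stub, vec![7]);
        let mut revs: HashSet<Revision> =
            [1, 2, 3, 4, 5].iter().cloned().collect();
        cleaner.remove_ancestors_from(&mut revs)?;
        Ok(())
    }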
134 | #[cfg(test)] |
|
316 | #[cfg(test)] | |
135 | mod tests { |
|
317 | mod tests { | |
136 |
|
318 | |||
137 | use super::*; |
|
319 | use super::*; | |
|
320 | use std::iter::FromIterator; | |||
138 |
|
321 | |||
139 | #[derive(Clone, Debug)] |
|
322 | #[derive(Clone, Debug)] | |
140 | struct Stub; |
|
323 | struct Stub; | |
141 |
|
324 | |||
142 | /// This is the same as the dict from test-ancestors.py |
|
325 | /// This is the same as the dict from test-ancestors.py | |
143 | impl Graph for Stub { |
|
326 | impl Graph for Stub { | |
144 | fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> { |
|
327 | fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> { | |
145 | match rev { |
|
328 | match rev { | |
146 | 0 => Ok([-1, -1]), |
|
329 | 0 => Ok([-1, -1]), | |
147 | 1 => Ok([0, -1]), |
|
330 | 1 => Ok([0, -1]), | |
148 | 2 => Ok([1, -1]), |
|
331 | 2 => Ok([1, -1]), | |
149 | 3 => Ok([1, -1]), |
|
332 | 3 => Ok([1, -1]), | |
150 | 4 => Ok([2, -1]), |
|
333 | 4 => Ok([2, -1]), | |
151 | 5 => Ok([4, -1]), |
|
334 | 5 => Ok([4, -1]), | |
152 | 6 => Ok([4, -1]), |
|
335 | 6 => Ok([4, -1]), | |
153 | 7 => Ok([4, -1]), |
|
336 | 7 => Ok([4, -1]), | |
154 | 8 => Ok([-1, -1]), |
|
337 | 8 => Ok([-1, -1]), | |
155 | 9 => Ok([6, 7]), |
|
338 | 9 => Ok([6, 7]), | |
156 | 10 => Ok([5, -1]), |
|
339 | 10 => Ok([5, -1]), | |
157 | 11 => Ok([3, 7]), |
|
340 | 11 => Ok([3, 7]), | |
158 | 12 => Ok([9, -1]), |
|
341 | 12 => Ok([9, -1]), | |
159 | 13 => Ok([8, -1]), |
|
342 | 13 => Ok([8, -1]), | |
160 | r => Err(GraphError::ParentOutOfRange(r)), |
|
343 | r => Err(GraphError::ParentOutOfRange(r)), | |
161 | } |
|
344 | } | |
162 | } |
|
345 | } | |
163 | } |
|
346 | } | |
164 |
|
347 | |||
165 | fn list_ancestors<G: Graph>( |
|
348 | fn list_ancestors<G: Graph>( | |
166 | graph: G, |
|
349 | graph: G, | |
167 | initrevs: Vec<Revision>, |
|
350 | initrevs: Vec<Revision>, | |
168 | stoprev: Revision, |
|
351 | stoprev: Revision, | |
169 | inclusive: bool, |
|
352 | inclusive: bool, | |
170 | ) -> Vec<Revision> { |
|
353 | ) -> Vec<Revision> { | |
171 | AncestorsIterator::new(graph, initrevs, stoprev, inclusive) |
|
354 | AncestorsIterator::new(graph, initrevs, stoprev, inclusive) | |
172 | .unwrap() |
|
355 | .unwrap() | |
173 | .map(|res| res.unwrap()) |
|
356 | .map(|res| res.unwrap()) | |
174 | .collect() |
|
357 | .collect() | |
175 | } |
|
358 | } | |
176 |
|
359 | |||
177 | #[test] |
|
360 | #[test] | |
178 | /// Same tests as test-ancestor.py, without membership |
|
361 | /// Same tests as test-ancestor.py, without membership | |
179 | /// (see also test-ancestor.py.out) |
|
362 | /// (see also test-ancestor.py.out) | |
180 | fn test_list_ancestor() { |
|
363 | fn test_list_ancestor() { | |
181 | assert_eq!(list_ancestors(Stub, vec![], 0, false), vec![]); |
|
364 | assert_eq!(list_ancestors(Stub, vec![], 0, false), vec![]); | |
182 | assert_eq!( |
|
365 | assert_eq!( | |
183 | list_ancestors(Stub, vec![11, 13], 0, false), |
|
366 | list_ancestors(Stub, vec![11, 13], 0, false), | |
184 | vec![8, 7, 4, 3, 2, 1, 0] |
|
367 | vec![8, 7, 4, 3, 2, 1, 0] | |
185 | ); |
|
368 | ); | |
186 | assert_eq!(list_ancestors(Stub, vec![1, 3], 0, false), vec![1, 0]); |
|
369 | assert_eq!(list_ancestors(Stub, vec![1, 3], 0, false), vec![1, 0]); | |
187 | assert_eq!( |
|
370 | assert_eq!( | |
188 | list_ancestors(Stub, vec![11, 13], 0, true), |
|
371 | list_ancestors(Stub, vec![11, 13], 0, true), | |
189 | vec![13, 11, 8, 7, 4, 3, 2, 1, 0] |
|
372 | vec![13, 11, 8, 7, 4, 3, 2, 1, 0] | |
190 | ); |
|
373 | ); | |
191 | assert_eq!(list_ancestors(Stub, vec![11, 13], 6, false), vec![8, 7]); |
|
374 | assert_eq!(list_ancestors(Stub, vec![11, 13], 6, false), vec![8, 7]); | |
192 | assert_eq!( |
|
375 | assert_eq!( | |
193 | list_ancestors(Stub, vec![11, 13], 6, true), |
|
376 | list_ancestors(Stub, vec![11, 13], 6, true), | |
194 | vec![13, 11, 8, 7] |
|
377 | vec![13, 11, 8, 7] | |
195 | ); |
|
378 | ); | |
196 | assert_eq!(list_ancestors(Stub, vec![11, 13], 11, true), vec![13, 11]); |
|
379 | assert_eq!(list_ancestors(Stub, vec![11, 13], 11, true), vec![13, 11]); | |
197 | assert_eq!(list_ancestors(Stub, vec![11, 13], 12, true), vec![13]); |
|
380 | assert_eq!(list_ancestors(Stub, vec![11, 13], 12, true), vec![13]); | |
198 | assert_eq!( |
|
381 | assert_eq!( | |
199 | list_ancestors(Stub, vec![10, 1], 0, true), |
|
382 | list_ancestors(Stub, vec![10, 1], 0, true), | |
200 | vec![10, 5, 4, 2, 1, 0] |
|
383 | vec![10, 5, 4, 2, 1, 0] | |
201 | ); |
|
384 | ); | |
202 | } |
|
385 | } | |
203 |
|
386 | |||
204 | #[test] |
|
387 | #[test] | |
205 | /// Corner case that's not directly in test-ancestors.py, but |
|
388 | /// Corner case that's not directly in test-ancestors.py, but | |
206 | /// that happens quite often, as demonstrated by running the whole |
|
389 | /// that happens quite often, as demonstrated by running the whole | |
207 | /// suite. |
|
390 | /// suite. | |
208 | /// For instance, run tests/test-obsolete-checkheads.t |
|
391 | /// For instance, run tests/test-obsolete-checkheads.t | |
209 | fn test_nullrev_input() { |
|
392 | fn test_nullrev_input() { | |
210 | let mut iter = |
|
393 | let mut iter = | |
211 | AncestorsIterator::new(Stub, vec![-1], 0, false).unwrap(); |
|
394 | AncestorsIterator::new(Stub, vec![-1], 0, false).unwrap(); | |
212 | assert_eq!(iter.next(), None) |
|
395 | assert_eq!(iter.next(), None) | |
213 | } |
|
396 | } | |
214 |
|
397 | |||
215 | #[test] |
|
398 | #[test] | |
216 | fn test_contains() { |
|
399 | fn test_contains() { | |
217 | let mut lazy = |
|
400 | let mut lazy = | |
218 | AncestorsIterator::new(Stub, vec![10, 1], 0, true).unwrap(); |
|
401 | AncestorsIterator::new(Stub, vec![10, 1], 0, true).unwrap(); | |
219 | assert!(lazy.contains(1).unwrap()); |
|
402 | assert!(lazy.contains(1).unwrap()); | |
220 | assert!(!lazy.contains(3).unwrap()); |
|
403 | assert!(!lazy.contains(3).unwrap()); | |
221 |
|
404 | |||
222 | let mut lazy = |
|
405 | let mut lazy = | |
223 | AncestorsIterator::new(Stub, vec![0], 0, false).unwrap(); |
|
406 | AncestorsIterator::new(Stub, vec![0], 0, false).unwrap(); | |
224 | assert!(!lazy.contains(NULL_REVISION).unwrap()); |
|
407 | assert!(!lazy.contains(NULL_REVISION).unwrap()); | |
225 | } |
|
408 | } | |
226 |
|
409 | |||
227 | /// A corrupted Graph, supporting error handling tests |
|
410 | /// A corrupted Graph, supporting error handling tests | |
228 | struct Corrupted; |
|
411 | struct Corrupted; | |
229 |
|
412 | |||
230 | impl Graph for Corrupted { |
|
413 | impl Graph for Corrupted { | |
231 | fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> { |
|
414 | fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> { | |
232 | match rev { |
|
415 | match rev { | |
233 | 1 => Ok([0, -1]), |
|
416 | 1 => Ok([0, -1]), | |
234 | r => Err(GraphError::ParentOutOfRange(r)), |
|
417 | r => Err(GraphError::ParentOutOfRange(r)), | |
235 | } |
|
418 | } | |
236 | } |
|
419 | } | |
237 | } |
|
420 | } | |
238 |
|
421 | |||
239 | #[test] |
|
422 | #[test] | |
240 | fn test_initrev_out_of_range() { |
|
423 | fn test_initrev_out_of_range() { | |
241 | // inclusive=false looks up initrev's parents right away |
|
424 | // inclusive=false looks up initrev's parents right away | |
242 | match AncestorsIterator::new(Stub, vec![25], 0, false) { |
|
425 | match AncestorsIterator::new(Stub, vec![25], 0, false) { | |
243 | Ok(_) => panic!("Should have been ParentOutOfRange"), |
|
426 | Ok(_) => panic!("Should have been ParentOutOfRange"), | |
244 | Err(e) => assert_eq!(e, GraphError::ParentOutOfRange(25)), |
|
427 | Err(e) => assert_eq!(e, GraphError::ParentOutOfRange(25)), | |
245 | } |
|
428 | } | |
246 | } |
|
429 | } | |
247 |
|
430 | |||
248 | #[test] |
|
431 | #[test] | |
249 | fn test_next_out_of_range() { |
|
432 | fn test_next_out_of_range() { | |
250 | // inclusive=false looks up initrev's parents right away |
|
433 | // inclusive=false looks up initrev's parents right away | |
251 | let mut iter = |
|
434 | let mut iter = | |
252 | AncestorsIterator::new(Corrupted, vec![1], 0, false).unwrap(); |
|
435 | AncestorsIterator::new(Corrupted, vec![1], 0, false).unwrap(); | |
253 | assert_eq!(iter.next(), Some(Err(GraphError::ParentOutOfRange(0)))); |
|
436 | assert_eq!(iter.next(), Some(Err(GraphError::ParentOutOfRange(0)))); | |
254 | } |
|
437 | } | |
|
438 | ||||
|
439 | #[test] | |||
|
440 | /// Test constructor, add/get bases | |||
|
441 | fn test_missing_bases() { | |||
|
442 | let mut missing_ancestors = | |||
|
443 | MissingAncestors::new(Stub, [5, 3, 1, 3].iter().cloned()); | |||
|
444 | let mut as_vec: Vec<Revision> = | |||
|
445 | missing_ancestors.get_bases().iter().cloned().collect(); | |||
|
446 | as_vec.sort(); | |||
|
447 | assert_eq!(as_vec, [1, 3, 5]); | |||
|
448 | ||||
|
449 | missing_ancestors.add_bases([3, 7, 8].iter().cloned()); | |||
|
450 | as_vec = missing_ancestors.get_bases().iter().cloned().collect(); | |||
|
451 | as_vec.sort(); | |||
|
452 | assert_eq!(as_vec, [1, 3, 5, 7, 8]); | |||
|
453 | } | |||
|
454 | ||||
|
455 | fn assert_missing_remove( | |||
|
456 | bases: &[Revision], | |||
|
457 | revs: &[Revision], | |||
|
458 | expected: &[Revision], | |||
|
459 | ) { | |||
|
460 | let mut missing_ancestors = | |||
|
461 | MissingAncestors::new(Stub, bases.iter().cloned()); | |||
|
462 | let mut revset: HashSet<Revision> = revs.iter().cloned().collect(); | |||
|
463 | missing_ancestors | |||
|
464 | .remove_ancestors_from(&mut revset) | |||
|
465 | .unwrap(); | |||
|
466 | let mut as_vec: Vec<Revision> = revset.into_iter().collect(); | |||
|
467 | as_vec.sort(); | |||
|
468 | assert_eq!(as_vec.as_slice(), expected); | |||
|
469 | } | |||
|
470 | ||||
|
471 | #[test] | |||
|
472 | fn test_missing_remove() { | |||
|
473 | assert_missing_remove( | |||
|
474 | &[1, 2, 3, 4, 7], | |||
|
475 | Vec::from_iter(1..10).as_slice(), | |||
|
476 | &[5, 6, 8, 9], | |||
|
477 | ); | |||
|
478 | assert_missing_remove(&[10], &[11, 12, 13, 14], &[11, 12, 13, 14]); | |||
|
479 | assert_missing_remove(&[7], &[1, 2, 3, 4, 5], &[3, 5]); | |||
|
480 | } | |||
|
481 | ||||
|
482 | fn assert_missing_ancestors( | |||
|
483 | bases: &[Revision], | |||
|
484 | revs: &[Revision], | |||
|
485 | expected: &[Revision], | |||
|
486 | ) { | |||
|
487 | let mut missing_ancestors = | |||
|
488 | MissingAncestors::new(Stub, bases.iter().cloned()); | |||
|
489 | let missing = missing_ancestors | |||
|
490 | .missing_ancestors(revs.iter().cloned()) | |||
|
491 | .unwrap(); | |||
|
492 | assert_eq!(missing.as_slice(), expected); | |||
|
493 | } | |||
|
494 | ||||
|
495 | #[test] | |||
|
496 | fn test_missing_ancestors() { | |||
|
497 | // examples taken from test-ancestors.py by having it run | |||
|
498 | // on the same graph (both naive and fast Python algs) | |||
|
499 | assert_missing_ancestors(&[10], &[11], &[3, 7, 11]); | |||
|
500 | assert_missing_ancestors(&[11], &[10], &[5, 10]); | |||
|
501 | assert_missing_ancestors(&[7], &[9, 11], &[3, 6, 9, 11]); | |||
|
502 | } | |||
|
503 | ||||
|
504 | // A Graph represented by a vector whose indices are revisions | |||
|
505 | // and values are parents of the revisions | |||
|
506 | type VecGraph = Vec<[Revision; 2]>; | |||
|
507 | ||||
|
508 | impl Graph for VecGraph { | |||
|
509 | fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> { | |||
|
510 | Ok(self[rev as usize]) | |||
|
511 | } | |||
|
512 | } | |||
|
513 | ||||
|
514 | /// An interesting case found by a random generator similar to | |||
|
515 | /// the one in test-ancestor.py. An early version of Rust MissingAncestors | |||
|
516 | /// failed this, yet none of the integration tests of the whole suite | |||
|
517 | /// catched it. | |||
|
518 | #[test] | |||
|
519 | fn test_remove_ancestors_from_case1() { | |||
|
520 | let graph: VecGraph = vec![ | |||
|
521 | [NULL_REVISION, NULL_REVISION], | |||
|
522 | [0, NULL_REVISION], | |||
|
523 | [1, 0], | |||
|
524 | [2, 1], | |||
|
525 | [3, NULL_REVISION], | |||
|
526 | [4, NULL_REVISION], | |||
|
527 | [5, 1], | |||
|
528 | [2, NULL_REVISION], | |||
|
529 | [7, NULL_REVISION], | |||
|
530 | [8, NULL_REVISION], | |||
|
531 | [9, NULL_REVISION], | |||
|
532 | [10, 1], | |||
|
533 | [3, NULL_REVISION], | |||
|
534 | [12, NULL_REVISION], | |||
|
535 | [13, NULL_REVISION], | |||
|
536 | [14, NULL_REVISION], | |||
|
537 | [4, NULL_REVISION], | |||
|
538 | [16, NULL_REVISION], | |||
|
539 | [17, NULL_REVISION], | |||
|
540 | [18, NULL_REVISION], | |||
|
541 | [19, 11], | |||
|
542 | [20, NULL_REVISION], | |||
|
543 | [21, NULL_REVISION], | |||
|
544 | [22, NULL_REVISION], | |||
|
545 | [23, NULL_REVISION], | |||
|
546 | [2, NULL_REVISION], | |||
|
547 | [3, NULL_REVISION], | |||
|
548 | [26, 24], | |||
|
549 | [27, NULL_REVISION], | |||
|
550 | [28, NULL_REVISION], | |||
|
551 | [12, NULL_REVISION], | |||
|
552 | [1, NULL_REVISION], | |||
|
553 | [1, 9], | |||
|
554 | [32, NULL_REVISION], | |||
|
555 | [33, NULL_REVISION], | |||
|
556 | [34, 31], | |||
|
557 | [35, NULL_REVISION], | |||
|
558 | [36, 26], | |||
|
559 | [37, NULL_REVISION], | |||
|
560 | [38, NULL_REVISION], | |||
|
561 | [39, NULL_REVISION], | |||
|
562 | [40, NULL_REVISION], | |||
|
563 | [41, NULL_REVISION], | |||
|
564 | [42, 26], | |||
|
565 | [0, NULL_REVISION], | |||
|
566 | [44, NULL_REVISION], | |||
|
567 | [45, 4], | |||
|
568 | [40, NULL_REVISION], | |||
|
569 | [47, NULL_REVISION], | |||
|
570 | [36, 0], | |||
|
571 | [49, NULL_REVISION], | |||
|
572 | [NULL_REVISION, NULL_REVISION], | |||
|
573 | [51, NULL_REVISION], | |||
|
574 | [52, NULL_REVISION], | |||
|
575 | [53, NULL_REVISION], | |||
|
576 | [14, NULL_REVISION], | |||
|
577 | [55, NULL_REVISION], | |||
|
578 | [15, NULL_REVISION], | |||
|
579 | [23, NULL_REVISION], | |||
|
580 | [58, NULL_REVISION], | |||
|
581 | [59, NULL_REVISION], | |||
|
582 | [2, NULL_REVISION], | |||
|
583 | [61, 59], | |||
|
584 | [62, NULL_REVISION], | |||
|
585 | [63, NULL_REVISION], | |||
|
586 | [NULL_REVISION, NULL_REVISION], | |||
|
587 | [65, NULL_REVISION], | |||
|
588 | [66, NULL_REVISION], | |||
|
589 | [67, NULL_REVISION], | |||
|
590 | [68, NULL_REVISION], | |||
|
591 | [37, 28], | |||
|
592 | [69, 25], | |||
|
593 | [71, NULL_REVISION], | |||
|
594 | [72, NULL_REVISION], | |||
|
595 | [50, 2], | |||
|
596 | [74, NULL_REVISION], | |||
|
597 | [12, NULL_REVISION], | |||
|
598 | [18, NULL_REVISION], | |||
|
599 | [77, NULL_REVISION], | |||
|
600 | [78, NULL_REVISION], | |||
|
601 | [79, NULL_REVISION], | |||
|
602 | [43, 33], | |||
|
603 | [81, NULL_REVISION], | |||
|
604 | [82, NULL_REVISION], | |||
|
605 | [83, NULL_REVISION], | |||
|
606 | [84, 45], | |||
|
607 | [85, NULL_REVISION], | |||
|
608 | [86, NULL_REVISION], | |||
|
609 | [NULL_REVISION, NULL_REVISION], | |||
|
610 | [88, NULL_REVISION], | |||
|
611 | [NULL_REVISION, NULL_REVISION], | |||
|
612 | [76, 83], | |||
|
613 | [44, NULL_REVISION], | |||
|
614 | [92, NULL_REVISION], | |||
|
615 | [93, NULL_REVISION], | |||
|
616 | [9, NULL_REVISION], | |||
|
617 | [95, 67], | |||
|
618 | [96, NULL_REVISION], | |||
|
619 | [97, NULL_REVISION], | |||
|
620 | [NULL_REVISION, NULL_REVISION], | |||
|
621 | ]; | |||
|
622 | let problem_rev = 28 as Revision; | |||
|
623 | let problem_base = 70 as Revision; | |||
|
624 | // making the problem obvious: problem_rev is a parent of problem_base | |||
|
625 | assert_eq!(graph.parents(problem_base).unwrap()[1], problem_rev); | |||
|
626 | ||||
|
627 | let mut missing_ancestors: MissingAncestors<VecGraph> = | |||
|
628 | MissingAncestors::new( | |||
|
629 | graph, | |||
|
630 | [60, 26, 70, 3, 96, 19, 98, 49, 97, 47, 1, 6] | |||
|
631 | .iter() | |||
|
632 | .cloned(), | |||
|
633 | ); | |||
|
634 | assert!(missing_ancestors.bases.contains(&problem_base)); | |||
|
635 | ||||
|
636 | let mut revs: HashSet<Revision> = | |||
|
637 | [4, 12, 41, 28, 68, 38, 1, 30, 56, 44] | |||
|
638 | .iter() | |||
|
639 | .cloned() | |||
|
640 | .collect(); | |||
|
641 | missing_ancestors.remove_ancestors_from(&mut revs).unwrap(); | |||
|
642 | assert!(!revs.contains(&problem_rev)); | |||
|
643 | } | |||
|
644 | ||||
255 | } |
|
645 | } |
@@ -1,24 +1,27 @@
1 | // Copyright 2018 Georges Racinet <gracinet@anybox.fr> |
|
1 | // Copyright 2018 Georges Racinet <gracinet@anybox.fr> | |
2 | // |
|
2 | // | |
3 | // This software may be used and distributed according to the terms of the |
|
3 | // This software may be used and distributed according to the terms of the | |
4 | // GNU General Public License version 2 or any later version. |
|
4 | // GNU General Public License version 2 or any later version. | |
5 | mod ancestors; |
|
5 | mod ancestors; | |
6 | pub use ancestors::AncestorsIterator; |
|
6 | pub use ancestors::{AncestorsIterator, MissingAncestors}; | |
7 |
|
7 | |||
8 | /// Mercurial revision numbers |
|
8 | /// Mercurial revision numbers | |
9 | /// |
|
9 | /// | |
10 | /// As noted in revlog.c, revision numbers are actually encoded in |
|
10 | /// As noted in revlog.c, revision numbers are actually encoded in | |
11 | /// 4 bytes, and are liberally converted to ints, whence the i32 |
|
11 | /// 4 bytes, and are liberally converted to ints, whence the i32 | |
12 | pub type Revision = i32; |
|
12 | pub type Revision = i32; | |
13 |
|
13 | |||
14 | pub const NULL_REVISION: Revision = -1; |
|
14 | pub const NULL_REVISION: Revision = -1; | |
15 |
|
15 | |||
16 | /// The simplest expression of what we need of Mercurial DAGs. |
|
16 | /// The simplest expression of what we need of Mercurial DAGs. | |
17 | pub trait Graph { |
|
17 | pub trait Graph { | |
|
18 | /// Return the two parents of the given `Revision`. | |||
|
19 | /// | |||
|
20 | /// Each of the parents can be independently `NULL_REVISION` | |||
18 | fn parents(&self, Revision) -> Result<[Revision; 2], GraphError>; |
|
21 | fn parents(&self, Revision) -> Result<[Revision; 2], GraphError>; | |
19 | } |
|
22 | } | |
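For concreteness, a minimal sketch of a Graph implementor, mirroring the VecGraph helper used by the Rust unit tests in ancestors.rs (vector index = revision, value = its two parents, either of which may independently be NULL_REVISION). The error handling below is an assumption made for the sketch, not part of the change:

    // Adjacency-list Graph: the parents of revision r live at index r.
    struct SketchGraph(Vec<[Revision; 2]>);

    impl Graph for SketchGraph {
        fn parents(&self, rev: Revision) -> Result<[Revision; 2], GraphError> {
            // NULL_REVISION (-1) and out-of-range revisions are reported
            // as errors by this sketch.
            self.0
                .get(rev as usize)
                .cloned()
                .ok_or(GraphError::ParentOutOfRange(rev))
        }
    }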
20 |
|
23 | |||
21 | #[derive(Clone, Debug, PartialEq)] |
|
24 | #[derive(Clone, Debug, PartialEq)] | |
22 | pub enum GraphError { |
|
25 | pub enum GraphError { | |
23 | ParentOutOfRange(Revision), |
|
26 | ParentOutOfRange(Revision), | |
24 | } |
|
27 | } |
@@ -1,284 +1,343 @@
1 | from __future__ import absolute_import, print_function |
|
1 | from __future__ import absolute_import, print_function | |
2 |
|
2 | |||
3 | import binascii |
|
3 | import binascii | |
4 | import getopt |
|
4 | import getopt | |
5 | import math |
|
5 | import math | |
6 | import os |
|
6 | import os | |
7 | import random |
|
7 | import random | |
8 | import sys |
|
8 | import sys | |
9 | import time |
|
9 | import time | |
10 |
|
10 | |||
11 | from mercurial.node import nullrev |
|
11 | from mercurial.node import nullrev | |
12 | from mercurial import ( |
|
12 | from mercurial import ( | |
13 | ancestor, |
|
13 | ancestor, | |
14 | debugcommands, |
|
14 | debugcommands, | |
15 | hg, |
|
15 | hg, | |
16 | pycompat, |
|
16 | pycompat, | |
17 | ui as uimod, |
|
17 | ui as uimod, | |
18 | util, |
|
18 | util, | |
19 | ) |
|
19 | ) | |
20 |
|
20 | |||
21 | if pycompat.ispy3: |
|
21 | if pycompat.ispy3: | |
22 | long = int |
|
22 | long = int | |
23 | xrange = range |
|
23 | xrange = range | |
24 |
|
24 | |||
25 | def buildgraph(rng, nodes=100, rootprob=0.05, mergeprob=0.2, prevprob=0.7): |
|
25 | def buildgraph(rng, nodes=100, rootprob=0.05, mergeprob=0.2, prevprob=0.7): | |
26 | '''nodes: total number of nodes in the graph |
|
26 | '''nodes: total number of nodes in the graph | |
27 | rootprob: probability that a new node (not 0) will be a root |
|
27 | rootprob: probability that a new node (not 0) will be a root | |
28 | mergeprob: probability that, excluding a root a node will be a merge |
|
28 | mergeprob: probability that, excluding a root a node will be a merge | |
29 | prevprob: probability that p1 will be the previous node |
|
29 | prevprob: probability that p1 will be the previous node | |
30 |
|
30 | |||
31 | return value is a graph represented as an adjacency list. |
|
31 | return value is a graph represented as an adjacency list. | |
32 | ''' |
|
32 | ''' | |
33 | graph = [None] * nodes |
|
33 | graph = [None] * nodes | |
34 | for i in xrange(nodes): |
|
34 | for i in xrange(nodes): | |
35 | if i == 0 or rng.random() < rootprob: |
|
35 | if i == 0 or rng.random() < rootprob: | |
36 | graph[i] = [nullrev] |
|
36 | graph[i] = [nullrev] | |
37 | elif i == 1: |
|
37 | elif i == 1: | |
38 | graph[i] = [0] |
|
38 | graph[i] = [0] | |
39 | elif rng.random() < mergeprob: |
|
39 | elif rng.random() < mergeprob: | |
40 | if i == 2 or rng.random() < prevprob: |
|
40 | if i == 2 or rng.random() < prevprob: | |
41 | # p1 is prev |
|
41 | # p1 is prev | |
42 | p1 = i - 1 |
|
42 | p1 = i - 1 | |
43 | else: |
|
43 | else: | |
44 | p1 = rng.randrange(i - 1) |
|
44 | p1 = rng.randrange(i - 1) | |
45 | p2 = rng.choice(list(range(0, p1)) + list(range(p1 + 1, i))) |
|
45 | p2 = rng.choice(list(range(0, p1)) + list(range(p1 + 1, i))) | |
46 | graph[i] = [p1, p2] |
|
46 | graph[i] = [p1, p2] | |
47 | elif rng.random() < prevprob: |
|
47 | elif rng.random() < prevprob: | |
48 | graph[i] = [i - 1] |
|
48 | graph[i] = [i - 1] | |
49 | else: |
|
49 | else: | |
50 | graph[i] = [rng.randrange(i - 1)] |
|
50 | graph[i] = [rng.randrange(i - 1)] | |
51 |
|
51 | |||
52 | return graph |
|
52 | return graph | |
53 |
|
53 | |||
54 | def buildancestorsets(graph): |
|
54 | def buildancestorsets(graph): | |
55 | ancs = [None] * len(graph) |
|
55 | ancs = [None] * len(graph) | |
56 | for i in xrange(len(graph)): |
|
56 | for i in xrange(len(graph)): | |
57 | ancs[i] = {i} |
|
57 | ancs[i] = {i} | |
58 | if graph[i] == [nullrev]: |
|
58 | if graph[i] == [nullrev]: | |
59 | continue |
|
59 | continue | |
60 | for p in graph[i]: |
|
60 | for p in graph[i]: | |
61 | ancs[i].update(ancs[p]) |
|
61 | ancs[i].update(ancs[p]) | |
62 | return ancs |
|
62 | return ancs | |
63 |
|
63 | |||
64 | class naiveincrementalmissingancestors(object): |
|
64 | class naiveincrementalmissingancestors(object): | |
65 | def __init__(self, ancs, bases): |
|
65 | def __init__(self, ancs, bases): | |
66 | self.ancs = ancs |
|
66 | self.ancs = ancs | |
67 | self.bases = set(bases) |
|
67 | self.bases = set(bases) | |
68 | def addbases(self, newbases): |
|
68 | def addbases(self, newbases): | |
69 | self.bases.update(newbases) |
|
69 | self.bases.update(newbases) | |
70 | def removeancestorsfrom(self, revs): |
|
70 | def removeancestorsfrom(self, revs): | |
71 | for base in self.bases: |
|
71 | for base in self.bases: | |
72 | if base != nullrev: |
|
72 | if base != nullrev: | |
73 | revs.difference_update(self.ancs[base]) |
|
73 | revs.difference_update(self.ancs[base]) | |
74 | revs.discard(nullrev) |
|
74 | revs.discard(nullrev) | |
75 | def missingancestors(self, revs): |
|
75 | def missingancestors(self, revs): | |
76 | res = set() |
|
76 | res = set() | |
77 | for rev in revs: |
|
77 | for rev in revs: | |
78 | if rev != nullrev: |
|
78 | if rev != nullrev: | |
79 | res.update(self.ancs[rev]) |
|
79 | res.update(self.ancs[rev]) | |
80 | for base in self.bases: |
|
80 | for base in self.bases: | |
81 | if base != nullrev: |
|
81 | if base != nullrev: | |
82 | res.difference_update(self.ancs[base]) |
|
82 | res.difference_update(self.ancs[base]) | |
83 | return sorted(res) |
|
83 | return sorted(res) | |
84 |
|
84 | |||
85 | def test_missingancestors(seed, rng): |
|
85 | def test_missingancestors(seed, rng): | |
86 | # empirically observed to take around 1 second |
|
86 | # empirically observed to take around 1 second | |
87 | graphcount = 100 |
|
87 | graphcount = 100 | |
88 | testcount = 10 |
|
88 | testcount = 10 | |
89 | inccount = 10 |
|
89 | inccount = 10 | |
90 | nerrs = [0] |
|
90 | nerrs = [0] | |
91 | # the default mu and sigma give us a nice distribution of mostly |
|
91 | # the default mu and sigma give us a nice distribution of mostly | |
92 | # single-digit counts (including 0) with some higher ones |
|
92 | # single-digit counts (including 0) with some higher ones | |
93 | def lognormrandom(mu, sigma): |
|
93 | def lognormrandom(mu, sigma): | |
94 | return int(math.floor(rng.lognormvariate(mu, sigma))) |
|
94 | return int(math.floor(rng.lognormvariate(mu, sigma))) | |
95 |
|
95 | |||
96 | def samplerevs(nodes, mu=1.1, sigma=0.8): |
|
96 | def samplerevs(nodes, mu=1.1, sigma=0.8): | |
97 | count = min(lognormrandom(mu, sigma), len(nodes)) |
|
97 | count = min(lognormrandom(mu, sigma), len(nodes)) | |
98 | return rng.sample(nodes, count) |
|
98 | return rng.sample(nodes, count) | |
99 |
|
99 | |||
100 | def err(seed, graph, bases, seq, output, expected): |
|
100 | def err(seed, graph, bases, seq, output, expected): | |
101 | if nerrs[0] == 0: |
|
101 | if nerrs[0] == 0: | |
102 | print('seed:', hex(seed)[:-1], file=sys.stderr) |
|
102 | print('seed:', hex(seed)[:-1], file=sys.stderr) | |
103 | if gerrs[0] == 0: |
|
103 | if gerrs[0] == 0: | |
104 | print('graph:', graph, file=sys.stderr) |
|
104 | print('graph:', graph, file=sys.stderr) | |
105 | print('* bases:', bases, file=sys.stderr) |
|
105 | print('* bases:', bases, file=sys.stderr) | |
106 | print('* seq: ', seq, file=sys.stderr) |
|
106 | print('* seq: ', seq, file=sys.stderr) | |
107 | print('* output: ', output, file=sys.stderr) |
|
107 | print('* output: ', output, file=sys.stderr) | |
108 | print('* expected:', expected, file=sys.stderr) |
|
108 | print('* expected:', expected, file=sys.stderr) | |
109 | nerrs[0] += 1 |
|
109 | nerrs[0] += 1 | |
110 | gerrs[0] += 1 |
|
110 | gerrs[0] += 1 | |
111 |
|
111 | |||
112 | for g in xrange(graphcount): |
|
112 | for g in xrange(graphcount): | |
113 | graph = buildgraph(rng) |
|
113 | graph = buildgraph(rng) | |
114 | ancs = buildancestorsets(graph) |
|
114 | ancs = buildancestorsets(graph) | |
115 | gerrs = [0] |
|
115 | gerrs = [0] | |
116 | for _ in xrange(testcount): |
|
116 | for _ in xrange(testcount): | |
117 | # start from nullrev to include it as a possibility |
|
117 | # start from nullrev to include it as a possibility | |
118 | graphnodes = range(nullrev, len(graph)) |
|
118 | graphnodes = range(nullrev, len(graph)) | |
119 | bases = samplerevs(graphnodes) |
|
119 | bases = samplerevs(graphnodes) | |
120 |
|
120 | |||
121 | # fast algorithm |
|
121 | # fast algorithm | |
122 | inc = ancestor.incrementalmissingancestors(graph.__getitem__, bases) |
|
122 | inc = ancestor.incrementalmissingancestors(graph.__getitem__, bases) | |
123 | # reference slow algorithm |
|
123 | # reference slow algorithm | |
124 | naiveinc = naiveincrementalmissingancestors(ancs, bases) |
|
124 | naiveinc = naiveincrementalmissingancestors(ancs, bases) | |
125 | seq = [] |
|
125 | seq = [] | |
126 | revs = [] |
|
126 | revs = [] | |
127 | for _ in xrange(inccount): |
|
127 | for _ in xrange(inccount): | |
128 | if rng.random() < 0.2: |
|
128 | if rng.random() < 0.2: | |
129 | newbases = samplerevs(graphnodes) |
|
129 | newbases = samplerevs(graphnodes) | |
130 | seq.append(('addbases', newbases)) |
|
130 | seq.append(('addbases', newbases)) | |
131 | inc.addbases(newbases) |
|
131 | inc.addbases(newbases) | |
132 | naiveinc.addbases(newbases) |
|
132 | naiveinc.addbases(newbases) | |
133 | if rng.random() < 0.4: |
|
133 | if rng.random() < 0.4: | |
134 | # larger set so that there are more revs to remove from |
|
134 | # larger set so that there are more revs to remove from | |
135 | revs = samplerevs(graphnodes, mu=1.5) |
|
135 | revs = samplerevs(graphnodes, mu=1.5) | |
136 | seq.append(('removeancestorsfrom', revs)) |
|
136 | seq.append(('removeancestorsfrom', revs)) | |
137 | hrevs = set(revs) |
|
137 | hrevs = set(revs) | |
138 | rrevs = set(revs) |
|
138 | rrevs = set(revs) | |
139 | inc.removeancestorsfrom(hrevs) |
|
139 | inc.removeancestorsfrom(hrevs) | |
140 | naiveinc.removeancestorsfrom(rrevs) |
|
140 | naiveinc.removeancestorsfrom(rrevs) | |
141 | if hrevs != rrevs: |
|
141 | if hrevs != rrevs: | |
142 | err(seed, graph, bases, seq, sorted(hrevs), |
|
142 | err(seed, graph, bases, seq, sorted(hrevs), | |
143 | sorted(rrevs)) |
|
143 | sorted(rrevs)) | |
144 | else: |
|
144 | else: | |
145 | revs = samplerevs(graphnodes) |
|
145 | revs = samplerevs(graphnodes) | |
146 | seq.append(('missingancestors', revs)) |
|
146 | seq.append(('missingancestors', revs)) | |
147 | h = inc.missingancestors(revs) |
|
147 | h = inc.missingancestors(revs) | |
148 | r = naiveinc.missingancestors(revs) |
|
148 | r = naiveinc.missingancestors(revs) | |
149 | if h != r: |
|
149 | if h != r: | |
150 | err(seed, graph, bases, seq, h, r) |
|
150 | err(seed, graph, bases, seq, h, r) | |
151 |
|
151 | |||
152 | # graph is a dict of child->parent adjacency lists for this graph: |
|
152 | # graph is a dict of child->parent adjacency lists for this graph: | |
153 | # o 13 |
|
153 | # o 13 | |
154 | # | |
|
154 | # | | |
155 | # | o 12 |
|
155 | # | o 12 | |
156 | # | | |
|
156 | # | | | |
157 | # | | o 11 |
|
157 | # | | o 11 | |
158 | # | | |\ |
|
158 | # | | |\ | |
159 | # | | | | o 10 |
|
159 | # | | | | o 10 | |
160 | # | | | | | |
|
160 | # | | | | | | |
161 | # | o---+ | 9 |
|
161 | # | o---+ | 9 | |
162 | # | | | | | |
|
162 | # | | | | | | |
163 | # o | | | | 8 |
|
163 | # o | | | | 8 | |
164 | # / / / / |
|
164 | # / / / / | |
165 | # | | o | 7 |
|
165 | # | | o | 7 | |
166 | # | | | | |
|
166 | # | | | | | |
167 | # o---+ | 6 |
|
167 | # o---+ | 6 | |
168 | # / / / |
|
168 | # / / / | |
169 | # | | o 5 |
|
169 | # | | o 5 | |
170 | # | |/ |
|
170 | # | |/ | |
171 | # | o 4 |
|
171 | # | o 4 | |
172 | # | | |
|
172 | # | | | |
173 | # o | 3 |
|
173 | # o | 3 | |
174 | # | | |
|
174 | # | | | |
175 | # | o 2 |
|
175 | # | o 2 | |
176 | # |/ |
|
176 | # |/ | |
177 | # o 1 |
|
177 | # o 1 | |
178 | # | |
|
178 | # | | |
179 | # o 0 |
|
179 | # o 0 | |
180 |
|
180 | |||
181 | graph = {0: [-1, -1], 1: [0, -1], 2: [1, -1], 3: [1, -1], 4: [2, -1], |
|
181 | graph = {0: [-1, -1], 1: [0, -1], 2: [1, -1], 3: [1, -1], 4: [2, -1], | |
182 | 5: [4, -1], 6: [4, -1], 7: [4, -1], 8: [-1, -1], 9: [6, 7], |
|
182 | 5: [4, -1], 6: [4, -1], 7: [4, -1], 8: [-1, -1], 9: [6, 7], | |
183 | 10: [5, -1], 11: [3, 7], 12: [9, -1], 13: [8, -1]} |
|
183 | 10: [5, -1], 11: [3, 7], 12: [9, -1], 13: [8, -1]} | |
184 |
|
184 | |||
|
185 | def test_missingancestors_explicit(): | |||
|
186 | """A few explicit cases, easier to check for catching errors in refactors. | |||
|
187 | ||||
|
188 | The bigger graph at the end has been produced by the random generator | |||
|
189 | above, and we have some evidence that the other tests don't cover it. | |||
|
190 | """ | |||
|
191 | for i, (bases, revs) in enumerate((({1, 2, 3, 4, 7}, set(xrange(10))), | |||
|
192 | ({10}, set({11, 12, 13, 14})), | |||
|
193 | ({7}, set({1, 2, 3, 4, 5})), | |||
|
194 | )): | |||
|
195 | print("%% removeancestorsfrom(), example %d" % (i + 1)) | |||
|
196 | missanc = ancestor.incrementalmissingancestors(graph.get, bases) | |||
|
197 | missanc.removeancestorsfrom(revs) | |||
|
198 | print("remaining (sorted): %s" % sorted(list(revs))) | |||
|
199 | ||||
|
200 | for i, (bases, revs) in enumerate((({10}, {11}), | |||
|
201 | ({11}, {10}), | |||
|
202 | ({7}, {9, 11}), | |||
|
203 | )): | |||
|
204 | print("%% missingancestors(), example %d" % (i + 1)) | |||
|
205 | missanc = ancestor.incrementalmissingancestors(graph.get, bases) | |||
|
206 | print("return %s" % missanc.missingancestors(revs)) | |||
|
207 | ||||
|
208 | print("% removeancestorsfrom(), bigger graph") | |||
|
209 | vecgraph = [ | |||
|
210 | [-1, -1], [0, -1], [1, 0], [2, 1], [3, -1], [4, -1], [5, 1], | |||
|
211 | [2, -1], [7, -1], [8, -1], [9, -1], [10, 1], [3, -1], [12, -1], | |||
|
212 | [13, -1], [14, -1], [4, -1], [16, -1], [17, -1], [18, -1], | |||
|
213 | [19, 11], [20, -1], [21, -1], [22, -1], [23, -1], [2, -1], | |||
|
214 | [3, -1], [26, 24], [27, -1], [28, -1], [12, -1], [1, -1], [1, 9], | |||
|
215 | [32, -1], [33, -1], [34, 31], [35, -1], [36, 26], [37, -1], | |||
|
216 | [38, -1], [39, -1], [40, -1], [41, -1], [42, 26], [0, -1], | |||
|
217 | [44, -1], [45, 4], [40, -1], [47, -1], [36, 0], [49, -1], | |||
|
218 | [-1, -1], [51, -1], [52, -1], [53, -1], [14, -1], | |||
|
219 | [55, -1], [15, -1], [23, -1], [58, -1], [59, -1], [2, -1], | |||
|
220 | [61, 59], [62, -1], [63, -1], [-1, -1], [65, -1], | |||
|
221 | [66, -1], [67, -1], [68, -1], [37, 28], [69, 25], | |||
|
222 | [71, -1], [72, -1], [50, 2], [74, -1], [12, -1], | |||
|
223 | [18, -1], [77, -1], [78, -1], [79, -1], [43, 33], | |||
|
224 | [81, -1], [82, -1], [83, -1], [84, 45], [85, -1], | |||
|
225 | [86, -1], [-1, -1], [88, -1], [-1, -1], [76, 83], [44, -1], | |||
|
226 | [92, -1], [93, -1], [9, -1], [95, 67], [96, -1], [97, -1], | |||
|
227 | [-1, -1]] | |||
|
228 | problem_rev = 28 | |||
|
229 | problem_base = 70 | |||
|
230 | # problem_rev is a parent of problem_base, but a faulty implementation | |||
|
231 | # could forget to remove it. | |||
|
232 | bases = {60, 26, 70, 3, 96, 19, 98, 49, 97, 47, 1, 6} | |||
|
233 | if problem_rev not in vecgraph[problem_base] or problem_base not in bases: | |||
|
234 | print("Conditions have changed") | |||
|
235 | missanc = ancestor.incrementalmissingancestors(vecgraph.__getitem__, bases) | |||
|
236 | revs = {4, 12, 41, 28, 68, 38, 1, 30, 56, 44} | |||
|
237 | missanc.removeancestorsfrom(revs) | |||
|
238 | if 28 in revs: | |||
|
239 | print("Failed!") | |||
|
240 | else: | |||
|
241 | print("Ok") | |||
|
242 | ||||
185 | def genlazyancestors(revs, stoprev=0, inclusive=False): |
|
243 | def genlazyancestors(revs, stoprev=0, inclusive=False): | |
186 | print(("%% lazy ancestor set for %s, stoprev = %s, inclusive = %s" % |
|
244 | print(("%% lazy ancestor set for %s, stoprev = %s, inclusive = %s" % | |
187 | (revs, stoprev, inclusive))) |
|
245 | (revs, stoprev, inclusive))) | |
188 | return ancestor.lazyancestors(graph.get, revs, stoprev=stoprev, |
|
246 | return ancestor.lazyancestors(graph.get, revs, stoprev=stoprev, | |
189 | inclusive=inclusive) |
|
247 | inclusive=inclusive) | |
190 |
|
248 | |||
191 | def printlazyancestors(s, l): |
|
249 | def printlazyancestors(s, l): | |
192 | print('membership: %r' % [n for n in l if n in s]) |
|
250 | print('membership: %r' % [n for n in l if n in s]) | |
193 | print('iteration: %r' % list(s)) |
|
251 | print('iteration: %r' % list(s)) | |
194 |
|
252 | |||
195 | def test_lazyancestors(): |
|
253 | def test_lazyancestors(): | |
196 | # Empty revs |
|
254 | # Empty revs | |
197 | s = genlazyancestors([]) |
|
255 | s = genlazyancestors([]) | |
198 | printlazyancestors(s, [3, 0, -1]) |
|
256 | printlazyancestors(s, [3, 0, -1]) | |
199 |
|
257 | |||
200 | # Standard example |
|
258 | # Standard example | |
201 | s = genlazyancestors([11, 13]) |
|
259 | s = genlazyancestors([11, 13]) | |
202 | printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0]) |
|
260 | printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0]) | |
203 |
|
261 | |||
204 | # Standard with ancestry in the initial set (1 is ancestor of 3) |
|
262 | # Standard with ancestry in the initial set (1 is ancestor of 3) | |
205 | s = genlazyancestors([1, 3]) |
|
263 | s = genlazyancestors([1, 3]) | |
206 | printlazyancestors(s, [1, -1, 0]) |
|
264 | printlazyancestors(s, [1, -1, 0]) | |
207 |
|
265 | |||
208 | # Including revs |
|
266 | # Including revs | |
209 | s = genlazyancestors([11, 13], inclusive=True) |
|
267 | s = genlazyancestors([11, 13], inclusive=True) | |
210 | printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0]) |
|
268 | printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0]) | |
211 |
|
269 | |||
212 | # Test with stoprev |
|
270 | # Test with stoprev | |
213 | s = genlazyancestors([11, 13], stoprev=6) |
|
271 | s = genlazyancestors([11, 13], stoprev=6) | |
214 | printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0]) |
|
272 | printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0]) | |
215 | s = genlazyancestors([11, 13], stoprev=6, inclusive=True) |
|
273 | s = genlazyancestors([11, 13], stoprev=6, inclusive=True) | |
216 | printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0]) |
|
274 | printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0]) | |
217 |
|
275 | |||
218 | # Test with stoprev >= min(initrevs) |
|
276 | # Test with stoprev >= min(initrevs) | |
219 | s = genlazyancestors([11, 13], stoprev=11, inclusive=True) |
|
277 | s = genlazyancestors([11, 13], stoprev=11, inclusive=True) | |
220 | printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0]) |
|
278 | printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0]) | |
221 | s = genlazyancestors([11, 13], stoprev=12, inclusive=True) |
|
279 | s = genlazyancestors([11, 13], stoprev=12, inclusive=True) | |
222 | printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0]) |
|
280 | printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0]) | |
223 |
|
281 | |||
224 | # Contiguous chains: 5->4, 2->1 (where 1 is in seen set), 1->0 |
|
282 | # Contiguous chains: 5->4, 2->1 (where 1 is in seen set), 1->0 | |
225 | s = genlazyancestors([10, 1], inclusive=True) |
|
283 | s = genlazyancestors([10, 1], inclusive=True) | |
226 | printlazyancestors(s, [2, 10, 4, 5, -1, 0, 1]) |
|
284 | printlazyancestors(s, [2, 10, 4, 5, -1, 0, 1]) | |
227 |
|
285 | |||
228 | # The C gca algorithm requires a real repo. These are textual descriptions of |
|
286 | # The C gca algorithm requires a real repo. These are textual descriptions of | |
229 | # DAGs that have been known to be problematic, and, optionally, known pairs |
|
287 | # DAGs that have been known to be problematic, and, optionally, known pairs | |
230 | # of revisions and their expected ancestor list. |
|
288 | # of revisions and their expected ancestor list. | |
231 | dagtests = [ |
|
289 | dagtests = [ | |
232 | (b'+2*2*2/*3/2', {}), |
|
290 | (b'+2*2*2/*3/2', {}), | |
233 | (b'+3*3/*2*2/*4*4/*4/2*4/2*2', {}), |
|
291 | (b'+3*3/*2*2/*4*4/*4/2*4/2*2', {}), | |
234 | (b'+2*2*/2*4*/4*/3*2/4', {(6, 7): [3, 5]}), |
|
292 | (b'+2*2*/2*4*/4*/3*2/4', {(6, 7): [3, 5]}), | |
235 | ] |
|
293 | ] | |
236 | def test_gca(): |
|
294 | def test_gca(): | |
237 | u = uimod.ui.load() |
|
295 | u = uimod.ui.load() | |
238 | for i, (dag, tests) in enumerate(dagtests): |
|
296 | for i, (dag, tests) in enumerate(dagtests): | |
239 | repo = hg.repository(u, b'gca%d' % i, create=1) |
|
297 | repo = hg.repository(u, b'gca%d' % i, create=1) | |
240 | cl = repo.changelog |
|
298 | cl = repo.changelog | |
241 | if not util.safehasattr(cl.index, 'ancestors'): |
|
299 | if not util.safehasattr(cl.index, 'ancestors'): | |
242 | # C version not available |
|
300 | # C version not available | |
243 | return |
|
301 | return | |
244 |
|
302 | |||
245 | debugcommands.debugbuilddag(u, repo, dag) |
|
303 | debugcommands.debugbuilddag(u, repo, dag) | |
246 | # Compare the results of the Python and C versions. This does not |
|
304 | # Compare the results of the Python and C versions. This does not | |
247 | # include choosing a winner when more than one gca exists -- we make |
|
305 | # include choosing a winner when more than one gca exists -- we make | |
248 | # sure both return exactly the same set of gcas. |
|
306 | # sure both return exactly the same set of gcas. | |
249 | # Also compare against expected results, if available. |
|
307 | # Also compare against expected results, if available. | |
250 | for a in cl: |
|
308 | for a in cl: | |
251 | for b in cl: |
|
309 | for b in cl: | |
252 | cgcas = sorted(cl.index.ancestors(a, b)) |
|
310 | cgcas = sorted(cl.index.ancestors(a, b)) | |
253 | pygcas = sorted(ancestor.ancestors(cl.parentrevs, a, b)) |
|
311 | pygcas = sorted(ancestor.ancestors(cl.parentrevs, a, b)) | |
254 | expected = None |
|
312 | expected = None | |
255 | if (a, b) in tests: |
|
313 | if (a, b) in tests: | |
256 | expected = tests[(a, b)] |
|
314 | expected = tests[(a, b)] | |
257 | if cgcas != pygcas or (expected and cgcas != expected): |
|
315 | if cgcas != pygcas or (expected and cgcas != expected): | |
258 | print("test_gca: for dag %s, gcas for %d, %d:" |
|
316 | print("test_gca: for dag %s, gcas for %d, %d:" | |
259 | % (dag, a, b)) |
|
317 | % (dag, a, b)) | |
260 | print(" C returned: %s" % cgcas) |
|
318 | print(" C returned: %s" % cgcas) | |
261 | print(" Python returned: %s" % pygcas) |
|
319 | print(" Python returned: %s" % pygcas) | |
262 | if expected: |
|
320 | if expected: | |
263 | print(" expected: %s" % expected) |
|
321 | print(" expected: %s" % expected) | |
264 |
|
322 | |||
265 | def main(): |
|
323 | def main(): | |
266 | seed = None |
|
324 | seed = None | |
267 | opts, args = getopt.getopt(sys.argv[1:], 's:', ['seed=']) |
|
325 | opts, args = getopt.getopt(sys.argv[1:], 's:', ['seed=']) | |
268 | for o, a in opts: |
|
326 | for o, a in opts: | |
269 | if o in ('-s', '--seed'): |
|
327 | if o in ('-s', '--seed'): | |
270 | seed = long(a, base=0) # accepts base 10 or 16 strings |
|
328 | seed = long(a, base=0) # accepts base 10 or 16 strings | |
271 |
|
329 | |||
272 | if seed is None: |
|
330 | if seed is None: | |
273 | try: |
|
331 | try: | |
274 | seed = long(binascii.hexlify(os.urandom(16)), 16) |
|
332 | seed = long(binascii.hexlify(os.urandom(16)), 16) | |
275 | except AttributeError: |
|
333 | except AttributeError: | |
276 | seed = long(time.time() * 1000) |
|
334 | seed = long(time.time() * 1000) | |
277 |
|
335 | |||
278 | rng = random.Random(seed) |
|
336 | rng = random.Random(seed) | |
|
337 | test_missingancestors_explicit() | |||
279 | test_missingancestors(seed, rng) |
|
338 | test_missingancestors(seed, rng) | |
280 | test_lazyancestors() |
|
339 | test_lazyancestors() | |
281 | test_gca() |
|
340 | test_gca() | |
282 |
|
341 | |||
283 | if __name__ == '__main__': |
|
342 | if __name__ == '__main__': | |
284 | main() |
|
343 | main() |
@@ -1,27 +1,41 @@
|
1 | % removeancestorsfrom(), example 1 | |||
|
2 | remaining (sorted): [5, 6, 8, 9] | |||
|
3 | % removeancestorsfrom(), example 2 | |||
|
4 | remaining (sorted): [11, 12, 13, 14] | |||
|
5 | % removeancestorsfrom(), example 3 | |||
|
6 | remaining (sorted): [3, 5] | |||
|
7 | % missingancestors(), example 1 | |||
|
8 | return [3, 7, 11] | |||
|
9 | % missingancestors(), example 2 | |||
|
10 | return [5, 10] | |||
|
11 | % missingancestors(), example 3 | |||
|
12 | return [3, 6, 9, 11] | |||
|
13 | % removeancestorsfrom(), bigger graph | |||
|
14 | Ok | |||
1 | % lazy ancestor set for [], stoprev = 0, inclusive = False |
|
15 | % lazy ancestor set for [], stoprev = 0, inclusive = False | |
2 | membership: [] |
|
16 | membership: [] | |
3 | iteration: [] |
|
17 | iteration: [] | |
4 | % lazy ancestor set for [11, 13], stoprev = 0, inclusive = False |
|
18 | % lazy ancestor set for [11, 13], stoprev = 0, inclusive = False | |
5 | membership: [7, 8, 3, 4, 1, 0] |
|
19 | membership: [7, 8, 3, 4, 1, 0] | |
6 | iteration: [8, 7, 4, 3, 2, 1, 0] |
|
20 | iteration: [8, 7, 4, 3, 2, 1, 0] | |
7 | % lazy ancestor set for [1, 3], stoprev = 0, inclusive = False |
|
21 | % lazy ancestor set for [1, 3], stoprev = 0, inclusive = False | |
8 | membership: [1, 0] |
|
22 | membership: [1, 0] | |
9 | iteration: [1, 0] |
|
23 | iteration: [1, 0] | |
10 | % lazy ancestor set for [11, 13], stoprev = 0, inclusive = True |
|
24 | % lazy ancestor set for [11, 13], stoprev = 0, inclusive = True | |
11 | membership: [11, 13, 7, 8, 3, 4, 1, 0] |
|
25 | membership: [11, 13, 7, 8, 3, 4, 1, 0] | |
12 | iteration: [13, 11, 8, 7, 4, 3, 2, 1, 0] |
|
26 | iteration: [13, 11, 8, 7, 4, 3, 2, 1, 0] | |
13 | % lazy ancestor set for [11, 13], stoprev = 6, inclusive = False |
|
27 | % lazy ancestor set for [11, 13], stoprev = 6, inclusive = False | |
14 | membership: [7, 8] |
|
28 | membership: [7, 8] | |
15 | iteration: [8, 7] |
|
29 | iteration: [8, 7] | |
16 | % lazy ancestor set for [11, 13], stoprev = 6, inclusive = True |
|
30 | % lazy ancestor set for [11, 13], stoprev = 6, inclusive = True | |
17 | membership: [11, 13, 7, 8] |
|
31 | membership: [11, 13, 7, 8] | |
18 | iteration: [13, 11, 8, 7] |
|
32 | iteration: [13, 11, 8, 7] | |
19 | % lazy ancestor set for [11, 13], stoprev = 11, inclusive = True |
|
33 | % lazy ancestor set for [11, 13], stoprev = 11, inclusive = True | |
20 | membership: [11, 13] |
|
34 | membership: [11, 13] | |
21 | iteration: [13, 11] |
|
35 | iteration: [13, 11] | |
22 | % lazy ancestor set for [11, 13], stoprev = 12, inclusive = True |
|
36 | % lazy ancestor set for [11, 13], stoprev = 12, inclusive = True | |
23 | membership: [13] |
|
37 | membership: [13] | |
24 | iteration: [13] |
|
38 | iteration: [13] | |
25 | % lazy ancestor set for [10, 1], stoprev = 0, inclusive = True |
|
39 | % lazy ancestor set for [10, 1], stoprev = 0, inclusive = True | |
26 | membership: [2, 10, 4, 5, 0, 1] |
|
40 | membership: [2, 10, 4, 5, 0, 1] | |
27 | iteration: [10, 5, 4, 2, 1, 0] |
|
41 | iteration: [10, 5, 4, 2, 1, 0] |