rust-pyo3: MissingAncestors
Georges Racinet
r53432:507fec66 default
@@ -1,164 +1,292 @@
 // ancestors.rs
 //
 // Copyright 2024 Georges Racinet <georges.racinet@cloudcrane.io>
 //
 // This software may be used and distributed according to the terms of the
 // GNU General Public License version 2 or any later version.
 
 //! Bindings for the `hg::ancestors` module provided by the
 //! `hg-core` crate. From Python, this will be seen as `pyo3_rustext.ancestor`
 //! and can be used as replacement for the pure `ancestor` Python module.
 use cpython::UnsafePyLeaked;
 use pyo3::prelude::*;
+use pyo3::types::PyTuple;
 
+use std::collections::HashSet;
 use std::sync::RwLock;
 
+use hg::MissingAncestors as CoreMissing;
 use vcsgraph::lazy_ancestors::{
     AncestorsIterator as VCGAncestorsIterator,
     LazyAncestors as VCGLazyAncestors,
 };
 
 use crate::convert_cpython::{
     proxy_index_py_leak, py_leaked_borrow, py_leaked_borrow_mut,
     py_leaked_or_map_err,
 };
 use crate::exceptions::{map_lock_error, GraphError};
 use crate::revision::{rev_pyiter_collect_with_py_index, PyRevision};
 use crate::util::new_submodule;
 use rusthg::revlog::PySharedIndex;
 
 #[pyclass]
 struct AncestorsIterator {
     inner: RwLock<UnsafePyLeaked<VCGAncestorsIterator<PySharedIndex>>>,
 }
 
 #[pymethods]
 impl AncestorsIterator {
     #[new]
     fn new(
         index_proxy: &Bound<'_, PyAny>,
         initrevs: &Bound<'_, PyAny>,
         stoprev: PyRevision,
         inclusive: bool,
     ) -> PyResult<Self> {
         let initvec: Vec<_> =
             rev_pyiter_collect_with_py_index(initrevs, index_proxy)?;
         let (py, leaked_idx) = proxy_index_py_leak(index_proxy)?;
         let res_ait = unsafe {
             leaked_idx.map(py, |idx| {
                 VCGAncestorsIterator::new(
                     idx,
                     initvec.into_iter().map(|r| r.0),
                     stoprev.0,
                     inclusive,
                 )
             })
         };
         let ait =
             py_leaked_or_map_err(py, res_ait, GraphError::from_vcsgraph)?;
         let inner = ait.into();
         Ok(Self { inner })
     }
 
     fn __iter__(slf: PyRef<'_, Self>) -> PyRef<'_, Self> {
         slf
     }
 
     fn __next__(slf: PyRefMut<'_, Self>) -> PyResult<Option<PyRevision>> {
         let mut leaked = slf.inner.write().map_err(map_lock_error)?;
         // Safety: we don't leak the inner 'static ref out of UnsafePyLeaked
         let mut inner = unsafe { py_leaked_borrow_mut(&slf, &mut leaked)? };
         match inner.next() {
             Some(Err(e)) => Err(GraphError::from_vcsgraph(e)),
             None => Ok(None),
             Some(Ok(r)) => Ok(Some(PyRevision(r))),
         }
     }
 }
 
 #[pyclass(sequence)]
 struct LazyAncestors {
     inner: RwLock<UnsafePyLeaked<VCGLazyAncestors<PySharedIndex>>>,
     proxy_index: PyObject,
     initrevs: PyObject,
     stoprev: PyRevision,
     inclusive: bool,
 }
 
 #[pymethods]
 impl LazyAncestors {
     #[new]
     fn new(
         index_proxy: &Bound<'_, PyAny>,
         initrevs: &Bound<'_, PyAny>,
         stoprev: PyRevision,
         inclusive: bool,
     ) -> PyResult<Self> {
         let cloned_proxy = index_proxy.clone().unbind();
         let initvec: Vec<_> =
             rev_pyiter_collect_with_py_index(initrevs, index_proxy)?;
         let (py, leaked_idx) = proxy_index_py_leak(index_proxy)?;
         // Safety: we don't leak the "faked" reference out of
         // `UnsafePyLeaked`
         let res_lazy = unsafe {
             leaked_idx.map(py, |idx| {
                 VCGLazyAncestors::new(
                     idx,
                     initvec.into_iter().map(|r| r.0),
                     stoprev.0,
                     inclusive,
                 )
             })
         };
         let lazy =
             py_leaked_or_map_err(py, res_lazy, GraphError::from_vcsgraph)?;
         Ok(Self {
             inner: lazy.into(),
             proxy_index: cloned_proxy,
             initrevs: initrevs.clone().unbind(),
             stoprev,
             inclusive,
         })
     }
 
     fn __bool__(slf: PyRef<'_, Self>) -> PyResult<bool> {
         let leaked = slf.inner.read().map_err(map_lock_error)?;
         // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
         let inner = unsafe { py_leaked_borrow(&slf, &leaked) }?;
         Ok(!inner.is_empty())
     }
 
     fn __contains__(
         slf: PyRefMut<'_, Self>,
         obj: &Bound<'_, PyAny>,
     ) -> PyResult<bool> {
         PyRevision::extract_bound(obj).map_or(Ok(false), |rev| {
             let mut leaked = slf.inner.write().map_err(map_lock_error)?;
             // Safety: we don't leak the "faked" reference out of
             // `UnsafePyLeaked`
             let mut inner =
                 unsafe { py_leaked_borrow_mut(&slf, &mut leaked) }?;
             inner.contains(rev.0).map_err(GraphError::from_vcsgraph)
         })
     }
 
     fn __iter__(slf: PyRef<'_, Self>) -> PyResult<AncestorsIterator> {
         let py = slf.py();
         AncestorsIterator::new(
             slf.proxy_index.clone_ref(py).bind(py),
             slf.initrevs.clone_ref(py).bind(py),
             slf.stoprev,
             slf.inclusive,
         )
     }
 }
 
+#[pyclass]
+struct MissingAncestors {
+    inner: RwLock<UnsafePyLeaked<CoreMissing<PySharedIndex>>>,
+    proxy_index: PyObject,
+}
+
+#[pymethods]
+impl MissingAncestors {
+    #[new]
+    fn new(
+        index_proxy: &Bound<'_, PyAny>,
+        bases: &Bound<'_, PyAny>,
+    ) -> PyResult<Self> {
+        let cloned_proxy = index_proxy.clone().unbind();
+        let bases_vec: Vec<_> =
+            rev_pyiter_collect_with_py_index(bases, index_proxy)?;
+        let (py, leaked_idx) = proxy_index_py_leak(index_proxy)?;
+
+        // Safety: we don't leak the "faked" reference out of
+        // `UnsafePyLeaked`
+        let inner = unsafe {
+            leaked_idx.map(py, |idx| CoreMissing::new(idx, bases_vec))
+        };
+        Ok(Self {
+            inner: inner.into(),
+            proxy_index: cloned_proxy,
+        })
+    }
+
+    fn hasbases(slf: PyRef<'_, Self>) -> PyResult<bool> {
+        let leaked = slf.inner.read().map_err(map_lock_error)?;
+        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
+        let inner = unsafe { py_leaked_borrow(&slf, &leaked) }?;
+        Ok(inner.has_bases())
+    }
+
+    fn addbases(
+        slf: PyRefMut<'_, Self>,
+        bases: &Bound<'_, PyAny>,
+    ) -> PyResult<()> {
+        let index_proxy = slf.proxy_index.bind(slf.py());
+        let bases_vec: Vec<_> =
+            rev_pyiter_collect_with_py_index(bases, index_proxy)?;
+
+        let mut leaked = slf.inner.write().map_err(map_lock_error)?;
+        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
+        let mut inner = unsafe { py_leaked_borrow_mut(&slf, &mut leaked) }?;
+        inner.add_bases(bases_vec);
+        Ok(())
+    }
+
+    fn bases(slf: PyRef<'_, Self>) -> PyResult<HashSet<PyRevision>> {
+        let leaked = slf.inner.read().map_err(map_lock_error)?;
+        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
+        let inner = unsafe { py_leaked_borrow(&slf, &leaked) }?;
+        Ok(inner.get_bases().iter().map(|r| PyRevision(r.0)).collect())
+    }
+
+    fn basesheads(slf: PyRef<'_, Self>) -> PyResult<HashSet<PyRevision>> {
+        let leaked = slf.inner.read().map_err(map_lock_error)?;
+        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
+        let inner = unsafe { py_leaked_borrow(&slf, &leaked) }?;
+        Ok(inner
+            .bases_heads()
+            .map_err(GraphError::from_hg)?
+            .iter()
+            .map(|r| PyRevision(r.0))
+            .collect())
+    }
+
+    fn removeancestorsfrom(
+        slf: PyRef<'_, Self>,
+        revs: &Bound<'_, PyAny>,
+    ) -> PyResult<()> {
+        // Original comment from hg-cpython:
+        // this is very lame: we convert to a Rust set, update it in place
+        // and then convert back to Python, only to have Python remove the
+        // excess (thankfully, Python is happy with a list or even an
+        // iterator)
+        // Ideas to improve this:
+        // - have CoreMissing instead emit the revisions to discard
+        // - define a trait for sets of revisions in the core and implement
+        //   it for a Python set rewrapped with the GIL marker
+        // PyO3 additional comment: the trait approach would probably be
+        // simpler because we can implement it without a Py wrapper, just
+        // on &Bound<'py, PySet>
+        let index_proxy = slf.proxy_index.bind(slf.py());
+        let mut revs_set: HashSet<_> =
+            rev_pyiter_collect_with_py_index(revs, index_proxy)?;
+
+        let mut leaked = slf.inner.write().map_err(map_lock_error)?;
+        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
+        let mut inner = unsafe { py_leaked_borrow_mut(&slf, &mut leaked) }?;
+
+        inner
+            .remove_ancestors_from(&mut revs_set)
+            .map_err(GraphError::from_hg)?;
+        // convert to a Python tuple and discard the excess from the
+        // original `revs`
+        let remaining_tuple =
+            PyTuple::new(slf.py(), revs_set.iter().map(|r| PyRevision(r.0)))?;
+        revs.call_method("intersection_update", (remaining_tuple,), None)?;
+        Ok(())
+    }
+
+    fn missingancestors(
+        slf: PyRefMut<'_, Self>,
+        bases: &Bound<'_, PyAny>,
+    ) -> PyResult<Vec<PyRevision>> {
+        let index_proxy = slf.proxy_index.bind(slf.py());
+        let revs_vec: Vec<_> =
+            rev_pyiter_collect_with_py_index(bases, index_proxy)?;
+
+        let mut leaked = slf.inner.write().map_err(map_lock_error)?;
+        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
+        let mut inner = unsafe { py_leaked_borrow_mut(&slf, &mut leaked) }?;
+
+        let missing_vec = inner
+            .missing_ancestors(revs_vec)
+            .map_err(GraphError::from_hg)?;
+        Ok(missing_vec.iter().map(|r| PyRevision(r.0)).collect())
+    }
+}
+
 pub fn init_module<'py>(
     py: Python<'py>,
     package: &str,
 ) -> PyResult<Bound<'py, PyModule>> {
     let m = new_submodule(py, package, "ancestor")?;
     m.add_class::<AncestorsIterator>()?;
     m.add_class::<LazyAncestors>()?;
+    m.add_class::<MissingAncestors>()?;
     Ok(m)
 }
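
The new `MissingAncestors` class is exercised from Python exactly as in the tests below. A minimal sketch, assuming `idx` is a rust index proxy such as the one returned by the test harness's `parserustindex()` helper (an assumption for illustration; the revision numbers refer to the simple embedded test index):

from mercurial import pyo3_rustext

MissingAncestors = pyo3_rustext.ancestor.MissingAncestors

# `idx` is assumed to be a rust index proxy, e.g. from the test harness.
missanc = MissingAncestors(idx, [1])   # revision 1 is the initial base
missanc.hasbases()                     # True
missanc.missingancestors([3])          # [2, 3]
missanc.addbases({2})
missanc.bases()                        # {1, 2}
missanc.basesheads()                   # {2}

revs = {0, 1, 2, 3}
missanc.removeancestorsfrom(revs)      # revs is updated in place to {2, 3}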
@@ -1,221 +1,221 @@
 import sys
 
 from mercurial.node import wdirrev
 
 from mercurial.testing import revlog as revlogtesting
 
 try:
     from mercurial import pyo3_rustext, rustext
 
     rustext.__name__  # trigger immediate actual import
     pyo3_rustext.__name__
 except ImportError:
     rustext = pyo3_rustext = None
 
 try:
     from mercurial.cext import parsers as cparsers
 except ImportError:
     cparsers = None
 
 
 class RustAncestorsTestMixin:
     """Test the correctness of binding to Rust code.
 
     This test is merely for the binding to Rust itself: extraction of
     Python variables, giving back the results, etc.
 
     It is not meant to test the algorithmic correctness of the operations
     on ancestors it provides. Hence the very simple embedded index data is
     good enough.
 
     Algorithmic correctness is asserted by the Rust unit tests.
 
     At this point, we have two sets of bindings, in `hg-cpython` and
     `hg-pyo3`. This class used to be for the first and now contains
     the tests that are identical in both bindings. As of this writing,
     there are more implementations in `hg-cpython` than in `hg-pyo3`, hence
     some more tests in the subclass for `hg-cpython`. When the work on PyO3
     is complete, the subclass for `hg-cpython` should have no specific
     tests left. Later on, when we remove the dead code in `hg-cpython`, the
     tests should migrate from the mixin to the class for `hg-pyo3`, until
     we can simply remove the mixin.
     """
 
     @classmethod
     def ancestors_mod(cls):
         return cls.rustext_pkg.ancestor
 
     @classmethod
     def dagop_mod(cls):
         return cls.rustext_pkg.dagop
 
     @classmethod
     def graph_error(cls):
         return cls.rustext_pkg.GraphError
 
     def testiteratorrevlist(self):
         AncestorsIterator = self.ancestors_mod().AncestorsIterator
 
         idx = self.parserustindex()
         # checking test assumption about the index binary data:
         self.assertEqual(
             {i: (r[5], r[6]) for i, r in enumerate(idx)},
             {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (2, -1)},
         )
         ait = AncestorsIterator(idx, [3], 0, True)
         self.assertEqual([r for r in ait], [3, 2, 1, 0])
 
         ait = AncestorsIterator(idx, [3], 0, False)
         self.assertEqual([r for r in ait], [2, 1, 0])
 
         ait = AncestorsIterator(idx, [3], 0, False)
         # tainting the index with a mutation, let's see what happens
         # (should be more critical with AncestorsIterator)
         del idx[0:2]
         try:
             next(ait)
         except RuntimeError as exc:
             assert "leaked reference after mutation" in exc.args[0]
         else:
             raise AssertionError("Expected an exception")
 
     def testlazyancestors(self):
         LazyAncestors = self.ancestors_mod().LazyAncestors
 
         idx = self.parserustindex()
         start_count = sys.getrefcount(idx.inner)  # should be 2 (see Python doc)
         self.assertEqual(
             {i: (r[5], r[6]) for i, r in enumerate(idx)},
             {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (2, -1)},
         )
         lazy = LazyAncestors(idx, [3], 0, True)
         # the LazyAncestors instance holds just one reference to the
         # inner revlog. TODO check that this is normal
         self.assertEqual(sys.getrefcount(idx.inner), start_count + 1)
 
         self.assertTrue(2 in lazy)
         self.assertTrue(bool(lazy))
         self.assertFalse(None in lazy)
         self.assertEqual(list(lazy), [3, 2, 1, 0])
         # a second time to validate that we spawn new iterators
         self.assertEqual(list(lazy), [3, 2, 1, 0])
 
         # now let's watch the refcounts closer
         ait = iter(lazy)
         self.assertEqual(sys.getrefcount(idx.inner), start_count + 2)
         del ait
         self.assertEqual(sys.getrefcount(idx.inner), start_count + 1)
         del lazy
         self.assertEqual(sys.getrefcount(idx.inner), start_count)
 
         # let's check bool for an empty one
         self.assertFalse(LazyAncestors(idx, [0], 0, False))
 
     def testrefcount(self):
         AncestorsIterator = self.ancestors_mod().AncestorsIterator
 
         idx = self.parserustindex()
         start_count = sys.getrefcount(idx.inner)
 
         # refcount increases upon iterator init...
         ait = AncestorsIterator(idx, [3], 0, True)
         self.assertEqual(sys.getrefcount(idx.inner), start_count + 1)
         self.assertEqual(next(ait), 3)
 
         # and decreases once the iterator is removed
         del ait
         self.assertEqual(sys.getrefcount(idx.inner), start_count)
 
         # and removing ref to the index after iterator init is no issue
         ait = AncestorsIterator(idx, [3], 0, True)
         del idx
         self.assertEqual(list(ait), [3, 2, 1, 0])
 
         # the index is not tracked by the GC, hence there is nothing more
         # we can assert to check that it is properly deleted once its
         # refcount drops to 0
 
     def testgrapherror(self):
         AncestorsIterator = self.ancestors_mod().AncestorsIterator
         GraphError = self.graph_error()
 
         data = (
             revlogtesting.data_non_inlined[: 64 + 27]
             + b'\xf2'
             + revlogtesting.data_non_inlined[64 + 28 :]
         )
         idx = self.parserustindex(data=data)
         with self.assertRaises(GraphError) as arc:
             AncestorsIterator(idx, [1], -1, False)
         exc = arc.exception
         self.assertIsInstance(exc, ValueError)
         # rust-cpython issues appropriate str instances for Python 2 and 3
         self.assertEqual(exc.args, ('ParentOutOfRange', 1))
 
     def testwdirunsupported(self):
         AncestorsIterator = self.ancestors_mod().AncestorsIterator
         GraphError = self.graph_error()
 
         # trying to access ancestors of the working directory raises
         idx = self.parserustindex()
         with self.assertRaises(GraphError) as arc:
             list(AncestorsIterator(idx, [wdirrev], -1, False))
 
         exc = arc.exception
         self.assertIsInstance(exc, ValueError)
         # rust-cpython issues appropriate str instances for Python 2 and 3
         self.assertEqual(exc.args, ('InvalidRevision', wdirrev))
 
     def testheadrevs(self):
         dagop = self.dagop_mod()
 
         idx = self.parserustindex()
         self.assertEqual(dagop.headrevs(idx, [1, 2, 3]), {3})
 
-
-class RustCPythonAncestorsTest(
-    revlogtesting.RustRevlogBasedTestBase, RustAncestorsTestMixin
-):
-    rustext_pkg = rustext
-
     def testmissingancestors(self):
         MissingAncestors = self.ancestors_mod().MissingAncestors
 
         idx = self.parserustindex()
         missanc = MissingAncestors(idx, [1])
         self.assertTrue(missanc.hasbases())
         self.assertEqual(missanc.missingancestors([3]), [2, 3])
         missanc.addbases({2})
         self.assertEqual(missanc.bases(), {1, 2})
         self.assertEqual(missanc.missingancestors([3]), [3])
         self.assertEqual(missanc.basesheads(), {2})
 
     def testmissingancestorsremove(self):
         MissingAncestors = self.ancestors_mod().MissingAncestors
 
         idx = self.parserustindex()
         missanc = MissingAncestors(idx, [1])
         revs = {0, 1, 2, 3}
         missanc.removeancestorsfrom(revs)
         self.assertEqual(revs, {2, 3})
 
 
+class RustCPythonAncestorsTest(
+    revlogtesting.RustRevlogBasedTestBase, RustAncestorsTestMixin
+):
+    rustext_pkg = rustext
+
+
 class PyO3AncestorsTest(
     revlogtesting.RustRevlogBasedTestBase, RustAncestorsTestMixin
 ):
     rustext_pkg = pyo3_rustext
 
     def test_rank(self):
         dagop = self.dagop_mod()
 
         idx = self.parserustindex()
         try:
             dagop.rank(idx, 1, 2)
         except pyo3_rustext.GraphError as exc:
             self.assertEqual(exc.args, ("InconsistentGraphData",))
 
 
 if __name__ == '__main__':
     import silenttestrunner
 
     silenttestrunner.main(__name__)