rust-pyo3: implementation of LazyAncestors...
Georges Racinet
r53430:6b694bdf default
@@ -13,11 +13,14 @@ use pyo3::prelude::*;

 use std::sync::RwLock;

-use vcsgraph::lazy_ancestors::AncestorsIterator as VCGAncestorsIterator;
+use vcsgraph::lazy_ancestors::{
+    AncestorsIterator as VCGAncestorsIterator,
+    LazyAncestors as VCGLazyAncestors,
+};

 use crate::convert_cpython::{
-    proxy_index_extract, proxy_index_py_leak, py_leaked_borrow_mut,
-    py_leaked_or_map_err,
+    proxy_index_extract, proxy_index_py_leak, py_leaked_borrow,
+    py_leaked_borrow_mut, py_leaked_or_map_err,
 };
 use crate::exceptions::{map_lock_error, GraphError};
 use crate::revision::{rev_pyiter_collect, PyRevision};
@@ -77,11 +80,93 @@ impl AncestorsIterator {
     }
 }

+#[pyclass(sequence)]
+struct LazyAncestors {
+    inner: RwLock<UnsafePyLeaked<VCGLazyAncestors<PySharedIndex>>>,
+    proxy_index: PyObject,
+    initrevs: PyObject,
+    stoprev: PyRevision,
+    inclusive: bool,
+}
+
+#[pymethods]
+impl LazyAncestors {
+    #[new]
+    fn new(
+        index_proxy: &Bound<'_, PyAny>,
+        initrevs: &Bound<'_, PyAny>,
+        stoprev: PyRevision,
+        inclusive: bool,
+    ) -> PyResult<Self> {
+        let cloned_proxy = index_proxy.clone().unbind();
+        let initvec: Vec<_> = {
+            // Safety: we don't leak the "faked" reference out of
+            // `UnsafePyLeaked`
+            let borrowed_idx = unsafe { proxy_index_extract(index_proxy)? };
+            rev_pyiter_collect(initrevs, borrowed_idx)?
+        };
+        let (py, leaked_idx) = proxy_index_py_leak(index_proxy)?;
+        // Safety: we don't leak the "faked" reference out of
+        // `UnsafePyLeaked`
+        let res_lazy = unsafe {
+            leaked_idx.map(py, |idx| {
+                VCGLazyAncestors::new(
+                    idx,
+                    initvec.into_iter().map(|r| r.0),
+                    stoprev.0,
+                    inclusive,
+                )
+            })
+        };
+        let lazy =
+            py_leaked_or_map_err(py, res_lazy, GraphError::from_vcsgraph)?;
+        Ok(Self {
+            inner: lazy.into(),
+            proxy_index: cloned_proxy,
+            initrevs: initrevs.clone().unbind(),
+            stoprev,
+            inclusive,
+        })
+    }
+
+    fn __bool__(slf: PyRef<'_, Self>) -> PyResult<bool> {
+        let leaked = slf.inner.read().map_err(map_lock_error)?;
+        // Safety: we don't leak the "faked" reference out of `UnsafePyLeaked`
+        let inner = unsafe { py_leaked_borrow(&slf, &leaked) }?;
+        Ok(!inner.is_empty())
+    }
+
+    fn __contains__(
+        slf: PyRefMut<'_, Self>,
+        obj: &Bound<'_, PyAny>,
+    ) -> PyResult<bool> {
+        PyRevision::extract_bound(obj).map_or(Ok(false), |rev| {
+            let mut leaked = slf.inner.write().map_err(map_lock_error)?;
+            // Safety: we don't leak the "faked" reference out of
+            // `UnsafePyLeaked`
+            let mut inner =
+                unsafe { py_leaked_borrow_mut(&slf, &mut leaked) }?;
+            inner.contains(rev.0).map_err(GraphError::from_vcsgraph)
+        })
+    }
+
+    fn __iter__(slf: PyRef<'_, Self>) -> PyResult<AncestorsIterator> {
+        let py = slf.py();
+        AncestorsIterator::new(
+            slf.proxy_index.clone_ref(py).bind(py),
+            slf.initrevs.clone_ref(py).bind(py),
+            slf.stoprev,
+            slf.inclusive,
+        )
+    }
+}
+
 pub fn init_module<'py>(
     py: Python<'py>,
     package: &str,
 ) -> PyResult<Bound<'py, PyModule>> {
     let m = new_submodule(py, package, "ancestor")?;
     m.add_class::<AncestorsIterator>()?;
+    m.add_class::<LazyAncestors>()?;
     Ok(m)
 }
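From Python, the new binding is used the same way as the rust-cpython `LazyAncestors`: construct it with an index proxy, the initial revisions, a stop revision and the `inclusive` flag, then test membership, truthiness, or iterate. A minimal sketch of what the new test further down exercises, with `idx` standing for any compatible index proxy (e.g. the one returned by the `parserustindex()` test helper):

    lazy = LazyAncestors(idx, [3], 0, True)  # ancestors of rev 3, stopping at rev 0, inclusive
    assert 2 in lazy                   # __contains__ takes the write lock and a mutable borrow
    assert bool(lazy)                  # __bool__ reports whether the ancestor set is non-empty
    assert list(lazy) == [3, 2, 1, 0]
    assert list(lazy) == [3, 2, 1, 0]  # each __iter__ call spawns a fresh AncestorsIterator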
@@ -222,7 +222,6 pub(crate) unsafe fn proxy_index_extract
222 /// static reference. It is possible, depending on `T` that such a leak cannot
222 /// static reference. It is possible, depending on `T` that such a leak cannot
223 /// occur in practice. We may later on define a marker trait for this,
223 /// occur in practice. We may later on define a marker trait for this,
224 /// which will allow us to make declare this function to be safe.
224 /// which will allow us to make declare this function to be safe.
225 #[allow(dead_code)]
226 pub(crate) unsafe fn py_leaked_borrow<'a, 'py: 'a, T>(
225 pub(crate) unsafe fn py_leaked_borrow<'a, 'py: 'a, T>(
227 py: &impl WithGIL<'py>,
226 py: &impl WithGIL<'py>,
228 leaked: &'a cpython::UnsafePyLeaked<T>,
227 leaked: &'a cpython::UnsafePyLeaked<T>,
@@ -68,6 +68,49 @@ class RustAncestorsTestMixin:
         ait = AncestorsIterator(idx, [3], 0, False)
         self.assertEqual([r for r in ait], [2, 1, 0])

+        ait = AncestorsIterator(idx, [3], 0, False)
+        # tainting the index with a mutation, let's see what happens
+        # (should be more critical with AncestorsIterator)
+        del idx[0:2]
+        try:
+            next(ait)
+        except RuntimeError as exc:
+            assert "leaked reference after mutation" in exc.args[0]
+        else:
+            raise AssertionError("Expected an exception")
+
+    def testlazyancestors(self):
+        LazyAncestors = self.ancestors_mod().LazyAncestors
+
+        idx = self.parserustindex()
+        start_count = sys.getrefcount(idx.inner)  # should be 2 (see Python doc)
+        self.assertEqual(
+            {i: (r[5], r[6]) for i, r in enumerate(idx)},
+            {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (2, -1)},
+        )
+        lazy = LazyAncestors(idx, [3], 0, True)
+        # the LazyAncestors instance holds just one reference to the
+        # inner revlog. TODO check that this is normal
+        self.assertEqual(sys.getrefcount(idx.inner), start_count + 1)
+
+        self.assertTrue(2 in lazy)
+        self.assertTrue(bool(lazy))
+        self.assertFalse(None in lazy)
+        self.assertEqual(list(lazy), [3, 2, 1, 0])
+        # a second time to validate that we spawn new iterators
+        self.assertEqual(list(lazy), [3, 2, 1, 0])
+
+        # now let's watch the refcounts closer
+        ait = iter(lazy)
+        self.assertEqual(sys.getrefcount(idx.inner), start_count + 2)
+        del ait
+        self.assertEqual(sys.getrefcount(idx.inner), start_count + 1)
+        del lazy
+        self.assertEqual(sys.getrefcount(idx.inner), start_count)
+
+        # let's check bool for an empty one
+        self.assertFalse(LazyAncestors(idx, [0], 0, False))
+
     def testrefcount(self):
         AncestorsIterator = self.ancestors_mod().AncestorsIterator

@@ -135,37 +178,6 @@ class RustCPythonAncestorsTest(
 ):
     rustext_pkg = rustext

-    def testlazyancestors(self):
-        LazyAncestors = self.ancestors_mod().LazyAncestors
-
-        idx = self.parserustindex()
-        start_count = sys.getrefcount(idx.inner)  # should be 2 (see Python doc)
-        self.assertEqual(
-            {i: (r[5], r[6]) for i, r in enumerate(idx)},
-            {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (2, -1)},
-        )
-        lazy = LazyAncestors(idx, [3], 0, True)
-        # the LazyAncestors instance holds just one reference to the
-        # inner revlog.
-        self.assertEqual(sys.getrefcount(idx.inner), start_count + 1)
-
-        self.assertTrue(2 in lazy)
-        self.assertTrue(bool(lazy))
-        self.assertEqual(list(lazy), [3, 2, 1, 0])
-        # a second time to validate that we spawn new iterators
-        self.assertEqual(list(lazy), [3, 2, 1, 0])
-
-        # now let's watch the refcounts closer
-        ait = iter(lazy)
-        self.assertEqual(sys.getrefcount(idx.inner), start_count + 2)
-        del ait
-        self.assertEqual(sys.getrefcount(idx.inner), start_count + 1)
-        del lazy
-        self.assertEqual(sys.getrefcount(idx.inner), start_count)
-
-        # let's check bool for an empty one
-        self.assertFalse(LazyAncestors(idx, [0], 0, False))
-
     def testmissingancestors(self):
         MissingAncestors = self.ancestors_mod().MissingAncestors

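The reference-count assertions that moved into the shared mixin boil down to the pattern below: each live `LazyAncestors` instance and each iterator spawned from it holds exactly one extra reference to `idx.inner`. A sketch reusing the same hypothetical `idx` as above:

    import sys

    base = sys.getrefcount(idx.inner)
    lazy = LazyAncestors(idx, [3], 0, True)  # +1, held by the LazyAncestors instance
    ait = iter(lazy)                         # +1, held by the spawned AncestorsIterator
    assert sys.getrefcount(idx.inner) == base + 2
    del ait
    del lazy                                 # both extra references are released
    assert sys.getrefcount(idx.inner) == base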