@@ -1,49 +1,43 @@
 [package]
 name = "hg-core"
 version = "0.1.0"
 authors = ["Georges Racinet <gracinet@anybox.fr>"]
 description = "Mercurial pure Rust core library, with no assumption on Python bindings (FFI)"
 edition = "2018"

 [lib]
 name = "hg"

 [dependencies]
 bytes-cast = "0.1"
 byteorder = "1.3.4"
 derive_more = "0.99"
 home = "0.5"
 im-rc = "15.0.*"
 lazy_static = "1.4.0"
 rand = "0.7.3"
 rand_pcg = "0.2.1"
 rand_distr = "0.2.2"
 rayon = "1.3.0"
 regex = "1.3.9"
 twox-hash = "1.5.0"
 same-file = "1.0.6"
 crossbeam-channel = "0.4"
 micro-timer = "0.3.0"
 log = "0.4.8"
 memmap = "0.7.0"
 zstd = "0.5.3"
 rust-crypto = "0.2.36"
 format-bytes = "0.2.2"

 # We don't use the `miniz-oxide` backend to not change rhg benchmarks and until
 # we have a clearer view of which backend is the fastest.
 [dependencies.flate2]
 version = "1.0.16"
 features = ["zlib"]
 default-features = false

 [dev-dependencies]
 clap = "*"
 pretty_assertions = "0.6.1"
 tempfile = "3.1.0"
-
-[features]
-# Use a (still unoptimized) tree for the dirstate instead of the current flat
-# dirstate. This is not yet recommended for performance reasons. A future
-# version might make it the default, or make it a runtime option.
-dirstate-tree = []
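The hunk above appears to be hg-core's Cargo.toml: it drops the experimental `dirstate-tree` feature declaration, and the hunks that follow delete the code paths that were gated on that feature. As a reminder of the mechanism being removed, here is a minimal, self-contained sketch (the aliases and map types are illustrative stand-ins, not the hg-core ones): a Cargo feature only toggles `#[cfg(...)]` attributes at compile time, which is how the crate used to switch between the flat `StateMap` and the tree-based one.

```rust
// Sketch only: a feature named like the removed `dirstate-tree = []` would be
// declared under [features] in Cargo.toml and selects one alias at build time.

#[cfg(not(feature = "dirstate-tree"))]
type StateMap = std::collections::HashMap<String, u32>; // flat map (the default)

#[cfg(feature = "dirstate-tree")]
type StateMap = std::collections::BTreeMap<String, u32>; // stand-in for the tree version

fn main() {
    // `cargo run` compiles the first alias; `cargo run --features dirstate-tree`
    // (with the feature declared) would compile the second. Call sites stay the same.
    let mut map = StateMap::default();
    map.insert("a/b".to_owned(), 1);
    println!("{} entry", map.len());
}
```

Once the feature is gone, only one definition per item remains, so every duplicated `#[cfg(feature = "dirstate-tree")]` / `#[cfg(not(feature = "dirstate-tree"))]` pair in the files below collapses to a single copy.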
@@ -1,104 +1,96 @@
 // dirstate module
 //
 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
 //
 // This software may be used and distributed according to the terms of the
 // GNU General Public License version 2 or any later version.

 use crate::errors::HgError;
 use crate::revlog::Node;
 use crate::{utils::hg_path::HgPathBuf, FastHashMap};
 use bytes_cast::{unaligned, BytesCast};
 use std::collections::hash_map;
 use std::convert::TryFrom;

 pub mod dirs_multiset;
 pub mod dirstate_map;
-#[cfg(feature = "dirstate-tree")]
-pub mod dirstate_tree;
 pub mod parsers;
 pub mod status;

 #[derive(Debug, PartialEq, Clone, BytesCast)]
 #[repr(C)]
 pub struct DirstateParents {
     pub p1: Node,
     pub p2: Node,
 }

 /// The C implementation uses all signed types. This will be an issue
 /// either when 4GB+ source files are commonplace or in 2038, whichever
 /// comes first.
 #[derive(Debug, PartialEq, Copy, Clone)]
 pub struct DirstateEntry {
     pub state: EntryState,
     pub mode: i32,
     pub mtime: i32,
     pub size: i32,
 }

 #[derive(BytesCast)]
 #[repr(C)]
 struct RawEntry {
     state: u8,
     mode: unaligned::I32Be,
     size: unaligned::I32Be,
     mtime: unaligned::I32Be,
     length: unaligned::I32Be,
 }

 /// A `DirstateEntry` with a size of `-2` means that it was merged from the
 /// other parent. This allows revert to pick the right status back during a
 /// merge.
 pub const SIZE_FROM_OTHER_PARENT: i32 = -2;

-#[cfg(not(feature = "dirstate-tree"))]
 pub type StateMap = FastHashMap<HgPathBuf, DirstateEntry>;
-#[cfg(not(feature = "dirstate-tree"))]
 pub type StateMapIter<'a> = hash_map::Iter<'a, HgPathBuf, DirstateEntry>;

-#[cfg(feature = "dirstate-tree")]
-pub type StateMap = dirstate_tree::tree::Tree;
-#[cfg(feature = "dirstate-tree")]
-pub type StateMapIter<'a> = dirstate_tree::iter::Iter<'a>;
 pub type CopyMap = FastHashMap<HgPathBuf, HgPathBuf>;
 pub type CopyMapIter<'a> = hash_map::Iter<'a, HgPathBuf, HgPathBuf>;

 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
 pub enum EntryState {
     Normal,
     Added,
     Removed,
     Merged,
     Unknown,
 }

 impl TryFrom<u8> for EntryState {
     type Error = HgError;

     fn try_from(value: u8) -> Result<Self, Self::Error> {
         match value {
             b'n' => Ok(EntryState::Normal),
             b'a' => Ok(EntryState::Added),
             b'r' => Ok(EntryState::Removed),
             b'm' => Ok(EntryState::Merged),
             b'?' => Ok(EntryState::Unknown),
             _ => Err(HgError::CorruptedRepository(format!(
                 "Incorrect dirstate entry state {}",
                 value
             ))),
         }
     }
 }

 impl Into<u8> for EntryState {
     fn into(self) -> u8 {
         match self {
             EntryState::Normal => b'n',
             EntryState::Added => b'a',
             EntryState::Removed => b'r',
             EntryState::Merged => b'm',
             EntryState::Unknown => b'?',
         }
     }
 }
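The dirstate module above keeps the one-byte encoding of entry states (`n`, `a`, `r`, `m`, `?`) and its fallible decoding through `TryFrom<u8>`. The stand-alone sketch below exercises the same round-trip; the enum and the error type are re-declared locally for illustration rather than imported from hg-core, and it implements `From<EntryState> for u8` where the original writes `impl Into<u8>` (the two are interchangeable, `From` being the conventional direction to implement).

```rust
use std::convert::TryFrom;

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
enum EntryState {
    Normal,
    Added,
    Removed,
    Merged,
    Unknown,
}

impl TryFrom<u8> for EntryState {
    // The real code returns HgError::CorruptedRepository; a String stands in here.
    type Error = String;

    fn try_from(value: u8) -> Result<Self, Self::Error> {
        match value {
            b'n' => Ok(EntryState::Normal),
            b'a' => Ok(EntryState::Added),
            b'r' => Ok(EntryState::Removed),
            b'm' => Ok(EntryState::Merged),
            b'?' => Ok(EntryState::Unknown),
            _ => Err(format!("Incorrect dirstate entry state {}", value)),
        }
    }
}

impl From<EntryState> for u8 {
    fn from(state: EntryState) -> u8 {
        match state {
            EntryState::Normal => b'n',
            EntryState::Added => b'a',
            EntryState::Removed => b'r',
            EntryState::Merged => b'm',
            EntryState::Unknown => b'?',
        }
    }
}

fn main() {
    // Every known state byte decodes and re-encodes to itself...
    for byte in [b'n', b'a', b'r', b'm', b'?'] {
        let state = EntryState::try_from(byte).unwrap();
        assert_eq!(u8::from(state), byte);
    }
    // ...and anything else is reported as corruption, as in the match above.
    assert!(EntryState::try_from(b'x').is_err());
}
```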
@@ -1,446 +1,421 @@
|
1 | 1 | // dirs_multiset.rs |
|
2 | 2 | // |
|
3 | 3 | // Copyright 2019 Raphaël Gomès <rgomes@octobus.net> |
|
4 | 4 | // |
|
5 | 5 | // This software may be used and distributed according to the terms of the |
|
6 | 6 | // GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | //! A multiset of directory names. |
|
9 | 9 | //! |
|
10 | 10 | //! Used to count the references to directories in a manifest or dirstate. |
|
11 | 11 | use crate::{ |
|
12 | 12 | dirstate::EntryState, |
|
13 | 13 | utils::{ |
|
14 | 14 | files, |
|
15 | 15 | hg_path::{HgPath, HgPathBuf, HgPathError}, |
|
16 | 16 | }, |
|
17 | 17 | DirstateEntry, DirstateMapError, FastHashMap, StateMap, |
|
18 | 18 | }; |
|
19 | 19 | use std::collections::{hash_map, hash_map::Entry, HashMap, HashSet}; |
|
20 | 20 | |
|
21 | 21 | // could be encapsulated if we cared about API stability more seriously |
|
22 | 22 | pub type DirsMultisetIter<'a> = hash_map::Keys<'a, HgPathBuf, u32>; |
|
23 | 23 | |
|
24 | 24 | #[derive(PartialEq, Debug)] |
|
25 | 25 | pub struct DirsMultiset { |
|
26 | 26 | inner: FastHashMap<HgPathBuf, u32>, |
|
27 | 27 | } |
|
28 | 28 | |
|
29 | 29 | impl DirsMultiset { |
|
30 | 30 | /// Initializes the multiset from a dirstate. |
|
31 | 31 | /// |
|
32 | 32 | /// If `skip_state` is provided, skips dirstate entries with equal state. |
|
33 | #[cfg(not(feature = "dirstate-tree"))] | |
|
34 | 33 | pub fn from_dirstate( |
|
35 | 34 | dirstate: &StateMap, |
|
36 | 35 | skip_state: Option<EntryState>, |
|
37 | 36 | ) -> Result<Self, DirstateMapError> { |
|
38 | 37 | let mut multiset = DirsMultiset { |
|
39 | 38 | inner: FastHashMap::default(), |
|
40 | 39 | }; |
|
41 | 40 | for (filename, DirstateEntry { state, .. }) in dirstate.iter() { |
|
42 | 41 | // This `if` is optimized out of the loop |
|
43 | 42 | if let Some(skip) = skip_state { |
|
44 | 43 | if skip != *state { |
|
45 | 44 | multiset.add_path(filename)?; |
|
46 | 45 | } |
|
47 | 46 | } else { |
|
48 | 47 | multiset.add_path(filename)?; |
|
49 | 48 | } |
|
50 | 49 | } |
|
51 | 50 | |
|
52 | 51 | Ok(multiset) |
|
53 | 52 | } |
|
54 | /// Initializes the multiset from a dirstate. | |
|
55 | /// | |
|
56 | /// If `skip_state` is provided, skips dirstate entries with equal state. | |
|
57 | #[cfg(feature = "dirstate-tree")] | |
|
58 | pub fn from_dirstate( | |
|
59 | dirstate: &StateMap, | |
|
60 | skip_state: Option<EntryState>, | |
|
61 | ) -> Result<Self, DirstateMapError> { | |
|
62 | let mut multiset = DirsMultiset { | |
|
63 | inner: FastHashMap::default(), | |
|
64 | }; | |
|
65 | for (filename, DirstateEntry { state, .. }) in dirstate.iter() { | |
|
66 | // This `if` is optimized out of the loop | |
|
67 | if let Some(skip) = skip_state { | |
|
68 | if skip != state { | |
|
69 | multiset.add_path(filename)?; | |
|
70 | } | |
|
71 | } else { | |
|
72 | multiset.add_path(filename)?; | |
|
73 | } | |
|
74 | } | |
|
75 | ||
|
76 | Ok(multiset) | |
|
77 | } | |
|
78 | 53 | |
|
79 | 54 | /// Initializes the multiset from a manifest. |
|
80 | 55 | pub fn from_manifest( |
|
81 | 56 | manifest: &[impl AsRef<HgPath>], |
|
82 | 57 | ) -> Result<Self, DirstateMapError> { |
|
83 | 58 | let mut multiset = DirsMultiset { |
|
84 | 59 | inner: FastHashMap::default(), |
|
85 | 60 | }; |
|
86 | 61 | |
|
87 | 62 | for filename in manifest { |
|
88 | 63 | multiset.add_path(filename.as_ref())?; |
|
89 | 64 | } |
|
90 | 65 | |
|
91 | 66 | Ok(multiset) |
|
92 | 67 | } |
|
93 | 68 | |
|
94 | 69 | /// Increases the count of deepest directory contained in the path. |
|
95 | 70 | /// |
|
96 | 71 | /// If the directory is not yet in the map, adds its parents. |
|
97 | 72 | pub fn add_path( |
|
98 | 73 | &mut self, |
|
99 | 74 | path: impl AsRef<HgPath>, |
|
100 | 75 | ) -> Result<(), DirstateMapError> { |
|
101 | 76 | for subpath in files::find_dirs(path.as_ref()) { |
|
102 | 77 | if subpath.as_bytes().last() == Some(&b'/') { |
|
103 | 78 | // TODO Remove this once PathAuditor is certified |
|
104 | 79 | // as the only entrypoint for path data |
|
105 | 80 | let second_slash_index = subpath.len() - 1; |
|
106 | 81 | |
|
107 | 82 | return Err(DirstateMapError::InvalidPath( |
|
108 | 83 | HgPathError::ConsecutiveSlashes { |
|
109 | 84 | bytes: path.as_ref().as_bytes().to_owned(), |
|
110 | 85 | second_slash_index, |
|
111 | 86 | }, |
|
112 | 87 | )); |
|
113 | 88 | } |
|
114 | 89 | if let Some(val) = self.inner.get_mut(subpath) { |
|
115 | 90 | *val += 1; |
|
116 | 91 | break; |
|
117 | 92 | } |
|
118 | 93 | self.inner.insert(subpath.to_owned(), 1); |
|
119 | 94 | } |
|
120 | 95 | Ok(()) |
|
121 | 96 | } |
|
122 | 97 | |
|
123 | 98 | /// Decreases the count of deepest directory contained in the path. |
|
124 | 99 | /// |
|
125 | 100 | /// If it is the only reference, decreases all parents until one is |
|
126 | 101 | /// removed. |
|
127 | 102 | /// If the directory is not in the map, something horrible has happened. |
|
128 | 103 | pub fn delete_path( |
|
129 | 104 | &mut self, |
|
130 | 105 | path: impl AsRef<HgPath>, |
|
131 | 106 | ) -> Result<(), DirstateMapError> { |
|
132 | 107 | for subpath in files::find_dirs(path.as_ref()) { |
|
133 | 108 | match self.inner.entry(subpath.to_owned()) { |
|
134 | 109 | Entry::Occupied(mut entry) => { |
|
135 | 110 | let val = *entry.get(); |
|
136 | 111 | if val > 1 { |
|
137 | 112 | entry.insert(val - 1); |
|
138 | 113 | break; |
|
139 | 114 | } |
|
140 | 115 | entry.remove(); |
|
141 | 116 | } |
|
142 | 117 | Entry::Vacant(_) => { |
|
143 | 118 | return Err(DirstateMapError::PathNotFound( |
|
144 | 119 | path.as_ref().to_owned(), |
|
145 | 120 | )) |
|
146 | 121 | } |
|
147 | 122 | }; |
|
148 | 123 | } |
|
149 | 124 | |
|
150 | 125 | Ok(()) |
|
151 | 126 | } |
|
152 | 127 | |
|
153 | 128 | pub fn contains(&self, key: impl AsRef<HgPath>) -> bool { |
|
154 | 129 | self.inner.contains_key(key.as_ref()) |
|
155 | 130 | } |
|
156 | 131 | |
|
157 | 132 | pub fn iter(&self) -> DirsMultisetIter { |
|
158 | 133 | self.inner.keys() |
|
159 | 134 | } |
|
160 | 135 | |
|
161 | 136 | pub fn len(&self) -> usize { |
|
162 | 137 | self.inner.len() |
|
163 | 138 | } |
|
164 | 139 | |
|
165 | 140 | pub fn is_empty(&self) -> bool { |
|
166 | 141 | self.len() == 0 |
|
167 | 142 | } |
|
168 | 143 | } |
|
169 | 144 | |
|
170 | 145 | /// This is basically a reimplementation of `DirsMultiset` that stores the |
|
171 | 146 | /// children instead of just a count of them, plus a small optional |
|
172 | 147 | /// optimization to avoid some directories we don't need. |
|
173 | 148 | #[derive(PartialEq, Debug)] |
|
174 | 149 | pub struct DirsChildrenMultiset<'a> { |
|
175 | 150 | inner: FastHashMap<&'a HgPath, HashSet<&'a HgPath>>, |
|
176 | 151 | only_include: Option<HashSet<&'a HgPath>>, |
|
177 | 152 | } |
|
178 | 153 | |
|
179 | 154 | impl<'a> DirsChildrenMultiset<'a> { |
|
180 | 155 | pub fn new( |
|
181 | 156 | paths: impl Iterator<Item = &'a HgPathBuf>, |
|
182 | 157 | only_include: Option<&'a HashSet<impl AsRef<HgPath> + 'a>>, |
|
183 | 158 | ) -> Self { |
|
184 | 159 | let mut new = Self { |
|
185 | 160 | inner: HashMap::default(), |
|
186 | 161 | only_include: only_include |
|
187 | 162 | .map(|s| s.iter().map(AsRef::as_ref).collect()), |
|
188 | 163 | }; |
|
189 | 164 | |
|
190 | 165 | for path in paths { |
|
191 | 166 | new.add_path(path) |
|
192 | 167 | } |
|
193 | 168 | |
|
194 | 169 | new |
|
195 | 170 | } |
|
196 | 171 | fn add_path(&mut self, path: &'a (impl AsRef<HgPath> + 'a)) { |
|
197 | 172 | if path.as_ref().is_empty() { |
|
198 | 173 | return; |
|
199 | 174 | } |
|
200 | 175 | for (directory, basename) in files::find_dirs_with_base(path.as_ref()) |
|
201 | 176 | { |
|
202 | 177 | if !self.is_dir_included(directory) { |
|
203 | 178 | continue; |
|
204 | 179 | } |
|
205 | 180 | self.inner |
|
206 | 181 | .entry(directory) |
|
207 | 182 | .and_modify(|e| { |
|
208 | 183 | e.insert(basename); |
|
209 | 184 | }) |
|
210 | 185 | .or_insert_with(|| { |
|
211 | 186 | let mut set = HashSet::new(); |
|
212 | 187 | set.insert(basename); |
|
213 | 188 | set |
|
214 | 189 | }); |
|
215 | 190 | } |
|
216 | 191 | } |
|
217 | 192 | fn is_dir_included(&self, dir: impl AsRef<HgPath>) -> bool { |
|
218 | 193 | match &self.only_include { |
|
219 | 194 | None => false, |
|
220 | 195 | Some(i) => i.contains(dir.as_ref()), |
|
221 | 196 | } |
|
222 | 197 | } |
|
223 | 198 | |
|
224 | 199 | pub fn get( |
|
225 | 200 | &self, |
|
226 | 201 | path: impl AsRef<HgPath>, |
|
227 | 202 | ) -> Option<&HashSet<&'a HgPath>> { |
|
228 | 203 | self.inner.get(path.as_ref()) |
|
229 | 204 | } |
|
230 | 205 | } |
|
231 | 206 | |
|
232 | 207 | #[cfg(test)] |
|
233 | 208 | mod tests { |
|
234 | 209 | use super::*; |
|
235 | 210 | |
|
236 | 211 | #[test] |
|
237 | 212 | fn test_delete_path_path_not_found() { |
|
238 | 213 | let manifest: Vec<HgPathBuf> = vec![]; |
|
239 | 214 | let mut map = DirsMultiset::from_manifest(&manifest).unwrap(); |
|
240 | 215 | let path = HgPathBuf::from_bytes(b"doesnotexist/"); |
|
241 | 216 | assert_eq!( |
|
242 | 217 | Err(DirstateMapError::PathNotFound(path.to_owned())), |
|
243 | 218 | map.delete_path(&path) |
|
244 | 219 | ); |
|
245 | 220 | } |
|
246 | 221 | |
|
247 | 222 | #[test] |
|
248 | 223 | fn test_delete_path_empty_path() { |
|
249 | 224 | let mut map = |
|
250 | 225 | DirsMultiset::from_manifest(&vec![HgPathBuf::new()]).unwrap(); |
|
251 | 226 | let path = HgPath::new(b""); |
|
252 | 227 | assert_eq!(Ok(()), map.delete_path(path)); |
|
253 | 228 | assert_eq!( |
|
254 | 229 | Err(DirstateMapError::PathNotFound(path.to_owned())), |
|
255 | 230 | map.delete_path(path) |
|
256 | 231 | ); |
|
257 | 232 | } |
|
258 | 233 | |
|
259 | 234 | #[test] |
|
260 | 235 | fn test_delete_path_successful() { |
|
261 | 236 | let mut map = DirsMultiset { |
|
262 | 237 | inner: [("", 5), ("a", 3), ("a/b", 2), ("a/c", 1)] |
|
263 | 238 | .iter() |
|
264 | 239 | .map(|(k, v)| (HgPathBuf::from_bytes(k.as_bytes()), *v)) |
|
265 | 240 | .collect(), |
|
266 | 241 | }; |
|
267 | 242 | |
|
268 | 243 | assert_eq!(Ok(()), map.delete_path(HgPath::new(b"a/b/"))); |
|
269 | 244 | eprintln!("{:?}", map); |
|
270 | 245 | assert_eq!(Ok(()), map.delete_path(HgPath::new(b"a/b/"))); |
|
271 | 246 | eprintln!("{:?}", map); |
|
272 | 247 | assert_eq!( |
|
273 | 248 | Err(DirstateMapError::PathNotFound(HgPathBuf::from_bytes( |
|
274 | 249 | b"a/b/" |
|
275 | 250 | ))), |
|
276 | 251 | map.delete_path(HgPath::new(b"a/b/")) |
|
277 | 252 | ); |
|
278 | 253 | |
|
279 | 254 | assert_eq!(2, *map.inner.get(HgPath::new(b"a")).unwrap()); |
|
280 | 255 | assert_eq!(1, *map.inner.get(HgPath::new(b"a/c")).unwrap()); |
|
281 | 256 | eprintln!("{:?}", map); |
|
282 | 257 | assert_eq!(Ok(()), map.delete_path(HgPath::new(b"a/"))); |
|
283 | 258 | eprintln!("{:?}", map); |
|
284 | 259 | |
|
285 | 260 | assert_eq!(Ok(()), map.delete_path(HgPath::new(b"a/c/"))); |
|
286 | 261 | assert_eq!( |
|
287 | 262 | Err(DirstateMapError::PathNotFound(HgPathBuf::from_bytes( |
|
288 | 263 | b"a/c/" |
|
289 | 264 | ))), |
|
290 | 265 | map.delete_path(HgPath::new(b"a/c/")) |
|
291 | 266 | ); |
|
292 | 267 | } |
|
293 | 268 | |
|
294 | 269 | #[test] |
|
295 | 270 | fn test_add_path_empty_path() { |
|
296 | 271 | let manifest: Vec<HgPathBuf> = vec![]; |
|
297 | 272 | let mut map = DirsMultiset::from_manifest(&manifest).unwrap(); |
|
298 | 273 | let path = HgPath::new(b""); |
|
299 | 274 | map.add_path(path).unwrap(); |
|
300 | 275 | |
|
301 | 276 | assert_eq!(1, map.len()); |
|
302 | 277 | } |
|
303 | 278 | |
|
304 | 279 | #[test] |
|
305 | 280 | fn test_add_path_successful() { |
|
306 | 281 | let manifest: Vec<HgPathBuf> = vec![]; |
|
307 | 282 | let mut map = DirsMultiset::from_manifest(&manifest).unwrap(); |
|
308 | 283 | |
|
309 | 284 | map.add_path(HgPath::new(b"a/")).unwrap(); |
|
310 | 285 | assert_eq!(1, *map.inner.get(HgPath::new(b"a")).unwrap()); |
|
311 | 286 | assert_eq!(1, *map.inner.get(HgPath::new(b"")).unwrap()); |
|
312 | 287 | assert_eq!(2, map.len()); |
|
313 | 288 | |
|
314 | 289 | // Non directory should be ignored |
|
315 | 290 | map.add_path(HgPath::new(b"a")).unwrap(); |
|
316 | 291 | assert_eq!(1, *map.inner.get(HgPath::new(b"a")).unwrap()); |
|
317 | 292 | assert_eq!(2, map.len()); |
|
318 | 293 | |
|
319 | 294 | // Non directory will still add its base |
|
320 | 295 | map.add_path(HgPath::new(b"a/b")).unwrap(); |
|
321 | 296 | assert_eq!(2, *map.inner.get(HgPath::new(b"a")).unwrap()); |
|
322 | 297 | assert_eq!(2, map.len()); |
|
323 | 298 | |
|
324 | 299 | // Duplicate path works |
|
325 | 300 | map.add_path(HgPath::new(b"a/")).unwrap(); |
|
326 | 301 | assert_eq!(3, *map.inner.get(HgPath::new(b"a")).unwrap()); |
|
327 | 302 | |
|
328 | 303 | // Nested dir adds to its base |
|
329 | 304 | map.add_path(HgPath::new(b"a/b/")).unwrap(); |
|
330 | 305 | assert_eq!(4, *map.inner.get(HgPath::new(b"a")).unwrap()); |
|
331 | 306 | assert_eq!(1, *map.inner.get(HgPath::new(b"a/b")).unwrap()); |
|
332 | 307 | |
|
333 | 308 | // but not its base's base, because it already existed |
|
334 | 309 | map.add_path(HgPath::new(b"a/b/c/")).unwrap(); |
|
335 | 310 | assert_eq!(4, *map.inner.get(HgPath::new(b"a")).unwrap()); |
|
336 | 311 | assert_eq!(2, *map.inner.get(HgPath::new(b"a/b")).unwrap()); |
|
337 | 312 | |
|
338 | 313 | map.add_path(HgPath::new(b"a/c/")).unwrap(); |
|
339 | 314 | assert_eq!(1, *map.inner.get(HgPath::new(b"a/c")).unwrap()); |
|
340 | 315 | |
|
341 | 316 | let expected = DirsMultiset { |
|
342 | 317 | inner: [("", 2), ("a", 5), ("a/b", 2), ("a/b/c", 1), ("a/c", 1)] |
|
343 | 318 | .iter() |
|
344 | 319 | .map(|(k, v)| (HgPathBuf::from_bytes(k.as_bytes()), *v)) |
|
345 | 320 | .collect(), |
|
346 | 321 | }; |
|
347 | 322 | assert_eq!(map, expected); |
|
348 | 323 | } |
|
349 | 324 | |
|
350 | 325 | #[test] |
|
351 | 326 | fn test_dirsmultiset_new_empty() { |
|
352 | 327 | let manifest: Vec<HgPathBuf> = vec![]; |
|
353 | 328 | let new = DirsMultiset::from_manifest(&manifest).unwrap(); |
|
354 | 329 | let expected = DirsMultiset { |
|
355 | 330 | inner: FastHashMap::default(), |
|
356 | 331 | }; |
|
357 | 332 | assert_eq!(expected, new); |
|
358 | 333 | |
|
359 | 334 | let new = |
|
360 | 335 | DirsMultiset::from_dirstate(&StateMap::default(), None).unwrap(); |
|
361 | 336 | let expected = DirsMultiset { |
|
362 | 337 | inner: FastHashMap::default(), |
|
363 | 338 | }; |
|
364 | 339 | assert_eq!(expected, new); |
|
365 | 340 | } |
|
366 | 341 | |
|
367 | 342 | #[test] |
|
368 | 343 | fn test_dirsmultiset_new_no_skip() { |
|
369 | 344 | let input_vec: Vec<HgPathBuf> = ["a/", "b/", "a/c", "a/d/"] |
|
370 | 345 | .iter() |
|
371 | 346 | .map(|e| HgPathBuf::from_bytes(e.as_bytes())) |
|
372 | 347 | .collect(); |
|
373 | 348 | let expected_inner = [("", 2), ("a", 3), ("b", 1), ("a/d", 1)] |
|
374 | 349 | .iter() |
|
375 | 350 | .map(|(k, v)| (HgPathBuf::from_bytes(k.as_bytes()), *v)) |
|
376 | 351 | .collect(); |
|
377 | 352 | |
|
378 | 353 | let new = DirsMultiset::from_manifest(&input_vec).unwrap(); |
|
379 | 354 | let expected = DirsMultiset { |
|
380 | 355 | inner: expected_inner, |
|
381 | 356 | }; |
|
382 | 357 | assert_eq!(expected, new); |
|
383 | 358 | |
|
384 | 359 | let input_map = ["b/x", "a/c", "a/d/x"] |
|
385 | 360 | .iter() |
|
386 | 361 | .map(|f| { |
|
387 | 362 | ( |
|
388 | 363 | HgPathBuf::from_bytes(f.as_bytes()), |
|
389 | 364 | DirstateEntry { |
|
390 | 365 | state: EntryState::Normal, |
|
391 | 366 | mode: 0, |
|
392 | 367 | mtime: 0, |
|
393 | 368 | size: 0, |
|
394 | 369 | }, |
|
395 | 370 | ) |
|
396 | 371 | }) |
|
397 | 372 | .collect(); |
|
398 | 373 | let expected_inner = [("", 2), ("a", 2), ("b", 1), ("a/d", 1)] |
|
399 | 374 | .iter() |
|
400 | 375 | .map(|(k, v)| (HgPathBuf::from_bytes(k.as_bytes()), *v)) |
|
401 | 376 | .collect(); |
|
402 | 377 | |
|
403 | 378 | let new = DirsMultiset::from_dirstate(&input_map, None).unwrap(); |
|
404 | 379 | let expected = DirsMultiset { |
|
405 | 380 | inner: expected_inner, |
|
406 | 381 | }; |
|
407 | 382 | assert_eq!(expected, new); |
|
408 | 383 | } |
|
409 | 384 | |
|
410 | 385 | #[test] |
|
411 | 386 | fn test_dirsmultiset_new_skip() { |
|
412 | 387 | let input_map = [ |
|
413 | 388 | ("a/", EntryState::Normal), |
|
414 | 389 | ("a/b", EntryState::Normal), |
|
415 | 390 | ("a/c", EntryState::Removed), |
|
416 | 391 | ("a/d", EntryState::Merged), |
|
417 | 392 | ] |
|
418 | 393 | .iter() |
|
419 | 394 | .map(|(f, state)| { |
|
420 | 395 | ( |
|
421 | 396 | HgPathBuf::from_bytes(f.as_bytes()), |
|
422 | 397 | DirstateEntry { |
|
423 | 398 | state: *state, |
|
424 | 399 | mode: 0, |
|
425 | 400 | mtime: 0, |
|
426 | 401 | size: 0, |
|
427 | 402 | }, |
|
428 | 403 | ) |
|
429 | 404 | }) |
|
430 | 405 | .collect(); |
|
431 | 406 | |
|
432 | 407 | // "a" incremented with "a/c" and "a/d/" |
|
433 | 408 | let expected_inner = [("", 1), ("a", 2)] |
|
434 | 409 | .iter() |
|
435 | 410 | .map(|(k, v)| (HgPathBuf::from_bytes(k.as_bytes()), *v)) |
|
436 | 411 | .collect(); |
|
437 | 412 | |
|
438 | 413 | let new = |
|
439 | 414 | DirsMultiset::from_dirstate(&input_map, Some(EntryState::Normal)) |
|
440 | 415 | .unwrap(); |
|
441 | 416 | let expected = DirsMultiset { |
|
442 | 417 | inner: expected_inner, |
|
443 | 418 | }; |
|
444 | 419 | assert_eq!(expected, new); |
|
445 | 420 | } |
|
446 | 421 | } |
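`add_path` and `delete_path` in `dirs_multiset.rs` above maintain a reference count per directory: adding a path increments the deepest ancestor directory that already exists and inserts any missing ancestors with a count of 1, while deleting decrements the deepest ancestor and drops ancestors whose count reaches zero. The sketch below mirrors that logic over plain `/`-separated `String`s (no `HgPath`, no `ConsecutiveSlashes` validation), purely as an illustration; the counts it asserts match the expectations of `test_dirsmultiset_new_no_skip` above.

```rust
use std::collections::hash_map::Entry;
use std::collections::HashMap;

/// Toy directory multiset: directory -> number of entries keeping it alive.
#[derive(Debug, Default)]
struct DirsMultiset {
    inner: HashMap<String, u32>,
}

/// Parent directories of `path`, deepest first, ending with the root "".
/// e.g. "a/d/x" -> ["a/d", "a", ""]
fn parent_dirs(path: &str) -> Vec<String> {
    let mut dirs = Vec::new();
    let mut current = path;
    while let Some(idx) = current.rfind('/') {
        current = &current[..idx];
        dirs.push(current.to_owned());
    }
    dirs.push(String::new());
    dirs
}

impl DirsMultiset {
    /// Increment the deepest existing ancestor, creating missing ones with 1.
    fn add_path(&mut self, path: &str) {
        for dir in parent_dirs(path) {
            if let Some(count) = self.inner.get_mut(&dir) {
                *count += 1;
                break; // ancestors of an existing dir are already counted
            }
            self.inner.insert(dir, 1);
        }
    }

    /// Decrement the deepest ancestor; when a count would reach zero, remove
    /// the entry and keep walking up, as `delete_path` does above.
    fn delete_path(&mut self, path: &str) {
        for dir in parent_dirs(path) {
            match self.inner.entry(dir) {
                Entry::Occupied(mut entry) => {
                    if *entry.get() > 1 {
                        *entry.get_mut() -= 1;
                        break;
                    }
                    entry.remove();
                }
                Entry::Vacant(_) => panic!("path not tracked: {}", path),
            }
        }
    }
}

fn main() {
    let mut dirs = DirsMultiset::default();
    for file in ["b/x", "a/c", "a/d/x"] {
        dirs.add_path(file);
    }
    // Same counts as the test above: "" = 2, "a" = 2, "b" = 1, "a/d" = 1.
    assert_eq!(dirs.inner["a"], 2);
    assert_eq!(dirs.inner[""], 2);
    dirs.delete_path("a/c");
    assert_eq!(dirs.inner["a"], 1);
}
```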
@@ -1,549 +1,503 @@
|
1 | 1 | // dirstate_map.rs |
|
2 | 2 | // |
|
3 | 3 | // Copyright 2019 Raphaël Gomès <rgomes@octobus.net> |
|
4 | 4 | // |
|
5 | 5 | // This software may be used and distributed according to the terms of the |
|
6 | 6 | // GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | use crate::errors::HgError; |
|
9 | 9 | use crate::revlog::node::NULL_NODE; |
|
10 | 10 | use crate::{ |
|
11 | 11 | dirstate::{parsers::PARENT_SIZE, EntryState, SIZE_FROM_OTHER_PARENT}, |
|
12 | 12 | pack_dirstate, parse_dirstate, |
|
13 | 13 | utils::{ |
|
14 | 14 | files::normalize_case, |
|
15 | 15 | hg_path::{HgPath, HgPathBuf}, |
|
16 | 16 | }, |
|
17 | 17 | CopyMap, DirsMultiset, DirstateEntry, DirstateError, DirstateMapError, |
|
18 | 18 | DirstateParents, FastHashMap, StateMap, |
|
19 | 19 | }; |
|
20 | 20 | use micro_timer::timed; |
|
21 | 21 | use std::collections::HashSet; |
|
22 | 22 | use std::convert::TryInto; |
|
23 | 23 | use std::iter::FromIterator; |
|
24 | 24 | use std::ops::Deref; |
|
25 | 25 | use std::time::Duration; |
|
26 | 26 | |
|
27 | 27 | pub type FileFoldMap = FastHashMap<HgPathBuf, HgPathBuf>; |
|
28 | 28 | |
|
29 | 29 | const MTIME_UNSET: i32 = -1; |
|
30 | 30 | |
|
31 | 31 | #[derive(Default)] |
|
32 | 32 | pub struct DirstateMap { |
|
33 | 33 | state_map: StateMap, |
|
34 | 34 | pub copy_map: CopyMap, |
|
35 | 35 | file_fold_map: Option<FileFoldMap>, |
|
36 | 36 | pub dirs: Option<DirsMultiset>, |
|
37 | 37 | pub all_dirs: Option<DirsMultiset>, |
|
38 | 38 | non_normal_set: Option<HashSet<HgPathBuf>>, |
|
39 | 39 | other_parent_set: Option<HashSet<HgPathBuf>>, |
|
40 | 40 | parents: Option<DirstateParents>, |
|
41 | 41 | dirty_parents: bool, |
|
42 | 42 | } |
|
43 | 43 | |
|
44 | 44 | /// Should only really be used in python interface code, for clarity |
|
45 | 45 | impl Deref for DirstateMap { |
|
46 | 46 | type Target = StateMap; |
|
47 | 47 | |
|
48 | 48 | fn deref(&self) -> &Self::Target { |
|
49 | 49 | &self.state_map |
|
50 | 50 | } |
|
51 | 51 | } |
|
52 | 52 | |
|
53 | 53 | impl FromIterator<(HgPathBuf, DirstateEntry)> for DirstateMap { |
|
54 | 54 | fn from_iter<I: IntoIterator<Item = (HgPathBuf, DirstateEntry)>>( |
|
55 | 55 | iter: I, |
|
56 | 56 | ) -> Self { |
|
57 | 57 | Self { |
|
58 | 58 | state_map: iter.into_iter().collect(), |
|
59 | 59 | ..Self::default() |
|
60 | 60 | } |
|
61 | 61 | } |
|
62 | 62 | } |
|
63 | 63 | |
|
64 | 64 | impl DirstateMap { |
|
65 | 65 | pub fn new() -> Self { |
|
66 | 66 | Self::default() |
|
67 | 67 | } |
|
68 | 68 | |
|
69 | 69 | pub fn clear(&mut self) { |
|
70 | 70 | self.state_map = StateMap::default(); |
|
71 | 71 | self.copy_map.clear(); |
|
72 | 72 | self.file_fold_map = None; |
|
73 | 73 | self.non_normal_set = None; |
|
74 | 74 | self.other_parent_set = None; |
|
75 | 75 | self.set_parents(&DirstateParents { |
|
76 | 76 | p1: NULL_NODE, |
|
77 | 77 | p2: NULL_NODE, |
|
78 | 78 | }) |
|
79 | 79 | } |
|
80 | 80 | |
|
81 | 81 | /// Add a tracked file to the dirstate |
|
82 | 82 | pub fn add_file( |
|
83 | 83 | &mut self, |
|
84 | 84 | filename: &HgPath, |
|
85 | 85 | old_state: EntryState, |
|
86 | 86 | entry: DirstateEntry, |
|
87 | 87 | ) -> Result<(), DirstateMapError> { |
|
88 | 88 | if old_state == EntryState::Unknown || old_state == EntryState::Removed |
|
89 | 89 | { |
|
90 | 90 | if let Some(ref mut dirs) = self.dirs { |
|
91 | 91 | dirs.add_path(filename)?; |
|
92 | 92 | } |
|
93 | 93 | } |
|
94 | 94 | if old_state == EntryState::Unknown { |
|
95 | 95 | if let Some(ref mut all_dirs) = self.all_dirs { |
|
96 | 96 | all_dirs.add_path(filename)?; |
|
97 | 97 | } |
|
98 | 98 | } |
|
99 | 99 | self.state_map.insert(filename.to_owned(), entry.to_owned()); |
|
100 | 100 | |
|
101 | 101 | if entry.state != EntryState::Normal || entry.mtime == MTIME_UNSET { |
|
102 | 102 | self.get_non_normal_other_parent_entries() |
|
103 | 103 | .0 |
|
104 | 104 | .insert(filename.to_owned()); |
|
105 | 105 | } |
|
106 | 106 | |
|
107 | 107 | if entry.size == SIZE_FROM_OTHER_PARENT { |
|
108 | 108 | self.get_non_normal_other_parent_entries() |
|
109 | 109 | .1 |
|
110 | 110 | .insert(filename.to_owned()); |
|
111 | 111 | } |
|
112 | 112 | Ok(()) |
|
113 | 113 | } |
|
114 | 114 | |
|
115 | 115 | /// Mark a file as removed in the dirstate. |
|
116 | 116 | /// |
|
117 | 117 | /// The `size` parameter is used to store sentinel values that indicate |
|
118 | 118 | /// the file's previous state. In the future, we should refactor this |
|
119 | 119 | /// to be more explicit about what that state is. |
|
120 | 120 | pub fn remove_file( |
|
121 | 121 | &mut self, |
|
122 | 122 | filename: &HgPath, |
|
123 | 123 | old_state: EntryState, |
|
124 | 124 | size: i32, |
|
125 | 125 | ) -> Result<(), DirstateMapError> { |
|
126 | 126 | if old_state != EntryState::Unknown && old_state != EntryState::Removed |
|
127 | 127 | { |
|
128 | 128 | if let Some(ref mut dirs) = self.dirs { |
|
129 | 129 | dirs.delete_path(filename)?; |
|
130 | 130 | } |
|
131 | 131 | } |
|
132 | 132 | if old_state == EntryState::Unknown { |
|
133 | 133 | if let Some(ref mut all_dirs) = self.all_dirs { |
|
134 | 134 | all_dirs.add_path(filename)?; |
|
135 | 135 | } |
|
136 | 136 | } |
|
137 | 137 | |
|
138 | 138 | if let Some(ref mut file_fold_map) = self.file_fold_map { |
|
139 | 139 | file_fold_map.remove(&normalize_case(filename)); |
|
140 | 140 | } |
|
141 | 141 | self.state_map.insert( |
|
142 | 142 | filename.to_owned(), |
|
143 | 143 | DirstateEntry { |
|
144 | 144 | state: EntryState::Removed, |
|
145 | 145 | mode: 0, |
|
146 | 146 | size, |
|
147 | 147 | mtime: 0, |
|
148 | 148 | }, |
|
149 | 149 | ); |
|
150 | 150 | self.get_non_normal_other_parent_entries() |
|
151 | 151 | .0 |
|
152 | 152 | .insert(filename.to_owned()); |
|
153 | 153 | Ok(()) |
|
154 | 154 | } |
|
155 | 155 | |
|
156 | 156 | /// Remove a file from the dirstate. |
|
157 | 157 | /// Returns `true` if the file was previously recorded. |
|
158 | 158 | pub fn drop_file( |
|
159 | 159 | &mut self, |
|
160 | 160 | filename: &HgPath, |
|
161 | 161 | old_state: EntryState, |
|
162 | 162 | ) -> Result<bool, DirstateMapError> { |
|
163 | 163 | let exists = self.state_map.remove(filename).is_some(); |
|
164 | 164 | |
|
165 | 165 | if exists { |
|
166 | 166 | if old_state != EntryState::Removed { |
|
167 | 167 | if let Some(ref mut dirs) = self.dirs { |
|
168 | 168 | dirs.delete_path(filename)?; |
|
169 | 169 | } |
|
170 | 170 | } |
|
171 | 171 | if let Some(ref mut all_dirs) = self.all_dirs { |
|
172 | 172 | all_dirs.delete_path(filename)?; |
|
173 | 173 | } |
|
174 | 174 | } |
|
175 | 175 | if let Some(ref mut file_fold_map) = self.file_fold_map { |
|
176 | 176 | file_fold_map.remove(&normalize_case(filename)); |
|
177 | 177 | } |
|
178 | 178 | self.get_non_normal_other_parent_entries() |
|
179 | 179 | .0 |
|
180 | 180 | .remove(filename); |
|
181 | 181 | |
|
182 | 182 | Ok(exists) |
|
183 | 183 | } |
|
184 | 184 | |
|
185 | 185 | pub fn clear_ambiguous_times( |
|
186 | 186 | &mut self, |
|
187 | 187 | filenames: Vec<HgPathBuf>, |
|
188 | 188 | now: i32, |
|
189 | 189 | ) { |
|
190 | 190 | for filename in filenames { |
|
191 | 191 | let mut changed = false; |
|
192 | 192 | if let Some(entry) = self.state_map.get_mut(&filename) { |
|
193 | 193 | if entry.state == EntryState::Normal && entry.mtime == now { |
|
194 | 194 | changed = true; |
|
195 | 195 | *entry = DirstateEntry { |
|
196 | 196 | mtime: MTIME_UNSET, |
|
197 | 197 | ..*entry |
|
198 | 198 | }; |
|
199 | 199 | } |
|
200 | 200 | } |
|
201 | 201 | if changed { |
|
202 | 202 | self.get_non_normal_other_parent_entries() |
|
203 | 203 | .0 |
|
204 | 204 | .insert(filename.to_owned()); |
|
205 | 205 | } |
|
206 | 206 | } |
|
207 | 207 | } |
|
208 | 208 | |
|
209 | 209 | pub fn non_normal_entries_remove( |
|
210 | 210 | &mut self, |
|
211 | 211 | key: impl AsRef<HgPath>, |
|
212 | 212 | ) -> bool { |
|
213 | 213 | self.get_non_normal_other_parent_entries() |
|
214 | 214 | .0 |
|
215 | 215 | .remove(key.as_ref()) |
|
216 | 216 | } |
|
217 | 217 | pub fn non_normal_entries_union( |
|
218 | 218 | &mut self, |
|
219 | 219 | other: HashSet<HgPathBuf>, |
|
220 | 220 | ) -> Vec<HgPathBuf> { |
|
221 | 221 | self.get_non_normal_other_parent_entries() |
|
222 | 222 | .0 |
|
223 | 223 | .union(&other) |
|
224 | 224 | .map(ToOwned::to_owned) |
|
225 | 225 | .collect() |
|
226 | 226 | } |
|
227 | 227 | |
|
228 | 228 | pub fn get_non_normal_other_parent_entries( |
|
229 | 229 | &mut self, |
|
230 | 230 | ) -> (&mut HashSet<HgPathBuf>, &mut HashSet<HgPathBuf>) { |
|
231 | 231 | self.set_non_normal_other_parent_entries(false); |
|
232 | 232 | ( |
|
233 | 233 | self.non_normal_set.as_mut().unwrap(), |
|
234 | 234 | self.other_parent_set.as_mut().unwrap(), |
|
235 | 235 | ) |
|
236 | 236 | } |
|
237 | 237 | |
|
238 | 238 | /// Useful to get immutable references to those sets in contexts where |
|
239 | 239 | /// you only have an immutable reference to the `DirstateMap`, like when |
|
240 | 240 | /// sharing references with Python. |
|
241 | 241 | /// |
|
242 | 242 | /// TODO, get rid of this along with the other "setter/getter" stuff when |
|
243 | 243 | /// a nice typestate plan is defined. |
|
244 | 244 | /// |
|
245 | 245 | /// # Panics |
|
246 | 246 | /// |
|
247 | 247 | /// Will panic if either set is `None`. |
|
248 | 248 | pub fn get_non_normal_other_parent_entries_panic( |
|
249 | 249 | &self, |
|
250 | 250 | ) -> (&HashSet<HgPathBuf>, &HashSet<HgPathBuf>) { |
|
251 | 251 | ( |
|
252 | 252 | self.non_normal_set.as_ref().unwrap(), |
|
253 | 253 | self.other_parent_set.as_ref().unwrap(), |
|
254 | 254 | ) |
|
255 | 255 | } |
|
256 | 256 | |
|
257 | #[cfg(not(feature = "dirstate-tree"))] | |
|
258 | 257 | pub fn set_non_normal_other_parent_entries(&mut self, force: bool) { |
|
259 | 258 | if !force |
|
260 | 259 | && self.non_normal_set.is_some() |
|
261 | 260 | && self.other_parent_set.is_some() |
|
262 | 261 | { |
|
263 | 262 | return; |
|
264 | 263 | } |
|
265 | 264 | let mut non_normal = HashSet::new(); |
|
266 | 265 | let mut other_parent = HashSet::new(); |
|
267 | 266 | |
|
268 | 267 | for ( |
|
269 | 268 | filename, |
|
270 | 269 | DirstateEntry { |
|
271 | 270 | state, size, mtime, .. |
|
272 | 271 | }, |
|
273 | 272 | ) in self.state_map.iter() |
|
274 | 273 | { |
|
275 | 274 | if *state != EntryState::Normal || *mtime == MTIME_UNSET { |
|
276 | 275 | non_normal.insert(filename.to_owned()); |
|
277 | 276 | } |
|
278 | 277 | if *state == EntryState::Normal && *size == SIZE_FROM_OTHER_PARENT |
|
279 | 278 | { |
|
280 | 279 | other_parent.insert(filename.to_owned()); |
|
281 | 280 | } |
|
282 | 281 | } |
|
283 | 282 | self.non_normal_set = Some(non_normal); |
|
284 | 283 | self.other_parent_set = Some(other_parent); |
|
285 | 284 | } |
|
286 | #[cfg(feature = "dirstate-tree")] | |
|
287 | pub fn set_non_normal_other_parent_entries(&mut self, force: bool) { | |
|
288 | if !force | |
|
289 | && self.non_normal_set.is_some() | |
|
290 | && self.other_parent_set.is_some() | |
|
291 | { | |
|
292 | return; | |
|
293 | } | |
|
294 | let mut non_normal = HashSet::new(); | |
|
295 | let mut other_parent = HashSet::new(); | |
|
296 | ||
|
297 | for ( | |
|
298 | filename, | |
|
299 | DirstateEntry { | |
|
300 | state, size, mtime, .. | |
|
301 | }, | |
|
302 | ) in self.state_map.iter() | |
|
303 | { | |
|
304 | if state != EntryState::Normal || mtime == MTIME_UNSET { | |
|
305 | non_normal.insert(filename.to_owned()); | |
|
306 | } | |
|
307 | if state == EntryState::Normal && size == SIZE_FROM_OTHER_PARENT { | |
|
308 | other_parent.insert(filename.to_owned()); | |
|
309 | } | |
|
310 | } | |
|
311 | self.non_normal_set = Some(non_normal); | |
|
312 | self.other_parent_set = Some(other_parent); | |
|
313 | } | |
|
314 | 285 | |
|
315 | 286 | /// Both of these setters and their uses appear to be the simplest way to |
|
316 | 287 | /// emulate a Python lazy property, but it is ugly and unidiomatic. |
|
317 | 288 | /// TODO One day, rewriting this struct using the typestate might be a |
|
318 | 289 | /// good idea. |
|
319 | 290 | pub fn set_all_dirs(&mut self) -> Result<(), DirstateMapError> { |
|
320 | 291 | if self.all_dirs.is_none() { |
|
321 | 292 | self.all_dirs = |
|
322 | 293 | Some(DirsMultiset::from_dirstate(&self.state_map, None)?); |
|
323 | 294 | } |
|
324 | 295 | Ok(()) |
|
325 | 296 | } |
|
326 | 297 | |
|
327 | 298 | pub fn set_dirs(&mut self) -> Result<(), DirstateMapError> { |
|
328 | 299 | if self.dirs.is_none() { |
|
329 | 300 | self.dirs = Some(DirsMultiset::from_dirstate( |
|
330 | 301 | &self.state_map, |
|
331 | 302 | Some(EntryState::Removed), |
|
332 | 303 | )?); |
|
333 | 304 | } |
|
334 | 305 | Ok(()) |
|
335 | 306 | } |
|
336 | 307 | |
|
337 | 308 | pub fn has_tracked_dir( |
|
338 | 309 | &mut self, |
|
339 | 310 | directory: &HgPath, |
|
340 | 311 | ) -> Result<bool, DirstateMapError> { |
|
341 | 312 | self.set_dirs()?; |
|
342 | 313 | Ok(self.dirs.as_ref().unwrap().contains(directory)) |
|
343 | 314 | } |
|
344 | 315 | |
|
345 | 316 | pub fn has_dir( |
|
346 | 317 | &mut self, |
|
347 | 318 | directory: &HgPath, |
|
348 | 319 | ) -> Result<bool, DirstateMapError> { |
|
349 | 320 | self.set_all_dirs()?; |
|
350 | 321 | Ok(self.all_dirs.as_ref().unwrap().contains(directory)) |
|
351 | 322 | } |
|
352 | 323 | |
|
353 | 324 | pub fn parents( |
|
354 | 325 | &mut self, |
|
355 | 326 | file_contents: &[u8], |
|
356 | 327 | ) -> Result<&DirstateParents, DirstateError> { |
|
357 | 328 | if let Some(ref parents) = self.parents { |
|
358 | 329 | return Ok(parents); |
|
359 | 330 | } |
|
360 | 331 | let parents; |
|
361 | 332 | if file_contents.len() == PARENT_SIZE * 2 { |
|
362 | 333 | parents = DirstateParents { |
|
363 | 334 | p1: file_contents[..PARENT_SIZE].try_into().unwrap(), |
|
364 | 335 | p2: file_contents[PARENT_SIZE..PARENT_SIZE * 2] |
|
365 | 336 | .try_into() |
|
366 | 337 | .unwrap(), |
|
367 | 338 | }; |
|
368 | 339 | } else if file_contents.is_empty() { |
|
369 | 340 | parents = DirstateParents { |
|
370 | 341 | p1: NULL_NODE, |
|
371 | 342 | p2: NULL_NODE, |
|
372 | 343 | }; |
|
373 | 344 | } else { |
|
374 | 345 | return Err( |
|
375 | 346 | HgError::corrupted("Dirstate appears to be damaged").into() |
|
376 | 347 | ); |
|
377 | 348 | } |
|
378 | 349 | |
|
379 | 350 | self.parents = Some(parents); |
|
380 | 351 | Ok(self.parents.as_ref().unwrap()) |
|
381 | 352 | } |
|
382 | 353 | |
|
383 | 354 | pub fn set_parents(&mut self, parents: &DirstateParents) { |
|
384 | 355 | self.parents = Some(parents.clone()); |
|
385 | 356 | self.dirty_parents = true; |
|
386 | 357 | } |
|
387 | 358 | |
|
388 | 359 | #[timed] |
|
389 | 360 | pub fn read<'a>( |
|
390 | 361 | &mut self, |
|
391 | 362 | file_contents: &'a [u8], |
|
392 | 363 | ) -> Result<Option<&'a DirstateParents>, DirstateError> { |
|
393 | 364 | if file_contents.is_empty() { |
|
394 | 365 | return Ok(None); |
|
395 | 366 | } |
|
396 | 367 | |
|
397 | 368 | let (parents, entries, copies) = parse_dirstate(file_contents)?; |
|
398 | 369 | self.state_map.extend( |
|
399 | 370 | entries |
|
400 | 371 | .into_iter() |
|
401 | 372 | .map(|(path, entry)| (path.to_owned(), entry)), |
|
402 | 373 | ); |
|
403 | 374 | self.copy_map.extend( |
|
404 | 375 | copies |
|
405 | 376 | .into_iter() |
|
406 | 377 | .map(|(path, copy)| (path.to_owned(), copy.to_owned())), |
|
407 | 378 | ); |
|
408 | 379 | |
|
409 | 380 | if !self.dirty_parents { |
|
410 | 381 | self.set_parents(&parents); |
|
411 | 382 | } |
|
412 | 383 | |
|
413 | 384 | Ok(Some(parents)) |
|
414 | 385 | } |
|
415 | 386 | |
|
416 | 387 | pub fn pack( |
|
417 | 388 | &mut self, |
|
418 | 389 | parents: DirstateParents, |
|
419 | 390 | now: Duration, |
|
420 | 391 | ) -> Result<Vec<u8>, DirstateError> { |
|
421 | 392 | let packed = |
|
422 | 393 | pack_dirstate(&mut self.state_map, &self.copy_map, parents, now)?; |
|
423 | 394 | |
|
424 | 395 | self.dirty_parents = false; |
|
425 | 396 | |
|
426 | 397 | self.set_non_normal_other_parent_entries(true); |
|
427 | 398 | Ok(packed) |
|
428 | 399 | } |
|
429 | #[cfg(not(feature = "dirstate-tree"))] | |
|
430 | 400 | pub fn build_file_fold_map(&mut self) -> &FileFoldMap { |
|
431 | 401 | if let Some(ref file_fold_map) = self.file_fold_map { |
|
432 | 402 | return file_fold_map; |
|
433 | 403 | } |
|
434 | 404 | let mut new_file_fold_map = FileFoldMap::default(); |
|
435 | 405 | |
|
436 | 406 | for (filename, DirstateEntry { state, .. }) in self.state_map.iter() { |
|
437 | 407 | if *state != EntryState::Removed { |
|
438 | 408 | new_file_fold_map |
|
439 | 409 | .insert(normalize_case(&filename), filename.to_owned()); |
|
440 | 410 | } |
|
441 | 411 | } |
|
442 | 412 | self.file_fold_map = Some(new_file_fold_map); |
|
443 | 413 | self.file_fold_map.as_ref().unwrap() |
|
444 | 414 | } |
|
445 | #[cfg(feature = "dirstate-tree")] | |
|
446 | pub fn build_file_fold_map(&mut self) -> &FileFoldMap { | |
|
447 | if let Some(ref file_fold_map) = self.file_fold_map { | |
|
448 | return file_fold_map; | |
|
449 | } | |
|
450 | let mut new_file_fold_map = FileFoldMap::default(); | |
|
451 | ||
|
452 | for (filename, DirstateEntry { state, .. }) in self.state_map.iter() { | |
|
453 | if state != EntryState::Removed { | |
|
454 | new_file_fold_map | |
|
455 | .insert(normalize_case(&filename), filename.to_owned()); | |
|
456 | } | |
|
457 | } | |
|
458 | self.file_fold_map = Some(new_file_fold_map); | |
|
459 | self.file_fold_map.as_ref().unwrap() | |
|
460 | } | |
|
461 | 415 | } |
|
462 | 416 | |
|
463 | 417 | #[cfg(test)] |
|
464 | 418 | mod tests { |
|
465 | 419 | use super::*; |
|
466 | 420 | |
|
467 | 421 | #[test] |
|
468 | 422 | fn test_dirs_multiset() { |
|
469 | 423 | let mut map = DirstateMap::new(); |
|
470 | 424 | assert!(map.dirs.is_none()); |
|
471 | 425 | assert!(map.all_dirs.is_none()); |
|
472 | 426 | |
|
473 | 427 | assert_eq!(map.has_dir(HgPath::new(b"nope")).unwrap(), false); |
|
474 | 428 | assert!(map.all_dirs.is_some()); |
|
475 | 429 | assert!(map.dirs.is_none()); |
|
476 | 430 | |
|
477 | 431 | assert_eq!(map.has_tracked_dir(HgPath::new(b"nope")).unwrap(), false); |
|
478 | 432 | assert!(map.dirs.is_some()); |
|
479 | 433 | } |
|
480 | 434 | |
|
481 | 435 | #[test] |
|
482 | 436 | fn test_add_file() { |
|
483 | 437 | let mut map = DirstateMap::new(); |
|
484 | 438 | |
|
485 | 439 | assert_eq!(0, map.len()); |
|
486 | 440 | |
|
487 | 441 | map.add_file( |
|
488 | 442 | HgPath::new(b"meh"), |
|
489 | 443 | EntryState::Normal, |
|
490 | 444 | DirstateEntry { |
|
491 | 445 | state: EntryState::Normal, |
|
492 | 446 | mode: 1337, |
|
493 | 447 | mtime: 1337, |
|
494 | 448 | size: 1337, |
|
495 | 449 | }, |
|
496 | 450 | ) |
|
497 | 451 | .unwrap(); |
|
498 | 452 | |
|
499 | 453 | assert_eq!(1, map.len()); |
|
500 | 454 | assert_eq!(0, map.get_non_normal_other_parent_entries().0.len()); |
|
501 | 455 | assert_eq!(0, map.get_non_normal_other_parent_entries().1.len()); |
|
502 | 456 | } |
|
503 | 457 | |
|
504 | 458 | #[test] |
|
505 | 459 | fn test_non_normal_other_parent_entries() { |
|
506 | 460 | let mut map: DirstateMap = [ |
|
507 | 461 | (b"f1", (EntryState::Removed, 1337, 1337, 1337)), |
|
508 | 462 | (b"f2", (EntryState::Normal, 1337, 1337, -1)), |
|
509 | 463 | (b"f3", (EntryState::Normal, 1337, 1337, 1337)), |
|
510 | 464 | (b"f4", (EntryState::Normal, 1337, -2, 1337)), |
|
511 | 465 | (b"f5", (EntryState::Added, 1337, 1337, 1337)), |
|
512 | 466 | (b"f6", (EntryState::Added, 1337, 1337, -1)), |
|
513 | 467 | (b"f7", (EntryState::Merged, 1337, 1337, -1)), |
|
514 | 468 | (b"f8", (EntryState::Merged, 1337, 1337, 1337)), |
|
515 | 469 | (b"f9", (EntryState::Merged, 1337, -2, 1337)), |
|
516 | 470 | (b"fa", (EntryState::Added, 1337, -2, 1337)), |
|
517 | 471 | (b"fb", (EntryState::Removed, 1337, -2, 1337)), |
|
518 | 472 | ] |
|
519 | 473 | .iter() |
|
520 | 474 | .map(|(fname, (state, mode, size, mtime))| { |
|
521 | 475 | ( |
|
522 | 476 | HgPathBuf::from_bytes(fname.as_ref()), |
|
523 | 477 | DirstateEntry { |
|
524 | 478 | state: *state, |
|
525 | 479 | mode: *mode, |
|
526 | 480 | size: *size, |
|
527 | 481 | mtime: *mtime, |
|
528 | 482 | }, |
|
529 | 483 | ) |
|
530 | 484 | }) |
|
531 | 485 | .collect(); |
|
532 | 486 | |
|
533 | 487 | let mut non_normal = [ |
|
534 | 488 | b"f1", b"f2", b"f5", b"f6", b"f7", b"f8", b"f9", b"fa", b"fb", |
|
535 | 489 | ] |
|
536 | 490 | .iter() |
|
537 | 491 | .map(|x| HgPathBuf::from_bytes(x.as_ref())) |
|
538 | 492 | .collect(); |
|
539 | 493 | |
|
540 | 494 | let mut other_parent = HashSet::new(); |
|
541 | 495 | other_parent.insert(HgPathBuf::from_bytes(b"f4")); |
|
542 | 496 | let entries = map.get_non_normal_other_parent_entries(); |
|
543 | 497 | |
|
544 | 498 | assert_eq!( |
|
545 | 499 | (&mut non_normal, &mut other_parent), |
|
546 | 500 | (entries.0, entries.1) |
|
547 | 501 | ); |
|
548 | 502 | } |
|
549 | 503 | } |
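`set_non_normal_other_parent_entries` in `dirstate_map.rs` above scans the state map and builds two sets: files whose state is not `Normal` or whose mtime is the `MTIME_UNSET` sentinel (-1), and `Normal` files whose size is `SIZE_FROM_OTHER_PARENT` (-2). The following sketch shows just that classification with simplified stand-in types (not the hg-core ones), using owned `String` paths instead of `HgPathBuf`.

```rust
use std::collections::{HashMap, HashSet};

#[allow(dead_code)]
#[derive(Copy, Clone, PartialEq)]
enum EntryState { Normal, Added, Removed, Merged, Unknown }

#[allow(dead_code)]
#[derive(Copy, Clone)]
struct DirstateEntry { state: EntryState, mode: i32, mtime: i32, size: i32 }

const MTIME_UNSET: i32 = -1;
const SIZE_FROM_OTHER_PARENT: i32 = -2;

/// Same classification as `set_non_normal_other_parent_entries` above:
/// returns the (non_normal, other_parent) filename sets.
fn classify(
    state_map: &HashMap<String, DirstateEntry>,
) -> (HashSet<String>, HashSet<String>) {
    let mut non_normal = HashSet::new();
    let mut other_parent = HashSet::new();
    for (filename, entry) in state_map {
        if entry.state != EntryState::Normal || entry.mtime == MTIME_UNSET {
            non_normal.insert(filename.clone());
        }
        if entry.state == EntryState::Normal && entry.size == SIZE_FROM_OTHER_PARENT {
            other_parent.insert(filename.clone());
        }
    }
    (non_normal, other_parent)
}

fn main() {
    let mut map = HashMap::new();
    // Normal but with an unset mtime: non-normal.
    map.insert("f2".to_owned(), DirstateEntry {
        state: EntryState::Normal, mode: 0o644, mtime: MTIME_UNSET, size: 10,
    });
    // Normal with the "from other parent" size sentinel: other-parent.
    map.insert("f4".to_owned(), DirstateEntry {
        state: EntryState::Normal, mode: 0o644, mtime: 100, size: SIZE_FROM_OTHER_PARENT,
    });
    // Any non-Normal state: non-normal.
    map.insert("f5".to_owned(), DirstateEntry {
        state: EntryState::Added, mode: 0o644, mtime: 100, size: 10,
    });
    let (non_normal, other_parent) = classify(&map);
    assert!(non_normal.contains("f2") && non_normal.contains("f5"));
    assert!(other_parent.contains("f4") && !non_normal.contains("f4"));
}
```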
@@ -1,508 +1,434 @@
|
1 | 1 | // Copyright 2019 Raphaël Gomès <rgomes@octobus.net> |
|
2 | 2 | // |
|
3 | 3 | // This software may be used and distributed according to the terms of the |
|
4 | 4 | // GNU General Public License version 2 or any later version. |
|
5 | 5 | |
|
6 | 6 | use crate::errors::HgError; |
|
7 | 7 | use crate::utils::hg_path::HgPath; |
|
8 | 8 | use crate::{ |
|
9 | 9 | dirstate::{CopyMap, EntryState, RawEntry, StateMap}, |
|
10 | 10 | DirstateEntry, DirstateParents, |
|
11 | 11 | }; |
|
12 | 12 | use byteorder::{BigEndian, WriteBytesExt}; |
|
13 | 13 | use bytes_cast::BytesCast; |
|
14 | 14 | use micro_timer::timed; |
|
15 | 15 | use std::convert::{TryFrom, TryInto}; |
|
16 | 16 | use std::time::Duration; |
|
17 | 17 | |
|
18 | 18 | /// Parents are stored in the dirstate as byte hashes. |
|
19 | 19 | pub const PARENT_SIZE: usize = 20; |
|
20 | 20 | /// Dirstate entries have a static part of 8 + 32 + 32 + 32 + 32 bits. |
|
21 | 21 | const MIN_ENTRY_SIZE: usize = 17; |
|
22 | 22 | |
|
23 | 23 | type ParseResult<'a> = ( |
|
24 | 24 | &'a DirstateParents, |
|
25 | 25 | Vec<(&'a HgPath, DirstateEntry)>, |
|
26 | 26 | Vec<(&'a HgPath, &'a HgPath)>, |
|
27 | 27 | ); |
|
28 | 28 | |
|
29 | 29 | pub fn parse_dirstate_parents( |
|
30 | 30 | contents: &[u8], |
|
31 | 31 | ) -> Result<&DirstateParents, HgError> { |
|
32 | 32 | let (parents, _rest) = DirstateParents::from_bytes(contents) |
|
33 | 33 | .map_err(|_| HgError::corrupted("Too little data for dirstate."))?; |
|
34 | 34 | Ok(parents) |
|
35 | 35 | } |
|
36 | 36 | |
|
37 | 37 | #[timed] |
|
38 | 38 | pub fn parse_dirstate(mut contents: &[u8]) -> Result<ParseResult, HgError> { |
|
39 | 39 | let mut copies = Vec::new(); |
|
40 | 40 | let mut entries = Vec::new(); |
|
41 | 41 | |
|
42 | 42 | let (parents, rest) = DirstateParents::from_bytes(contents) |
|
43 | 43 | .map_err(|_| HgError::corrupted("Too little data for dirstate."))?; |
|
44 | 44 | contents = rest; |
|
45 | 45 | while !contents.is_empty() { |
|
46 | 46 | let (raw_entry, rest) = RawEntry::from_bytes(contents) |
|
47 | 47 | .map_err(|_| HgError::corrupted("Overflow in dirstate."))?; |
|
48 | 48 | |
|
49 | 49 | let entry = DirstateEntry { |
|
50 | 50 | state: EntryState::try_from(raw_entry.state)?, |
|
51 | 51 | mode: raw_entry.mode.get(), |
|
52 | 52 | mtime: raw_entry.mtime.get(), |
|
53 | 53 | size: raw_entry.size.get(), |
|
54 | 54 | }; |
|
55 | 55 | let (paths, rest) = |
|
56 | 56 | u8::slice_from_bytes(rest, raw_entry.length.get() as usize) |
|
57 | 57 | .map_err(|_| HgError::corrupted("Overflow in dirstate."))?; |
|
58 | 58 | |
|
59 | 59 | // `paths` is either a single path, or two paths separated by a NULL |
|
60 | 60 | // byte |
|
61 | 61 | let mut iter = paths.splitn(2, |&byte| byte == b'\0'); |
|
62 | 62 | let path = HgPath::new( |
|
63 | 63 | iter.next().expect("splitn always yields at least one item"), |
|
64 | 64 | ); |
|
65 | 65 | if let Some(copy_source) = iter.next() { |
|
66 | 66 | copies.push((path, HgPath::new(copy_source))); |
|
67 | 67 | } |
|
68 | 68 | |
|
69 | 69 | entries.push((path, entry)); |
|
70 | 70 | contents = rest; |
|
71 | 71 | } |
|
72 | 72 | Ok((parents, entries, copies)) |
|
73 | 73 | } |
|
74 | 74 | |
|
75 | 75 | /// `now` is the duration in seconds since the Unix epoch |
|
76 | #[cfg(not(feature = "dirstate-tree"))] | |
|
77 | 76 | pub fn pack_dirstate( |
|
78 | 77 | state_map: &mut StateMap, |
|
79 | 78 | copy_map: &CopyMap, |
|
80 | 79 | parents: DirstateParents, |
|
81 | 80 | now: Duration, |
|
82 | 81 | ) -> Result<Vec<u8>, HgError> { |
|
83 | 82 | // TODO move away from i32 before 2038. |
|
84 | 83 | let now: i32 = now.as_secs().try_into().expect("time overflow"); |
|
85 | 84 | |
|
86 | 85 | let expected_size: usize = state_map |
|
87 | 86 | .iter() |
|
88 | 87 | .map(|(filename, _)| { |
|
89 | 88 | let mut length = MIN_ENTRY_SIZE + filename.len(); |
|
90 | 89 | if let Some(copy) = copy_map.get(filename) { |
|
91 | 90 | length += copy.len() + 1; |
|
92 | 91 | } |
|
93 | 92 | length |
|
94 | 93 | }) |
|
95 | 94 | .sum(); |
|
96 | 95 | let expected_size = expected_size + PARENT_SIZE * 2; |
|
97 | 96 | |
|
98 | 97 | let mut packed = Vec::with_capacity(expected_size); |
|
99 | 98 | |
|
100 | 99 | packed.extend(parents.p1.as_bytes()); |
|
101 | 100 | packed.extend(parents.p2.as_bytes()); |
|
102 | 101 | |
|
103 | 102 | for (filename, entry) in state_map.iter_mut() { |
|
104 | 103 | let new_filename = filename.to_owned(); |
|
105 | 104 | let mut new_mtime: i32 = entry.mtime; |
|
106 | 105 | if entry.state == EntryState::Normal && entry.mtime == now { |
|
107 | 106 | // The file was last modified "simultaneously" with the current |
|
108 | 107 | // write to dirstate (i.e. within the same second for file- |
|
109 | 108 | // systems with a granularity of 1 sec). This commonly happens |
|
110 | 109 | // for at least a couple of files on 'update'. |
|
111 | 110 | // The user could change the file without changing its size |
|
112 | 111 | // within the same second. Invalidate the file's mtime in |
|
113 | 112 | // dirstate, forcing future 'status' calls to compare the |
|
114 | 113 | // contents of the file if the size is the same. This prevents |
|
115 | 114 | // mistakenly treating such files as clean. |
|
116 | 115 | new_mtime = -1; |
|
117 | 116 | *entry = DirstateEntry { |
|
118 | 117 | mtime: new_mtime, |
|
119 | 118 | ..*entry |
|
120 | 119 | }; |
|
121 | 120 | } |
|
122 | 121 | let mut new_filename = new_filename.into_vec(); |
|
123 | 122 | if let Some(copy) = copy_map.get(filename) { |
|
124 | 123 | new_filename.push(b'\0'); |
|
125 | 124 | new_filename.extend(copy.bytes()); |
|
126 | 125 | } |
|
127 | 126 | |
|
128 | 127 | // Unwrapping because `impl std::io::Write for Vec<u8>` never errors |
|
129 | 128 | packed.write_u8(entry.state.into()).unwrap(); |
|
130 | 129 | packed.write_i32::<BigEndian>(entry.mode).unwrap(); |
|
131 | 130 | packed.write_i32::<BigEndian>(entry.size).unwrap(); |
|
132 | 131 | packed.write_i32::<BigEndian>(new_mtime).unwrap(); |
|
133 | 132 | packed |
|
134 | 133 | .write_i32::<BigEndian>(new_filename.len() as i32) |
|
135 | 134 | .unwrap(); |
|
136 | 135 | packed.extend(new_filename) |
|
137 | 136 | } |
|
138 | 137 | |
|
139 | 138 | if packed.len() != expected_size { |
|
140 | 139 | return Err(HgError::CorruptedRepository(format!( |
|
141 | 140 | "bad dirstate size: {} != {}", |
|
142 | 141 | expected_size, |
|
143 | 142 | packed.len() |
|
144 | 143 | ))); |
|
145 | 144 | } |
|
146 | 145 | |
|
147 | 146 | Ok(packed) |
|
148 | 147 | } |
|
149 | /// `now` is the duration in seconds since the Unix epoch | |
|
150 | #[cfg(feature = "dirstate-tree")] | |
|
151 | pub fn pack_dirstate( | |
|
152 | state_map: &mut StateMap, | |
|
153 | copy_map: &CopyMap, | |
|
154 | parents: DirstateParents, | |
|
155 | now: Duration, | |
|
156 | ) -> Result<Vec<u8>, DirstatePackError> { | |
|
157 | // TODO move away from i32 before 2038. | |
|
158 | let now: i32 = now.as_secs().try_into().expect("time overflow"); | |
|
159 | ||
|
160 | let expected_size: usize = state_map | |
|
161 | .iter() | |
|
162 | .map(|(filename, _)| { | |
|
163 | let mut length = MIN_ENTRY_SIZE + filename.len(); | |
|
164 | if let Some(copy) = copy_map.get(&filename) { | |
|
165 | length += copy.len() + 1; | |
|
166 | } | |
|
167 | length | |
|
168 | }) | |
|
169 | .sum(); | |
|
170 | let expected_size = expected_size + PARENT_SIZE * 2; | |
|
171 | ||
|
172 | let mut packed = Vec::with_capacity(expected_size); | |
|
173 | let mut new_state_map = vec![]; | |
|
174 | ||
|
175 | packed.extend(&parents.p1); | |
|
176 | packed.extend(&parents.p2); | |
|
177 | ||
|
178 | for (filename, entry) in state_map.iter() { | |
|
179 | let new_filename = filename.to_owned(); | |
|
180 | let mut new_mtime: i32 = entry.mtime; | |
|
181 | if entry.state == EntryState::Normal && entry.mtime == now { | |
|
182 | // The file was last modified "simultaneously" with the current | |
|
183 | // write to dirstate (i.e. within the same second for file- | |
|
184 | // systems with a granularity of 1 sec). This commonly happens | |
|
185 | // for at least a couple of files on 'update'. | |
|
186 | // The user could change the file without changing its size | |
|
187 | // within the same second. Invalidate the file's mtime in | |
|
188 | // dirstate, forcing future 'status' calls to compare the | |
|
189 | // contents of the file if the size is the same. This prevents | |
|
190 | // mistakenly treating such files as clean. | |
|
191 | new_mtime = -1; | |
|
192 | new_state_map.push(( | |
|
193 | filename.to_owned(), | |
|
194 | DirstateEntry { | |
|
195 | mtime: new_mtime, | |
|
196 | ..entry | |
|
197 | }, | |
|
198 | )); | |
|
199 | } | |
|
200 | let mut new_filename = new_filename.into_vec(); | |
|
201 | if let Some(copy) = copy_map.get(&filename) { | |
|
202 | new_filename.push(b'\0'); | |
|
203 | new_filename.extend(copy.bytes()); | |
|
204 | } | |
|
205 | ||
|
206 | packed.write_u8(entry.state.into())?; | |
|
207 | packed.write_i32::<BigEndian>(entry.mode)?; | |
|
208 | packed.write_i32::<BigEndian>(entry.size)?; | |
|
209 | packed.write_i32::<BigEndian>(new_mtime)?; | |
|
210 | packed.write_i32::<BigEndian>(new_filename.len() as i32)?; | |
|
211 | packed.extend(new_filename) | |
|
212 | } | |
|
213 | ||
|
214 | if packed.len() != expected_size { | |
|
215 | return Err(DirstatePackError::BadSize(expected_size, packed.len())); | |
|
216 | } | |
|
217 | ||
|
218 | state_map.extend(new_state_map); | |
|
219 | ||
|
220 | Ok(packed) | |
|
221 | } | |
|
222 | 148 | |
|
223 | 149 | #[cfg(test)] |
|
224 | 150 | mod tests { |
|
225 | 151 | use super::*; |
|
226 | 152 | use crate::{utils::hg_path::HgPathBuf, FastHashMap}; |
|
227 | 153 | use pretty_assertions::assert_eq; |
|
228 | 154 | |
|
229 | 155 | #[test] |
|
230 | 156 | fn test_pack_dirstate_empty() { |
|
231 | 157 | let mut state_map = StateMap::default(); |
|
232 | 158 | let copymap = FastHashMap::default(); |
|
233 | 159 | let parents = DirstateParents { |
|
234 | 160 | p1: b"12345678910111213141".into(), |
|
235 | 161 | p2: b"00000000000000000000".into(), |
|
236 | 162 | }; |
|
237 | 163 | let now = Duration::new(15000000, 0); |
|
238 | 164 | let expected = b"1234567891011121314100000000000000000000".to_vec(); |
|
239 | 165 | |
|
240 | 166 | assert_eq!( |
|
241 | 167 | expected, |
|
242 | 168 | pack_dirstate(&mut state_map, &copymap, parents, now).unwrap() |
|
243 | 169 | ); |
|
244 | 170 | |
|
245 | 171 | assert!(state_map.is_empty()) |
|
246 | 172 | } |
|
247 | 173 | #[test] |
|
248 | 174 | fn test_pack_dirstate_one_entry() { |
|
249 | 175 | let expected_state_map: StateMap = [( |
|
250 | 176 | HgPathBuf::from_bytes(b"f1"), |
|
251 | 177 | DirstateEntry { |
|
252 | 178 | state: EntryState::Normal, |
|
253 | 179 | mode: 0o644, |
|
254 | 180 | size: 0, |
|
255 | 181 | mtime: 791231220, |
|
256 | 182 | }, |
|
257 | 183 | )] |
|
258 | 184 | .iter() |
|
259 | 185 | .cloned() |
|
260 | 186 | .collect(); |
|
261 | 187 | let mut state_map = expected_state_map.clone(); |
|
262 | 188 | |
|
263 | 189 | let copymap = FastHashMap::default(); |
|
264 | 190 | let parents = DirstateParents { |
|
265 | 191 | p1: b"12345678910111213141".into(), |
|
266 | 192 | p2: b"00000000000000000000".into(), |
|
267 | 193 | }; |
|
268 | 194 | let now = Duration::new(15000000, 0); |
|
269 | 195 | let expected = [ |
|
270 | 196 | 49, 50, 51, 52, 53, 54, 55, 56, 57, 49, 48, 49, 49, 49, 50, 49, |
|
271 | 197 | 51, 49, 52, 49, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, |
|
272 | 198 | 48, 48, 48, 48, 48, 48, 48, 48, 110, 0, 0, 1, 164, 0, 0, 0, 0, 47, |
|
273 | 199 | 41, 58, 244, 0, 0, 0, 2, 102, 49, |
|
274 | 200 | ] |
|
275 | 201 | .to_vec(); |
|
276 | 202 | |
|
277 | 203 | assert_eq!( |
|
278 | 204 | expected, |
|
279 | 205 | pack_dirstate(&mut state_map, &copymap, parents, now).unwrap() |
|
280 | 206 | ); |
|
281 | 207 | |
|
282 | 208 | assert_eq!(expected_state_map, state_map); |
|
283 | 209 | } |
|
284 | 210 | #[test] |
|
285 | 211 | fn test_pack_dirstate_one_entry_with_copy() { |
|
286 | 212 | let expected_state_map: StateMap = [( |
|
287 | 213 | HgPathBuf::from_bytes(b"f1"), |
|
288 | 214 | DirstateEntry { |
|
289 | 215 | state: EntryState::Normal, |
|
290 | 216 | mode: 0o644, |
|
291 | 217 | size: 0, |
|
292 | 218 | mtime: 791231220, |
|
293 | 219 | }, |
|
294 | 220 | )] |
|
295 | 221 | .iter() |
|
296 | 222 | .cloned() |
|
297 | 223 | .collect(); |
|
298 | 224 | let mut state_map = expected_state_map.clone(); |
|
299 | 225 | let mut copymap = FastHashMap::default(); |
|
300 | 226 | copymap.insert( |
|
301 | 227 | HgPathBuf::from_bytes(b"f1"), |
|
302 | 228 | HgPathBuf::from_bytes(b"copyname"), |
|
303 | 229 | ); |
|
304 | 230 | let parents = DirstateParents { |
|
305 | 231 | p1: b"12345678910111213141".into(), |
|
306 | 232 | p2: b"00000000000000000000".into(), |
|
307 | 233 | }; |
|
308 | 234 | let now = Duration::new(15000000, 0); |
|
309 | 235 | let expected = [ |
|
310 | 236 | 49, 50, 51, 52, 53, 54, 55, 56, 57, 49, 48, 49, 49, 49, 50, 49, |
|
311 | 237 | 51, 49, 52, 49, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, |
|
312 | 238 | 48, 48, 48, 48, 48, 48, 48, 48, 110, 0, 0, 1, 164, 0, 0, 0, 0, 47, |
|
313 | 239 | 41, 58, 244, 0, 0, 0, 11, 102, 49, 0, 99, 111, 112, 121, 110, 97, |
|
314 | 240 | 109, 101, |
|
315 | 241 | ] |
|
316 | 242 | .to_vec(); |
|
317 | 243 | |
|
318 | 244 | assert_eq!( |
|
319 | 245 | expected, |
|
320 | 246 | pack_dirstate(&mut state_map, ©map, parents, now).unwrap() |
|
321 | 247 | ); |
|
322 | 248 | assert_eq!(expected_state_map, state_map); |
|
323 | 249 | } |
|
324 | 250 | |
|
325 | 251 | #[test] |
|
326 | 252 | fn test_parse_pack_one_entry_with_copy() { |
|
327 | 253 | let mut state_map: StateMap = [( |
|
328 | 254 | HgPathBuf::from_bytes(b"f1"), |
|
329 | 255 | DirstateEntry { |
|
330 | 256 | state: EntryState::Normal, |
|
331 | 257 | mode: 0o644, |
|
332 | 258 | size: 0, |
|
333 | 259 | mtime: 791231220, |
|
334 | 260 | }, |
|
335 | 261 | )] |
|
336 | 262 | .iter() |
|
337 | 263 | .cloned() |
|
338 | 264 | .collect(); |
|
339 | 265 | let mut copymap = FastHashMap::default(); |
|
340 | 266 | copymap.insert( |
|
341 | 267 | HgPathBuf::from_bytes(b"f1"), |
|
342 | 268 | HgPathBuf::from_bytes(b"copyname"), |
|
343 | 269 | ); |
|
344 | 270 | let parents = DirstateParents { |
|
345 | 271 | p1: b"12345678910111213141".into(), |
|
346 | 272 | p2: b"00000000000000000000".into(), |
|
347 | 273 | }; |
|
348 | 274 | let now = Duration::new(15000000, 0); |
|
349 | 275 | let result = |
|
350 | 276 | pack_dirstate(&mut state_map, ©map, parents.clone(), now) |
|
351 | 277 | .unwrap(); |
|
352 | 278 | |
|
353 | 279 | let (new_parents, entries, copies) = |
|
354 | 280 | parse_dirstate(result.as_slice()).unwrap(); |
|
355 | 281 | let new_state_map: StateMap = entries |
|
356 | 282 | .into_iter() |
|
357 | 283 | .map(|(path, entry)| (path.to_owned(), entry)) |
|
358 | 284 | .collect(); |
|
359 | 285 | let new_copy_map: CopyMap = copies |
|
360 | 286 | .into_iter() |
|
361 | 287 | .map(|(path, copy)| (path.to_owned(), copy.to_owned())) |
|
362 | 288 | .collect(); |
|
363 | 289 | |
|
364 | 290 | assert_eq!( |
|
365 | 291 | (&parents, state_map, copymap), |
|
366 | 292 | (new_parents, new_state_map, new_copy_map) |
|
367 | 293 | ) |
|
368 | 294 | } |
|
369 | 295 | |
|
370 | 296 | #[test] |
|
371 | 297 | fn test_parse_pack_multiple_entries_with_copy() { |
|
372 | 298 | let mut state_map: StateMap = [ |
|
373 | 299 | ( |
|
374 | 300 | HgPathBuf::from_bytes(b"f1"), |
|
375 | 301 | DirstateEntry { |
|
376 | 302 | state: EntryState::Normal, |
|
377 | 303 | mode: 0o644, |
|
378 | 304 | size: 0, |
|
379 | 305 | mtime: 791231220, |
|
380 | 306 | }, |
|
381 | 307 | ), |
|
382 | 308 | ( |
|
383 | 309 | HgPathBuf::from_bytes(b"f2"), |
|
384 | 310 | DirstateEntry { |
|
385 | 311 | state: EntryState::Merged, |
|
386 | 312 | mode: 0o777, |
|
387 | 313 | size: 1000, |
|
388 | 314 | mtime: 791231220, |
|
389 | 315 | }, |
|
390 | 316 | ), |
|
391 | 317 | ( |
|
392 | 318 | HgPathBuf::from_bytes(b"f3"), |
|
393 | 319 | DirstateEntry { |
|
394 | 320 | state: EntryState::Removed, |
|
395 | 321 | mode: 0o644, |
|
396 | 322 | size: 234553, |
|
397 | 323 | mtime: 791231220, |
|
398 | 324 | }, |
|
399 | 325 | ), |
|
400 | 326 | ( |
|
401 | 327 | HgPathBuf::from_bytes(b"f4\xF6"), |
|
402 | 328 | DirstateEntry { |
|
403 | 329 | state: EntryState::Added, |
|
404 | 330 | mode: 0o644, |
|
405 | 331 | size: -1, |
|
406 | 332 | mtime: -1, |
|
407 | 333 | }, |
|
408 | 334 | ), |
|
409 | 335 | ] |
|
410 | 336 | .iter() |
|
411 | 337 | .cloned() |
|
412 | 338 | .collect(); |
|
413 | 339 | let mut copymap = FastHashMap::default(); |
|
414 | 340 | copymap.insert( |
|
415 | 341 | HgPathBuf::from_bytes(b"f1"), |
|
416 | 342 | HgPathBuf::from_bytes(b"copyname"), |
|
417 | 343 | ); |
|
418 | 344 | copymap.insert( |
|
419 | 345 | HgPathBuf::from_bytes(b"f4\xF6"), |
|
420 | 346 | HgPathBuf::from_bytes(b"copyname2"), |
|
421 | 347 | ); |
|
422 | 348 | let parents = DirstateParents { |
|
423 | 349 | p1: b"12345678910111213141".into(), |
|
424 | 350 | p2: b"00000000000000000000".into(), |
|
425 | 351 | }; |
|
426 | 352 | let now = Duration::new(15000000, 0); |
|
427 | 353 | let result = |
|
428 | 354 | pack_dirstate(&mut state_map, ©map, parents.clone(), now) |
|
429 | 355 | .unwrap(); |
|
430 | 356 | |
|
431 | 357 | let (new_parents, entries, copies) = |
|
432 | 358 | parse_dirstate(result.as_slice()).unwrap(); |
|
433 | 359 | let new_state_map: StateMap = entries |
|
434 | 360 | .into_iter() |
|
435 | 361 | .map(|(path, entry)| (path.to_owned(), entry)) |
|
436 | 362 | .collect(); |
|
437 | 363 | let new_copy_map: CopyMap = copies |
|
438 | 364 | .into_iter() |
|
439 | 365 | .map(|(path, copy)| (path.to_owned(), copy.to_owned())) |
|
440 | 366 | .collect(); |
|
441 | 367 | |
|
442 | 368 | assert_eq!( |
|
443 | 369 | (&parents, state_map, copymap), |
|
444 | 370 | (new_parents, new_state_map, new_copy_map) |
|
445 | 371 | ) |
|
446 | 372 | } |
|
447 | 373 | |
|
448 | 374 | #[test] |
|
449 | 375 | /// https://www.mercurial-scm.org/repo/hg/rev/af3f26b6bba4 |
|
450 | 376 | fn test_parse_pack_one_entry_with_copy_and_time_conflict() { |
|
451 | 377 | let mut state_map: StateMap = [( |
|
452 | 378 | HgPathBuf::from_bytes(b"f1"), |
|
453 | 379 | DirstateEntry { |
|
454 | 380 | state: EntryState::Normal, |
|
455 | 381 | mode: 0o644, |
|
456 | 382 | size: 0, |
|
457 | 383 | mtime: 15000000, |
|
458 | 384 | }, |
|
459 | 385 | )] |
|
460 | 386 | .iter() |
|
461 | 387 | .cloned() |
|
462 | 388 | .collect(); |
|
463 | 389 | let mut copymap = FastHashMap::default(); |
|
464 | 390 | copymap.insert( |
|
465 | 391 | HgPathBuf::from_bytes(b"f1"), |
|
466 | 392 | HgPathBuf::from_bytes(b"copyname"), |
|
467 | 393 | ); |
|
468 | 394 | let parents = DirstateParents { |
|
469 | 395 | p1: b"12345678910111213141".into(), |
|
470 | 396 | p2: b"00000000000000000000".into(), |
|
471 | 397 | }; |
|
472 | 398 | let now = Duration::new(15000000, 0); |
|
473 | 399 | let result = |
|
474 | 400 | pack_dirstate(&mut state_map, ©map, parents.clone(), now) |
|
475 | 401 | .unwrap(); |
|
476 | 402 | |
|
477 | 403 | let (new_parents, entries, copies) = |
|
478 | 404 | parse_dirstate(result.as_slice()).unwrap(); |
|
479 | 405 | let new_state_map: StateMap = entries |
|
480 | 406 | .into_iter() |
|
481 | 407 | .map(|(path, entry)| (path.to_owned(), entry)) |
|
482 | 408 | .collect(); |
|
483 | 409 | let new_copy_map: CopyMap = copies |
|
484 | 410 | .into_iter() |
|
485 | 411 | .map(|(path, copy)| (path.to_owned(), copy.to_owned())) |
|
486 | 412 | .collect(); |
|
487 | 413 | |
|
488 | 414 | assert_eq!( |
|
489 | 415 | ( |
|
490 | 416 | &parents, |
|
491 | 417 | [( |
|
492 | 418 | HgPathBuf::from_bytes(b"f1"), |
|
493 | 419 | DirstateEntry { |
|
494 | 420 | state: EntryState::Normal, |
|
495 | 421 | mode: 0o644, |
|
496 | 422 | size: 0, |
|
497 | 423 | mtime: -1 |
|
498 | 424 | } |
|
499 | 425 | )] |
|
500 | 426 | .iter() |
|
501 | 427 | .cloned() |
|
502 | 428 | .collect::<StateMap>(), |
|
503 | 429 | copymap, |
|
504 | 430 | ), |
|
505 | 431 | (new_parents, new_state_map, new_copy_map) |
|
506 | 432 | ) |
|
507 | 433 | } |
|
508 | 434 | } |
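
As a reading aid, the round-trip tests above pin the v1 dirstate serialization down byte for byte. The following is a minimal standalone sketch (plain std Rust, not the crate's `pack_dirstate`) that rebuilds the single-entry-with-copy buffer by hand; the field order — 40 bytes of parents, then per entry a state byte, big-endian mode, size, mtime and path length, then `filename\0copysource` — is inferred from the expected byte arrays in the tests, not quoted from a format specification.

    fn pack_one_entry_by_hand() -> Vec<u8> {
        let mut packed = Vec::new();
        // 40 bytes of parents (p1 then p2), exactly as in the fixtures above.
        packed.extend(b"12345678910111213141");
        packed.extend(b"00000000000000000000");
        // One entry: state 'n', mode 0o644, size 0, mtime 791231220, then the
        // length of "f1\0copyname" followed by the path + copy source itself.
        let path_and_copy = b"f1\x00copyname";
        packed.push(b'n');
        packed.extend(&0o644i32.to_be_bytes());
        packed.extend(&0i32.to_be_bytes());
        packed.extend(&791231220i32.to_be_bytes());
        packed.extend(&(path_and_copy.len() as i32).to_be_bytes());
        packed.extend(path_and_copy);
        packed
    }

    fn main() {
        let packed = pack_one_entry_by_hand();
        // 40 parent bytes + 1 state byte + 4 big-endian i32 fields + 11 path bytes.
        assert_eq!(packed.len(), 40 + 1 + 4 * 4 + 11);
        // The tail corresponds to `..., 0, 0, 0, 11, 102, 49, 0, 99, 111, 112,
        // 121, 110, 97, 109, 101` in the decimal dump of the test above.
        assert_eq!(&packed[packed.len() - 11..], &b"f1\x00copyname"[..]);
    }
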
@@ -1,994 +1,913 b'' | |||
|
1 | 1 | // status.rs |
|
2 | 2 | // |
|
3 | 3 | // Copyright 2019 Raphaël Gomès <rgomes@octobus.net> |
|
4 | 4 | // |
|
5 | 5 | // This software may be used and distributed according to the terms of the |
|
6 | 6 | // GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | //! Rust implementation of dirstate.status (dirstate.py). |
|
9 | 9 | //! It is currently missing a lot of functionality compared to the Python one |
|
10 | 10 | //! and will only be triggered in narrow cases. |
|
11 | 11 | |
|
12 | #[cfg(feature = "dirstate-tree")] | |
|
13 | use crate::dirstate::dirstate_tree::iter::StatusShortcut; | |
|
14 | #[cfg(not(feature = "dirstate-tree"))] | |
|
15 | 12 | use crate::utils::path_auditor::PathAuditor; |
|
16 | 13 | use crate::{ |
|
17 | 14 | dirstate::SIZE_FROM_OTHER_PARENT, |
|
18 | 15 | filepatterns::PatternFileWarning, |
|
19 | 16 | matchers::{get_ignore_function, Matcher, VisitChildrenSet}, |
|
20 | 17 | utils::{ |
|
21 | 18 | files::{find_dirs, HgMetadata}, |
|
22 | 19 | hg_path::{ |
|
23 | 20 | hg_path_to_path_buf, os_string_to_hg_path_buf, HgPath, HgPathBuf, |
|
24 | 21 | HgPathError, |
|
25 | 22 | }, |
|
26 | 23 | }, |
|
27 | 24 | CopyMap, DirstateEntry, DirstateMap, EntryState, FastHashMap, |
|
28 | 25 | PatternError, |
|
29 | 26 | }; |
|
30 | 27 | use lazy_static::lazy_static; |
|
31 | 28 | use micro_timer::timed; |
|
32 | 29 | use rayon::prelude::*; |
|
33 | 30 | use std::{ |
|
34 | 31 | borrow::Cow, |
|
35 | 32 | collections::HashSet, |
|
36 | 33 | fmt, |
|
37 | 34 | fs::{read_dir, DirEntry}, |
|
38 | 35 | io::ErrorKind, |
|
39 | 36 | ops::Deref, |
|
40 | 37 | path::{Path, PathBuf}, |
|
41 | 38 | }; |
|
42 | 39 | |
|
43 | 40 | /// Wrong type of file from a `BadMatch` |
|
44 | 41 | /// Note: a lot of those don't exist on all platforms. |
|
45 | 42 | #[derive(Debug, Copy, Clone)] |
|
46 | 43 | pub enum BadType { |
|
47 | 44 | CharacterDevice, |
|
48 | 45 | BlockDevice, |
|
49 | 46 | FIFO, |
|
50 | 47 | Socket, |
|
51 | 48 | Directory, |
|
52 | 49 | Unknown, |
|
53 | 50 | } |
|
54 | 51 | |
|
55 | 52 | impl fmt::Display for BadType { |
|
56 | 53 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
|
57 | 54 | f.write_str(match self { |
|
58 | 55 | BadType::CharacterDevice => "character device", |
|
59 | 56 | BadType::BlockDevice => "block device", |
|
60 | 57 | BadType::FIFO => "fifo", |
|
61 | 58 | BadType::Socket => "socket", |
|
62 | 59 | BadType::Directory => "directory", |
|
63 | 60 | BadType::Unknown => "unknown", |
|
64 | 61 | }) |
|
65 | 62 | } |
|
66 | 63 | } |
|
67 | 64 | |
|
68 | 65 | /// Was explicitly matched but cannot be found/accessed |
|
69 | 66 | #[derive(Debug, Copy, Clone)] |
|
70 | 67 | pub enum BadMatch { |
|
71 | 68 | OsError(i32), |
|
72 | 69 | BadType(BadType), |
|
73 | 70 | } |
|
74 | 71 | |
|
75 | 72 | /// Enum used to dispatch new status entries into the right collections. |
|
76 | 73 | /// It is similar to `crate::EntryState`, but represents the transient state of |
|
77 | 74 | /// entries during the lifetime of a command. |
|
78 | 75 | #[derive(Debug, Copy, Clone)] |
|
79 | 76 | pub enum Dispatch { |
|
80 | 77 | Unsure, |
|
81 | 78 | Modified, |
|
82 | 79 | Added, |
|
83 | 80 | Removed, |
|
84 | 81 | Deleted, |
|
85 | 82 | Clean, |
|
86 | 83 | Unknown, |
|
87 | 84 | Ignored, |
|
88 | 85 | /// Empty dispatch, the file is not worth listing |
|
89 | 86 | None, |
|
90 | 87 | /// Was explicitly matched but cannot be found/accessed |
|
91 | 88 | Bad(BadMatch), |
|
92 | 89 | Directory { |
|
93 | 90 | /// True if the directory used to be a file in the dmap so we can say |
|
94 | 91 | /// that it's been removed. |
|
95 | 92 | was_file: bool, |
|
96 | 93 | }, |
|
97 | 94 | } |
|
98 | 95 | |
|
99 | 96 | type IoResult<T> = std::io::Result<T>; |
|
100 | 97 | |
|
101 | 98 | /// `Box<dyn Trait>` is syntactic sugar for `Box<dyn Trait + 'static>`, so add |
|
102 | 99 | /// an explicit lifetime here to not fight `'static` bounds "out of nowhere". |
|
103 | 100 | type IgnoreFnType<'a> = Box<dyn for<'r> Fn(&'r HgPath) -> bool + Sync + 'a>; |
|
104 | 101 | |
|
105 | 102 | /// We have a good mix of owned (from directory traversal) and borrowed (from |
|
106 | 103 | /// the dirstate/explicit) paths; this comes up a lot. |
|
107 | 104 | pub type HgPathCow<'a> = Cow<'a, HgPath>; |
|
108 | 105 | |
|
109 | 106 | /// A path with its computed ``Dispatch`` information |
|
110 | 107 | type DispatchedPath<'a> = (HgPathCow<'a>, Dispatch); |
|
111 | 108 | |
|
112 | 109 | /// The conversion from `HgPath` to a real fs path failed. |
|
113 | 110 | /// `22` is the error code for "Invalid argument" |
|
114 | 111 | const INVALID_PATH_DISPATCH: Dispatch = Dispatch::Bad(BadMatch::OsError(22)); |
|
115 | 112 | |
|
116 | 113 | /// Dates and times that are outside the 31-bit signed range are compared |
|
117 | 114 | /// modulo 2^31. This should prevent hg from behaving badly with very large |
|
118 | 115 | /// files or corrupt dates while still having a high probability of detecting |
|
119 | 116 | /// changes. (issue2608) |
|
120 | 117 | /// TODO I haven't found a way of having `b` be `Into<i32>`, since `From<u64>` |
|
121 | 118 | /// is not defined for `i32`, and there is no `As` trait. This forces the |
|
122 | 119 | /// caller to cast `b` as `i32`. |
|
123 | 120 | fn mod_compare(a: i32, b: i32) -> bool { |
|
124 | 121 | a & i32::max_value() != b & i32::max_value() |
|
125 | 122 | } |
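
To make the masking concrete, here is a standalone sketch (not part of the module) showing that a value which overflowed the dirstate's `i32` field still compares equal to a fresh `stat()` result once both sides are reduced modulo 2^31, while genuine differences still show up.

    fn mod_compare(a: i32, b: i32) -> bool {
        a & i32::max_value() != b & i32::max_value()
    }

    fn main() {
        // Suppose stat() reports a (2^31 + 5)-byte file. 5 and 2^31 + 5 are
        // equal modulo 2^31, so the overflow alone does not report a change...
        let st_size: u64 = (1 << 31) + 5;
        assert!(!mod_compare(5, st_size as i32));
        // ...while genuinely different values still compare as different.
        assert!(mod_compare(5, 6));
    }
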
|
126 | 123 | |
|
127 | 124 | /// Return a sorted list containing information about the entries |
|
128 | 125 | /// in the directory. |
|
129 | 126 | /// |
|
130 | 127 | /// * `skip_dot_hg` - Return an empty vec if `path` contains a `.hg` directory |
|
131 | 128 | fn list_directory( |
|
132 | 129 | path: impl AsRef<Path>, |
|
133 | 130 | skip_dot_hg: bool, |
|
134 | 131 | ) -> std::io::Result<Vec<(HgPathBuf, DirEntry)>> { |
|
135 | 132 | let mut results = vec![]; |
|
136 | 133 | let entries = read_dir(path.as_ref())?; |
|
137 | 134 | |
|
138 | 135 | for entry in entries { |
|
139 | 136 | let entry = entry?; |
|
140 | 137 | let filename = os_string_to_hg_path_buf(entry.file_name())?; |
|
141 | 138 | let file_type = entry.file_type()?; |
|
142 | 139 | if skip_dot_hg && filename.as_bytes() == b".hg" && file_type.is_dir() { |
|
143 | 140 | return Ok(vec![]); |
|
144 | 141 | } else { |
|
145 | 142 | results.push((filename, entry)) |
|
146 | 143 | } |
|
147 | 144 | } |
|
148 | 145 | |
|
149 | 146 | results.sort_unstable_by_key(|e| e.0.clone()); |
|
150 | 147 | Ok(results) |
|
151 | 148 | } |
|
152 | 149 | |
|
153 | 150 | /// The file corresponding to the dirstate entry was found on the filesystem. |
|
154 | 151 | fn dispatch_found( |
|
155 | 152 | filename: impl AsRef<HgPath>, |
|
156 | 153 | entry: DirstateEntry, |
|
157 | 154 | metadata: HgMetadata, |
|
158 | 155 | copy_map: &CopyMap, |
|
159 | 156 | options: StatusOptions, |
|
160 | 157 | ) -> Dispatch { |
|
161 | 158 | let DirstateEntry { |
|
162 | 159 | state, |
|
163 | 160 | mode, |
|
164 | 161 | mtime, |
|
165 | 162 | size, |
|
166 | 163 | } = entry; |
|
167 | 164 | |
|
168 | 165 | let HgMetadata { |
|
169 | 166 | st_mode, |
|
170 | 167 | st_size, |
|
171 | 168 | st_mtime, |
|
172 | 169 | .. |
|
173 | 170 | } = metadata; |
|
174 | 171 | |
|
175 | 172 | match state { |
|
176 | 173 | EntryState::Normal => { |
|
177 | 174 | let size_changed = mod_compare(size, st_size as i32); |
|
178 | 175 | let mode_changed = |
|
179 | 176 | (mode ^ st_mode as i32) & 0o100 != 0o000 && options.check_exec; |
|
180 | 177 | let metadata_changed = size >= 0 && (size_changed || mode_changed); |
|
181 | 178 | let other_parent = size == SIZE_FROM_OTHER_PARENT; |
|
182 | 179 | |
|
183 | 180 | if metadata_changed |
|
184 | 181 | || other_parent |
|
185 | 182 | || copy_map.contains_key(filename.as_ref()) |
|
186 | 183 | { |
|
187 | 184 | if metadata.is_symlink() && size_changed { |
|
188 | 185 | // issue6456: Size returned may be longer due to encryption |
|
189 | 186 | // on EXT-4 fscrypt. TODO maybe only do it on EXT4? |
|
190 | 187 | Dispatch::Unsure |
|
191 | 188 | } else { |
|
192 | 189 | Dispatch::Modified |
|
193 | 190 | } |
|
194 | 191 | } else if mod_compare(mtime, st_mtime as i32) |
|
195 | 192 | || st_mtime == options.last_normal_time |
|
196 | 193 | { |
|
197 | 194 | // the file may have just been marked as normal and |
|
198 | 195 | // it may have changed in the same second without |
|
199 | 196 | // changing its size. This can happen if we quickly |
|
200 | 197 | // do multiple commits. Force lookup, so we don't |
|
201 | 198 | // miss such a racy file change. |
|
202 | 199 | Dispatch::Unsure |
|
203 | 200 | } else if options.list_clean { |
|
204 | 201 | Dispatch::Clean |
|
205 | 202 | } else { |
|
206 | 203 | Dispatch::None |
|
207 | 204 | } |
|
208 | 205 | } |
|
209 | 206 | EntryState::Merged => Dispatch::Modified, |
|
210 | 207 | EntryState::Added => Dispatch::Added, |
|
211 | 208 | EntryState::Removed => Dispatch::Removed, |
|
212 | 209 | EntryState::Unknown => Dispatch::Unknown, |
|
213 | 210 | } |
|
214 | 211 | } |
|
215 | 212 | |
|
216 | 213 | /// The file corresponding to this Dirstate entry is missing. |
|
217 | 214 | fn dispatch_missing(state: EntryState) -> Dispatch { |
|
218 | 215 | match state { |
|
219 | 216 | // File was removed from the filesystem during commands |
|
220 | 217 | EntryState::Normal | EntryState::Merged | EntryState::Added => { |
|
221 | 218 | Dispatch::Deleted |
|
222 | 219 | } |
|
223 | 220 | // File was removed, everything is normal |
|
224 | 221 | EntryState::Removed => Dispatch::Removed, |
|
225 | 222 | // File is unknown to Mercurial, everything is normal |
|
226 | 223 | EntryState::Unknown => Dispatch::Unknown, |
|
227 | 224 | } |
|
228 | 225 | } |
|
229 | 226 | |
|
230 | 227 | fn dispatch_os_error(e: &std::io::Error) -> Dispatch { |
|
231 | 228 | Dispatch::Bad(BadMatch::OsError( |
|
232 | 229 | e.raw_os_error().expect("expected real OS error"), |
|
233 | 230 | )) |
|
234 | 231 | } |
|
235 | 232 | |
|
236 | 233 | lazy_static! { |
|
237 | 234 | static ref DEFAULT_WORK: HashSet<&'static HgPath> = { |
|
238 | 235 | let mut h = HashSet::new(); |
|
239 | 236 | h.insert(HgPath::new(b"")); |
|
240 | 237 | h |
|
241 | 238 | }; |
|
242 | 239 | } |
|
243 | 240 | |
|
244 | 241 | #[derive(Debug, Copy, Clone)] |
|
245 | 242 | pub struct StatusOptions { |
|
246 | 243 | /// Remember the most recent modification timeslot for status, to make |
|
247 | 244 | /// sure we won't miss future size-preserving file content modifications |
|
248 | 245 | /// that happen within the same timeslot. |
|
249 | 246 | pub last_normal_time: i64, |
|
250 | 247 | /// Whether we are on a filesystem with UNIX-like exec flags |
|
251 | 248 | pub check_exec: bool, |
|
252 | 249 | pub list_clean: bool, |
|
253 | 250 | pub list_unknown: bool, |
|
254 | 251 | pub list_ignored: bool, |
|
255 | 252 | /// Whether to collect traversed dirs for applying a callback later. |
|
256 | 253 | /// Used by `hg purge` for example. |
|
257 | 254 | pub collect_traversed_dirs: bool, |
|
258 | 255 | } |
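
For orientation, a sketch of how these options might be filled for a bare `hg status` run (no clean or ignored listing). The module path and the concrete values are illustrative assumptions, not what the Python callers actually pass.

    use hg::dirstate::status::StatusOptions; // module path assumed

    fn plain_status_options() -> StatusOptions {
        StatusOptions {
            last_normal_time: 0,           // placeholder timeslot
            check_exec: true,              // assume a POSIX-style exec bit
            list_clean: false,
            list_unknown: true,
            list_ignored: false,
            collect_traversed_dirs: false, // only `hg purge`-like callers need this
        }
    }
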
|
259 | 256 | |
|
260 | 257 | #[derive(Debug)] |
|
261 | 258 | pub struct DirstateStatus<'a> { |
|
262 | 259 | pub modified: Vec<HgPathCow<'a>>, |
|
263 | 260 | pub added: Vec<HgPathCow<'a>>, |
|
264 | 261 | pub removed: Vec<HgPathCow<'a>>, |
|
265 | 262 | pub deleted: Vec<HgPathCow<'a>>, |
|
266 | 263 | pub clean: Vec<HgPathCow<'a>>, |
|
267 | 264 | pub ignored: Vec<HgPathCow<'a>>, |
|
268 | 265 | pub unknown: Vec<HgPathCow<'a>>, |
|
269 | 266 | pub bad: Vec<(HgPathCow<'a>, BadMatch)>, |
|
270 | 267 | /// Only filled if `collect_traversed_dirs` is `true` |
|
271 | 268 | pub traversed: Vec<HgPathBuf>, |
|
272 | 269 | } |
|
273 | 270 | |
|
274 | 271 | #[derive(Debug, derive_more::From)] |
|
275 | 272 | pub enum StatusError { |
|
276 | 273 | /// Generic IO error |
|
277 | 274 | IO(std::io::Error), |
|
278 | 275 | /// An invalid path that cannot be represented in Mercurial was found |
|
279 | 276 | Path(HgPathError), |
|
280 | 277 | /// An invalid "ignore" pattern was found |
|
281 | 278 | Pattern(PatternError), |
|
282 | 279 | } |
|
283 | 280 | |
|
284 | 281 | pub type StatusResult<T> = Result<T, StatusError>; |
|
285 | 282 | |
|
286 | 283 | impl fmt::Display for StatusError { |
|
287 | 284 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
|
288 | 285 | match self { |
|
289 | 286 | StatusError::IO(error) => error.fmt(f), |
|
290 | 287 | StatusError::Path(error) => error.fmt(f), |
|
291 | 288 | StatusError::Pattern(error) => error.fmt(f), |
|
292 | 289 | } |
|
293 | 290 | } |
|
294 | 291 | } |
|
295 | 292 | |
|
296 | 293 | /// Gives information about which files are changed in the working directory |
|
297 | 294 | /// and how, compared to the revision we're based on |
|
298 | 295 | pub struct Status<'a, M: Matcher + Sync> { |
|
299 | 296 | dmap: &'a DirstateMap, |
|
300 | 297 | pub(crate) matcher: &'a M, |
|
301 | 298 | root_dir: PathBuf, |
|
302 | 299 | pub(crate) options: StatusOptions, |
|
303 | 300 | ignore_fn: IgnoreFnType<'a>, |
|
304 | 301 | } |
|
305 | 302 | |
|
306 | 303 | impl<'a, M> Status<'a, M> |
|
307 | 304 | where |
|
308 | 305 | M: Matcher + Sync, |
|
309 | 306 | { |
|
310 | 307 | pub fn new( |
|
311 | 308 | dmap: &'a DirstateMap, |
|
312 | 309 | matcher: &'a M, |
|
313 | 310 | root_dir: PathBuf, |
|
314 | 311 | ignore_files: Vec<PathBuf>, |
|
315 | 312 | options: StatusOptions, |
|
316 | 313 | ) -> StatusResult<(Self, Vec<PatternFileWarning>)> { |
|
317 | 314 | // Needs to outlive `dir_ignore_fn` since it's captured. |
|
318 | 315 | |
|
319 | 316 | let (ignore_fn, warnings): (IgnoreFnType, _) = |
|
320 | 317 | if options.list_ignored || options.list_unknown { |
|
321 | 318 | get_ignore_function(ignore_files, &root_dir)? |
|
322 | 319 | } else { |
|
323 | 320 | (Box::new(|&_| true), vec![]) |
|
324 | 321 | }; |
|
325 | 322 | |
|
326 | 323 | Ok(( |
|
327 | 324 | Self { |
|
328 | 325 | dmap, |
|
329 | 326 | matcher, |
|
330 | 327 | root_dir, |
|
331 | 328 | options, |
|
332 | 329 | ignore_fn, |
|
333 | 330 | }, |
|
334 | 331 | warnings, |
|
335 | 332 | )) |
|
336 | 333 | } |
|
337 | 334 | |
|
338 | 335 | /// Is the path ignored? |
|
339 | 336 | pub fn is_ignored(&self, path: impl AsRef<HgPath>) -> bool { |
|
340 | 337 | (self.ignore_fn)(path.as_ref()) |
|
341 | 338 | } |
|
342 | 339 | |
|
343 | 340 | /// Is the path or one of its ancestors ignored? |
|
344 | 341 | pub fn dir_ignore(&self, dir: impl AsRef<HgPath>) -> bool { |
|
345 | 342 | // Only involve ignore mechanism if we're listing unknowns or ignored. |
|
346 | 343 | if self.options.list_ignored || self.options.list_unknown { |
|
347 | 344 | if self.is_ignored(&dir) { |
|
348 | 345 | true |
|
349 | 346 | } else { |
|
350 | 347 | for p in find_dirs(dir.as_ref()) { |
|
351 | 348 | if self.is_ignored(p) { |
|
352 | 349 | return true; |
|
353 | 350 | } |
|
354 | 351 | } |
|
355 | 352 | false |
|
356 | 353 | } |
|
357 | 354 | } else { |
|
358 | 355 | true |
|
359 | 356 | } |
|
360 | 357 | } |
|
361 | 358 | |
|
362 | 359 | /// Get stat data about the files explicitly specified by the matcher. |
|
363 | 360 | /// Returns a tuple of the directories that need to be traversed and the |
|
364 | 361 | /// files with their corresponding `Dispatch`. |
|
365 | 362 | /// TODO subrepos |
|
366 | 363 | #[timed] |
|
367 | 364 | pub fn walk_explicit( |
|
368 | 365 | &self, |
|
369 | 366 | traversed_sender: crossbeam_channel::Sender<HgPathBuf>, |
|
370 | 367 | ) -> (Vec<DispatchedPath<'a>>, Vec<DispatchedPath<'a>>) { |
|
371 | 368 | self.matcher |
|
372 | 369 | .file_set() |
|
373 | 370 | .unwrap_or(&DEFAULT_WORK) |
|
374 | 371 | .par_iter() |
|
375 | 372 | .flat_map(|&filename| -> Option<_> { |
|
376 | 373 | // TODO normalization |
|
377 | 374 | let normalized = filename; |
|
378 | 375 | |
|
379 | 376 | let buf = match hg_path_to_path_buf(normalized) { |
|
380 | 377 | Ok(x) => x, |
|
381 | 378 | Err(_) => { |
|
382 | 379 | return Some(( |
|
383 | 380 | Cow::Borrowed(normalized), |
|
384 | 381 | INVALID_PATH_DISPATCH, |
|
385 | 382 | )) |
|
386 | 383 | } |
|
387 | 384 | }; |
|
388 | 385 | let target = self.root_dir.join(buf); |
|
389 | 386 | let st = target.symlink_metadata(); |
|
390 | 387 | let in_dmap = self.dmap.get(normalized); |
|
391 | 388 | match st { |
|
392 | 389 | Ok(meta) => { |
|
393 | 390 | let file_type = meta.file_type(); |
|
394 | 391 | return if file_type.is_file() || file_type.is_symlink() |
|
395 | 392 | { |
|
396 | 393 | if let Some(entry) = in_dmap { |
|
397 | 394 | return Some(( |
|
398 | 395 | Cow::Borrowed(normalized), |
|
399 | 396 | dispatch_found( |
|
400 | 397 | &normalized, |
|
401 | 398 | *entry, |
|
402 | 399 | HgMetadata::from_metadata(meta), |
|
403 | 400 | &self.dmap.copy_map, |
|
404 | 401 | self.options, |
|
405 | 402 | ), |
|
406 | 403 | )); |
|
407 | 404 | } |
|
408 | 405 | Some(( |
|
409 | 406 | Cow::Borrowed(normalized), |
|
410 | 407 | Dispatch::Unknown, |
|
411 | 408 | )) |
|
412 | 409 | } else if file_type.is_dir() { |
|
413 | 410 | if self.options.collect_traversed_dirs { |
|
414 | 411 | traversed_sender |
|
415 | 412 | .send(normalized.to_owned()) |
|
416 | 413 | .expect("receiver should outlive sender"); |
|
417 | 414 | } |
|
418 | 415 | Some(( |
|
419 | 416 | Cow::Borrowed(normalized), |
|
420 | 417 | Dispatch::Directory { |
|
421 | 418 | was_file: in_dmap.is_some(), |
|
422 | 419 | }, |
|
423 | 420 | )) |
|
424 | 421 | } else { |
|
425 | 422 | Some(( |
|
426 | 423 | Cow::Borrowed(normalized), |
|
427 | 424 | Dispatch::Bad(BadMatch::BadType( |
|
428 | 425 | // TODO do more than unknown |
|
429 | 426 | // Support for all `BadType` variants |
|
430 | 427 | // varies greatly between platforms. |
|
431 | 428 | // So far, no tests check the type and |
|
432 | 429 | // this should be good enough for most |
|
433 | 430 | // users. |
|
434 | 431 | BadType::Unknown, |
|
435 | 432 | )), |
|
436 | 433 | )) |
|
437 | 434 | }; |
|
438 | 435 | } |
|
439 | 436 | Err(_) => { |
|
440 | 437 | if let Some(entry) = in_dmap { |
|
441 | 438 | return Some(( |
|
442 | 439 | Cow::Borrowed(normalized), |
|
443 | 440 | dispatch_missing(entry.state), |
|
444 | 441 | )); |
|
445 | 442 | } |
|
446 | 443 | } |
|
447 | 444 | }; |
|
448 | 445 | None |
|
449 | 446 | }) |
|
450 | 447 | .partition(|(_, dispatch)| match dispatch { |
|
451 | 448 | Dispatch::Directory { .. } => true, |
|
452 | 449 | _ => false, |
|
453 | 450 | }) |
|
454 | 451 | } |
|
455 | 452 | |
|
456 | 453 | /// Walk the working directory recursively to look for changes compared to |
|
457 | 454 | /// the current `DirstateMap`. |
|
458 | 455 | /// |
|
459 | 456 | /// This takes a mutable reference to the results to account for the |
|
460 | 457 | /// `extend` in timings |
|
461 | 458 | #[timed] |
|
462 | 459 | pub fn traverse( |
|
463 | 460 | &self, |
|
464 | 461 | path: impl AsRef<HgPath>, |
|
465 | 462 | old_results: &FastHashMap<HgPathCow<'a>, Dispatch>, |
|
466 | 463 | results: &mut Vec<DispatchedPath<'a>>, |
|
467 | 464 | traversed_sender: crossbeam_channel::Sender<HgPathBuf>, |
|
468 | 465 | ) { |
|
469 | 466 | // The traversal is done in parallel, so use a channel to gather |
|
470 | 467 | // entries. `crossbeam_channel::Sender` is `Sync`, while `mpsc::Sender` |
|
471 | 468 | // is not. |
|
472 | 469 | let (files_transmitter, files_receiver) = |
|
473 | 470 | crossbeam_channel::unbounded(); |
|
474 | 471 | |
|
475 | 472 | self.traverse_dir( |
|
476 | 473 | &files_transmitter, |
|
477 | 474 | path, |
|
478 | 475 | &old_results, |
|
479 | 476 | traversed_sender, |
|
480 | 477 | ); |
|
481 | 478 | |
|
482 | 479 | // Disconnect the channel so the receiver stops waiting |
|
483 | 480 | drop(files_transmitter); |
|
484 | 481 | |
|
485 | 482 | let new_results = files_receiver |
|
486 | 483 | .into_iter() |
|
487 | 484 | .par_bridge() |
|
488 | 485 | .map(|(f, d)| (Cow::Owned(f), d)); |
|
489 | 486 | |
|
490 | 487 | results.par_extend(new_results); |
|
491 | 488 | } |
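
The channel-plus-scope machinery is split across `traverse` and the helpers below, so here is a standalone sketch of just the concurrency shape, using the same crossbeam-channel and rayon APIs with toy data in place of directory entries: scoped tasks send into an unbounded channel, the last sender is dropped to disconnect it, and the receiver is bridged back into a parallel iterator.

    use rayon::prelude::*;

    fn main() {
        let dirs = ["a", "b", "c"]; // stands in for directories to list
        let (files_tx, files_rx) = crossbeam_channel::unbounded();

        // Each scoped task stands in for one directory being listed; tasks
        // run in parallel and push their findings through the channel.
        rayon::scope(|scope| {
            for &dir in &dirs {
                let files_tx = files_tx.clone();
                scope.spawn(move |_scope| {
                    files_tx
                        .send(format!("{}/some_file", dir))
                        .expect("receiver should outlive sender");
                });
            }
        });

        // Disconnect the channel so the receiver stops waiting, then drain it
        // through rayon, mirroring the `par_bridge` + `par_extend` step above.
        drop(files_tx);
        let results: Vec<String> = files_rx.into_iter().par_bridge().collect();
        assert_eq!(results.len(), 3);
    }
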
|
492 | 489 | |
|
493 | 490 | /// Dispatch a single entry (file, folder, symlink...) found during |
|
494 | 491 | /// `traverse`. If the entry is a folder that needs to be traversed, it |
|
495 | 492 | /// will be handled in a separate thread. |
|
496 | 493 | fn handle_traversed_entry<'b>( |
|
497 | 494 | &'a self, |
|
498 | 495 | scope: &rayon::Scope<'b>, |
|
499 | 496 | files_sender: &'b crossbeam_channel::Sender<(HgPathBuf, Dispatch)>, |
|
500 | 497 | old_results: &'a FastHashMap<Cow<HgPath>, Dispatch>, |
|
501 | 498 | filename: HgPathBuf, |
|
502 | 499 | dir_entry: DirEntry, |
|
503 | 500 | traversed_sender: crossbeam_channel::Sender<HgPathBuf>, |
|
504 | 501 | ) -> IoResult<()> |
|
505 | 502 | where |
|
506 | 503 | 'a: 'b, |
|
507 | 504 | { |
|
508 | 505 | let file_type = dir_entry.file_type()?; |
|
509 | 506 | let entry_option = self.dmap.get(&filename); |
|
510 | 507 | |
|
511 | 508 | if filename.as_bytes() == b".hg" { |
|
512 | 509 | // Could be a directory or a symlink |
|
513 | 510 | return Ok(()); |
|
514 | 511 | } |
|
515 | 512 | |
|
516 | 513 | if file_type.is_dir() { |
|
517 | 514 | self.handle_traversed_dir( |
|
518 | 515 | scope, |
|
519 | 516 | files_sender, |
|
520 | 517 | old_results, |
|
521 | 518 | entry_option, |
|
522 | 519 | filename, |
|
523 | 520 | traversed_sender, |
|
524 | 521 | ); |
|
525 | 522 | } else if file_type.is_file() || file_type.is_symlink() { |
|
526 | 523 | if let Some(entry) = entry_option { |
|
527 | 524 | if self.matcher.matches_everything() |
|
528 | 525 | || self.matcher.matches(&filename) |
|
529 | 526 | { |
|
530 | 527 | let metadata = dir_entry.metadata()?; |
|
531 | 528 | files_sender |
|
532 | 529 | .send(( |
|
533 | 530 | filename.to_owned(), |
|
534 | 531 | dispatch_found( |
|
535 | 532 | &filename, |
|
536 | 533 | *entry, |
|
537 | 534 | HgMetadata::from_metadata(metadata), |
|
538 | 535 | &self.dmap.copy_map, |
|
539 | 536 | self.options, |
|
540 | 537 | ), |
|
541 | 538 | )) |
|
542 | 539 | .unwrap(); |
|
543 | 540 | } |
|
544 | 541 | } else if (self.matcher.matches_everything() |
|
545 | 542 | || self.matcher.matches(&filename)) |
|
546 | 543 | && !self.is_ignored(&filename) |
|
547 | 544 | { |
|
548 | 545 | if (self.options.list_ignored |
|
549 | 546 | || self.matcher.exact_match(&filename)) |
|
550 | 547 | && self.dir_ignore(&filename) |
|
551 | 548 | { |
|
552 | 549 | if self.options.list_ignored { |
|
553 | 550 | files_sender |
|
554 | 551 | .send((filename.to_owned(), Dispatch::Ignored)) |
|
555 | 552 | .unwrap(); |
|
556 | 553 | } |
|
557 | 554 | } else if self.options.list_unknown { |
|
558 | 555 | files_sender |
|
559 | 556 | .send((filename.to_owned(), Dispatch::Unknown)) |
|
560 | 557 | .unwrap(); |
|
561 | 558 | } |
|
562 | 559 | } else if self.is_ignored(&filename) && self.options.list_ignored { |
|
563 | 560 | files_sender |
|
564 | 561 | .send((filename.to_owned(), Dispatch::Ignored)) |
|
565 | 562 | .unwrap(); |
|
566 | 563 | } |
|
567 | 564 | } else if let Some(entry) = entry_option { |
|
568 | 565 | // Used to be a file or a folder, now something else. |
|
569 | 566 | if self.matcher.matches_everything() |
|
570 | 567 | || self.matcher.matches(&filename) |
|
571 | 568 | { |
|
572 | 569 | files_sender |
|
573 | 570 | .send((filename.to_owned(), dispatch_missing(entry.state))) |
|
574 | 571 | .unwrap(); |
|
575 | 572 | } |
|
576 | 573 | } |
|
577 | 574 | |
|
578 | 575 | Ok(()) |
|
579 | 576 | } |
|
580 | 577 | |
|
581 | 578 | /// A directory was found in the filesystem and needs to be traversed |
|
582 | 579 | fn handle_traversed_dir<'b>( |
|
583 | 580 | &'a self, |
|
584 | 581 | scope: &rayon::Scope<'b>, |
|
585 | 582 | files_sender: &'b crossbeam_channel::Sender<(HgPathBuf, Dispatch)>, |
|
586 | 583 | old_results: &'a FastHashMap<Cow<HgPath>, Dispatch>, |
|
587 | 584 | entry_option: Option<&'a DirstateEntry>, |
|
588 | 585 | directory: HgPathBuf, |
|
589 | 586 | traversed_sender: crossbeam_channel::Sender<HgPathBuf>, |
|
590 | 587 | ) where |
|
591 | 588 | 'a: 'b, |
|
592 | 589 | { |
|
593 | 590 | scope.spawn(move |_| { |
|
594 | 591 | // Nested `if` until `rust-lang/rust#53668` is stable |
|
595 | 592 | if let Some(entry) = entry_option { |
|
596 | 593 | // Used to be a file, is now a folder |
|
597 | 594 | if self.matcher.matches_everything() |
|
598 | 595 | || self.matcher.matches(&directory) |
|
599 | 596 | { |
|
600 | 597 | files_sender |
|
601 | 598 | .send(( |
|
602 | 599 | directory.to_owned(), |
|
603 | 600 | dispatch_missing(entry.state), |
|
604 | 601 | )) |
|
605 | 602 | .unwrap(); |
|
606 | 603 | } |
|
607 | 604 | } |
|
608 | 605 | // Do we need to traverse it? |
|
609 | 606 | if !self.is_ignored(&directory) || self.options.list_ignored { |
|
610 | 607 | self.traverse_dir( |
|
611 | 608 | files_sender, |
|
612 | 609 | directory, |
|
613 | 610 | &old_results, |
|
614 | 611 | traversed_sender, |
|
615 | 612 | ) |
|
616 | 613 | } |
|
617 | 614 | }); |
|
618 | 615 | } |
|
619 | 616 | |
|
620 | 617 | /// Decides whether the directory needs to be listed, and if so handles the |
|
621 | 618 | /// entries in a separate thread. |
|
622 | 619 | fn traverse_dir( |
|
623 | 620 | &self, |
|
624 | 621 | files_sender: &crossbeam_channel::Sender<(HgPathBuf, Dispatch)>, |
|
625 | 622 | directory: impl AsRef<HgPath>, |
|
626 | 623 | old_results: &FastHashMap<Cow<HgPath>, Dispatch>, |
|
627 | 624 | traversed_sender: crossbeam_channel::Sender<HgPathBuf>, |
|
628 | 625 | ) { |
|
629 | 626 | let directory = directory.as_ref(); |
|
630 | 627 | |
|
631 | 628 | if self.options.collect_traversed_dirs { |
|
632 | 629 | traversed_sender |
|
633 | 630 | .send(directory.to_owned()) |
|
634 | 631 | .expect("receiver should outlive sender"); |
|
635 | 632 | } |
|
636 | 633 | |
|
637 | 634 | let visit_entries = match self.matcher.visit_children_set(directory) { |
|
638 | 635 | VisitChildrenSet::Empty => return, |
|
639 | 636 | VisitChildrenSet::This | VisitChildrenSet::Recursive => None, |
|
640 | 637 | VisitChildrenSet::Set(set) => Some(set), |
|
641 | 638 | }; |
|
642 | 639 | let buf = match hg_path_to_path_buf(directory) { |
|
643 | 640 | Ok(b) => b, |
|
644 | 641 | Err(_) => { |
|
645 | 642 | files_sender |
|
646 | 643 | .send((directory.to_owned(), INVALID_PATH_DISPATCH)) |
|
647 | 644 | .expect("receiver should outlive sender"); |
|
648 | 645 | return; |
|
649 | 646 | } |
|
650 | 647 | }; |
|
651 | 648 | let dir_path = self.root_dir.join(buf); |
|
652 | 649 | |
|
653 | 650 | let skip_dot_hg = !directory.as_bytes().is_empty(); |
|
654 | 651 | let entries = match list_directory(dir_path, skip_dot_hg) { |
|
655 | 652 | Err(e) => { |
|
656 | 653 | files_sender |
|
657 | 654 | .send((directory.to_owned(), dispatch_os_error(&e))) |
|
658 | 655 | .expect("receiver should outlive sender"); |
|
659 | 656 | return; |
|
660 | 657 | } |
|
661 | 658 | Ok(entries) => entries, |
|
662 | 659 | }; |
|
663 | 660 | |
|
664 | 661 | rayon::scope(|scope| { |
|
665 | 662 | for (filename, dir_entry) in entries { |
|
666 | 663 | if let Some(ref set) = visit_entries { |
|
667 | 664 | if !set.contains(filename.deref()) { |
|
668 | 665 | continue; |
|
669 | 666 | } |
|
670 | 667 | } |
|
671 | 668 | // TODO normalize |
|
672 | 669 | let filename = if directory.is_empty() { |
|
673 | 670 | filename.to_owned() |
|
674 | 671 | } else { |
|
675 | 672 | directory.join(&filename) |
|
676 | 673 | }; |
|
677 | 674 | |
|
678 | 675 | if !old_results.contains_key(filename.deref()) { |
|
679 | 676 | match self.handle_traversed_entry( |
|
680 | 677 | scope, |
|
681 | 678 | files_sender, |
|
682 | 679 | old_results, |
|
683 | 680 | filename, |
|
684 | 681 | dir_entry, |
|
685 | 682 | traversed_sender.clone(), |
|
686 | 683 | ) { |
|
687 | 684 | Err(e) => { |
|
688 | 685 | files_sender |
|
689 | 686 | .send(( |
|
690 | 687 | directory.to_owned(), |
|
691 | 688 | dispatch_os_error(&e), |
|
692 | 689 | )) |
|
693 | 690 | .expect("receiver should outlive sender"); |
|
694 | 691 | } |
|
695 | 692 | Ok(_) => {} |
|
696 | 693 | } |
|
697 | 694 | } |
|
698 | 695 | } |
|
699 | 696 | }) |
|
700 | 697 | } |
|
701 | 698 | |
|
702 | 699 | /// Add the files in the dirstate to the results. |
|
703 | 700 | /// |
|
704 | 701 | /// This takes a mutable reference to the results to account for the |
|
705 | 702 | /// `extend` in timings |
|
706 | #[cfg(feature = "dirstate-tree")] | |
|
707 | #[timed] | |
|
708 | pub fn extend_from_dmap(&self, results: &mut Vec<DispatchedPath<'a>>) { | |
|
709 | results.par_extend( | |
|
710 | self.dmap | |
|
711 | .fs_iter(self.root_dir.clone()) | |
|
712 | .par_bridge() | |
|
713 | .filter(|(path, _)| self.matcher.matches(path)) | |
|
714 | .map(move |(filename, shortcut)| { | |
|
715 | let entry = match shortcut { | |
|
716 | StatusShortcut::Entry(e) => e, | |
|
717 | StatusShortcut::Dispatch(d) => { | |
|
718 | return (Cow::Owned(filename), d) | |
|
719 | } | |
|
720 | }; | |
|
721 | let filename_as_path = match hg_path_to_path_buf(&filename) | |
|
722 | { | |
|
723 | Ok(f) => f, | |
|
724 | Err(_) => { | |
|
725 | return ( | |
|
726 | Cow::Owned(filename), | |
|
727 | INVALID_PATH_DISPATCH, | |
|
728 | ) | |
|
729 | } | |
|
730 | }; | |
|
731 | let meta = self | |
|
732 | .root_dir | |
|
733 | .join(filename_as_path) | |
|
734 | .symlink_metadata(); | |
|
735 | ||
|
736 | match meta { | |
|
737 | Ok(m) | |
|
738 | if !(m.file_type().is_file() | |
|
739 | || m.file_type().is_symlink()) => | |
|
740 | { | |
|
741 | ( | |
|
742 | Cow::Owned(filename), | |
|
743 | dispatch_missing(entry.state), | |
|
744 | ) | |
|
745 | } | |
|
746 | Ok(m) => { | |
|
747 | let dispatch = dispatch_found( | |
|
748 | &filename, | |
|
749 | entry, | |
|
750 | HgMetadata::from_metadata(m), | |
|
751 | &self.dmap.copy_map, | |
|
752 | self.options, | |
|
753 | ); | |
|
754 | (Cow::Owned(filename), dispatch) | |
|
755 | } | |
|
756 | Err(e) | |
|
757 | if e.kind() == ErrorKind::NotFound | |
|
758 | || e.raw_os_error() == Some(20) => | |
|
759 | { | |
|
760 | // Rust does not yet have an `ErrorKind` for | |
|
761 | // `NotADirectory` (errno 20) | |
|
762 | // It happens if the dirstate contains `foo/bar` | |
|
763 | // and foo is not a | |
|
764 | // directory | |
|
765 | ( | |
|
766 | Cow::Owned(filename), | |
|
767 | dispatch_missing(entry.state), | |
|
768 | ) | |
|
769 | } | |
|
770 | Err(e) => { | |
|
771 | (Cow::Owned(filename), dispatch_os_error(&e)) | |
|
772 | } | |
|
773 | } | |
|
774 | }), | |
|
775 | ); | |
|
776 | } | |
|
777 | ||
|
778 | /// Add the files in the dirstate to the results. | |
|
779 | /// | |
|
780 | /// This takes a mutable reference to the results to account for the | |
|
781 | /// `extend` in timings | |
|
782 | #[cfg(not(feature = "dirstate-tree"))] | |
|
783 | 703 | #[timed] |
|
784 | 704 | pub fn extend_from_dmap(&self, results: &mut Vec<DispatchedPath<'a>>) { |
|
785 | 705 | results.par_extend( |
|
786 | 706 | self.dmap |
|
787 | 707 | .par_iter() |
|
788 | 708 | .filter(|(path, _)| self.matcher.matches(path)) |
|
789 | 709 | .map(move |(filename, entry)| { |
|
790 | 710 | let filename: &HgPath = filename; |
|
791 | 711 | let filename_as_path = match hg_path_to_path_buf(filename) |
|
792 | 712 | { |
|
793 | 713 | Ok(f) => f, |
|
794 | 714 | Err(_) => { |
|
795 | 715 | return ( |
|
796 | 716 | Cow::Borrowed(filename), |
|
797 | 717 | INVALID_PATH_DISPATCH, |
|
798 | 718 | ) |
|
799 | 719 | } |
|
800 | 720 | }; |
|
801 | 721 | let meta = self |
|
802 | 722 | .root_dir |
|
803 | 723 | .join(filename_as_path) |
|
804 | 724 | .symlink_metadata(); |
|
805 | 725 | match meta { |
|
806 | 726 | Ok(m) |
|
807 | 727 | if !(m.file_type().is_file() |
|
808 | 728 | || m.file_type().is_symlink()) => |
|
809 | 729 | { |
|
810 | 730 | ( |
|
811 | 731 | Cow::Borrowed(filename), |
|
812 | 732 | dispatch_missing(entry.state), |
|
813 | 733 | ) |
|
814 | 734 | } |
|
815 | 735 | Ok(m) => ( |
|
816 | 736 | Cow::Borrowed(filename), |
|
817 | 737 | dispatch_found( |
|
818 | 738 | filename, |
|
819 | 739 | *entry, |
|
820 | 740 | HgMetadata::from_metadata(m), |
|
821 | 741 | &self.dmap.copy_map, |
|
822 | 742 | self.options, |
|
823 | 743 | ), |
|
824 | 744 | ), |
|
825 | 745 | Err(e) |
|
826 | 746 | if e.kind() == ErrorKind::NotFound |
|
827 | 747 | || e.raw_os_error() == Some(20) => |
|
828 | 748 | { |
|
829 | 749 | // Rust does not yet have an `ErrorKind` for |
|
830 | 750 | // `NotADirectory` (errno 20) |
|
831 | 751 | // It happens if the dirstate contains `foo/bar` |
|
832 | 752 | // and foo is not a |
|
833 | 753 | // directory |
|
834 | 754 | ( |
|
835 | 755 | Cow::Borrowed(filename), |
|
836 | 756 | dispatch_missing(entry.state), |
|
837 | 757 | ) |
|
838 | 758 | } |
|
839 | 759 | Err(e) => { |
|
840 | 760 | (Cow::Borrowed(filename), dispatch_os_error(&e)) |
|
841 | 761 | } |
|
842 | 762 | } |
|
843 | 763 | }), |
|
844 | 764 | ); |
|
845 | 765 | } |
|
846 | 766 | |
|
847 | 767 | /// Checks all files that are in the dirstate but were not found during the |
|
848 | 768 | /// working directory traversal. This means that the rest must |
|
849 | 769 | /// be either ignored, under a symlink or under a new nested repo. |
|
850 | 770 | /// |
|
851 | 771 | /// This takes a mutable reference to the results to account for the |
|
852 | 772 | /// `extend` in timings |
|
853 | #[cfg(not(feature = "dirstate-tree"))] | |
|
854 | 773 | #[timed] |
|
855 | 774 | pub fn handle_unknowns(&self, results: &mut Vec<DispatchedPath<'a>>) { |
|
856 | 775 | let to_visit: Vec<(&HgPath, &DirstateEntry)> = |
|
857 | 776 | if results.is_empty() && self.matcher.matches_everything() { |
|
858 | 777 | self.dmap.iter().map(|(f, e)| (f.deref(), e)).collect() |
|
859 | 778 | } else { |
|
860 | 779 | // Only convert to a hashmap if needed. |
|
861 | 780 | let old_results: FastHashMap<_, _> = |
|
862 | 781 | results.iter().cloned().collect(); |
|
863 | 782 | self.dmap |
|
864 | 783 | .iter() |
|
865 | 784 | .filter_map(move |(f, e)| { |
|
866 | 785 | if !old_results.contains_key(f.deref()) |
|
867 | 786 | && self.matcher.matches(f) |
|
868 | 787 | { |
|
869 | 788 | Some((f.deref(), e)) |
|
870 | 789 | } else { |
|
871 | 790 | None |
|
872 | 791 | } |
|
873 | 792 | }) |
|
874 | 793 | .collect() |
|
875 | 794 | }; |
|
876 | 795 | |
|
877 | 796 | let path_auditor = PathAuditor::new(&self.root_dir); |
|
878 | 797 | |
|
879 | 798 | let new_results = to_visit.into_par_iter().filter_map( |
|
880 | 799 | |(filename, entry)| -> Option<_> { |
|
881 | 800 | // Report ignored items in the dmap as long as they are not |
|
882 | 801 | // under a symlink directory. |
|
883 | 802 | if path_auditor.check(filename) { |
|
884 | 803 | // TODO normalize for case-insensitive filesystems |
|
885 | 804 | let buf = match hg_path_to_path_buf(filename) { |
|
886 | 805 | Ok(x) => x, |
|
887 | 806 | Err(_) => { |
|
888 | 807 | return Some(( |
|
889 | 808 | Cow::Owned(filename.to_owned()), |
|
890 | 809 | INVALID_PATH_DISPATCH, |
|
891 | 810 | )); |
|
892 | 811 | } |
|
893 | 812 | }; |
|
894 | 813 | Some(( |
|
895 | 814 | Cow::Owned(filename.to_owned()), |
|
896 | 815 | match self.root_dir.join(&buf).symlink_metadata() { |
|
897 | 816 | // File was just ignored, no links, and exists |
|
898 | 817 | Ok(meta) => { |
|
899 | 818 | let metadata = HgMetadata::from_metadata(meta); |
|
900 | 819 | dispatch_found( |
|
901 | 820 | filename, |
|
902 | 821 | *entry, |
|
903 | 822 | metadata, |
|
904 | 823 | &self.dmap.copy_map, |
|
905 | 824 | self.options, |
|
906 | 825 | ) |
|
907 | 826 | } |
|
908 | 827 | // File doesn't exist |
|
909 | 828 | Err(_) => dispatch_missing(entry.state), |
|
910 | 829 | }, |
|
911 | 830 | )) |
|
912 | 831 | } else { |
|
913 | 832 | // It's either missing or under a symlink directory which |
|
914 | 833 | // we, in this case, report as missing. |
|
915 | 834 | Some(( |
|
916 | 835 | Cow::Owned(filename.to_owned()), |
|
917 | 836 | dispatch_missing(entry.state), |
|
918 | 837 | )) |
|
919 | 838 | } |
|
920 | 839 | }, |
|
921 | 840 | ); |
|
922 | 841 | |
|
923 | 842 | results.par_extend(new_results); |
|
924 | 843 | } |
|
925 | 844 | } |
|
926 | 845 | |
|
927 | 846 | #[timed] |
|
928 | 847 | pub fn build_response<'a>( |
|
929 | 848 | results: impl IntoIterator<Item = DispatchedPath<'a>>, |
|
930 | 849 | traversed: Vec<HgPathBuf>, |
|
931 | 850 | ) -> (Vec<HgPathCow<'a>>, DirstateStatus<'a>) { |
|
932 | 851 | let mut lookup = vec![]; |
|
933 | 852 | let mut modified = vec![]; |
|
934 | 853 | let mut added = vec![]; |
|
935 | 854 | let mut removed = vec![]; |
|
936 | 855 | let mut deleted = vec![]; |
|
937 | 856 | let mut clean = vec![]; |
|
938 | 857 | let mut ignored = vec![]; |
|
939 | 858 | let mut unknown = vec![]; |
|
940 | 859 | let mut bad = vec![]; |
|
941 | 860 | |
|
942 | 861 | for (filename, dispatch) in results.into_iter() { |
|
943 | 862 | match dispatch { |
|
944 | 863 | Dispatch::Unknown => unknown.push(filename), |
|
945 | 864 | Dispatch::Unsure => lookup.push(filename), |
|
946 | 865 | Dispatch::Modified => modified.push(filename), |
|
947 | 866 | Dispatch::Added => added.push(filename), |
|
948 | 867 | Dispatch::Removed => removed.push(filename), |
|
949 | 868 | Dispatch::Deleted => deleted.push(filename), |
|
950 | 869 | Dispatch::Clean => clean.push(filename), |
|
951 | 870 | Dispatch::Ignored => ignored.push(filename), |
|
952 | 871 | Dispatch::None => {} |
|
953 | 872 | Dispatch::Bad(reason) => bad.push((filename, reason)), |
|
954 | 873 | Dispatch::Directory { .. } => {} |
|
955 | 874 | } |
|
956 | 875 | } |
|
957 | 876 | |
|
958 | 877 | ( |
|
959 | 878 | lookup, |
|
960 | 879 | DirstateStatus { |
|
961 | 880 | modified, |
|
962 | 881 | added, |
|
963 | 882 | removed, |
|
964 | 883 | deleted, |
|
965 | 884 | clean, |
|
966 | 885 | ignored, |
|
967 | 886 | unknown, |
|
968 | 887 | bad, |
|
969 | 888 | traversed, |
|
970 | 889 | }, |
|
971 | 890 | ) |
|
972 | 891 | } |
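
As a small worked example of the bucketing above, a sketch that could live in a unit test next to `build_response` (it assumes the surrounding module's `Dispatch` and `build_response` are in scope):

    fn build_response_example() {
        use crate::utils::hg_path::HgPath;
        use std::borrow::Cow;

        let results = vec![
            (Cow::Borrowed(HgPath::new(b"modified.rs")), Dispatch::Modified),
            (Cow::Borrowed(HgPath::new(b"maybe.rs")), Dispatch::Unsure),
            (Cow::Borrowed(HgPath::new(b"junk.o")), Dispatch::Ignored),
        ];
        let (lookup, status) = build_response(results, vec![]);

        assert_eq!(lookup.len(), 1); // only "maybe.rs" still needs a filelog check
        assert_eq!(status.modified.len(), 1);
        assert_eq!(status.ignored.len(), 1);
        assert!(status.traversed.is_empty());
    }
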
|
973 | 892 | |
|
974 | 893 | /// Get the status of files in the working directory. |
|
975 | 894 | /// |
|
976 | 895 | /// This is the current entry-point for `hg-core` and is realistically unusable |
|
977 | 896 | /// outside of a Python context because its arguments need to provide a lot of |
|
978 | 897 | /// information that will not be necessary in the future. |
|
979 | 898 | #[timed] |
|
980 | 899 | pub fn status<'a>( |
|
981 | 900 | dmap: &'a DirstateMap, |
|
982 | 901 | matcher: &'a (impl Matcher + Sync), |
|
983 | 902 | root_dir: PathBuf, |
|
984 | 903 | ignore_files: Vec<PathBuf>, |
|
985 | 904 | options: StatusOptions, |
|
986 | 905 | ) -> StatusResult<( |
|
987 | 906 | (Vec<HgPathCow<'a>>, DirstateStatus<'a>), |
|
988 | 907 | Vec<PatternFileWarning>, |
|
989 | 908 | )> { |
|
990 | 909 | let (status, warnings) = |
|
991 | 910 | Status::new(dmap, matcher, root_dir, ignore_files, options)?; |
|
992 | 911 | |
|
993 | 912 | Ok((status.run()?, warnings)) |
|
994 | 913 | } |
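
To tie the pieces together, a hedged sketch of driving this entry point from Rust. The module paths and `AlwaysMatcher` are assumptions based on the rest of the crate; a real caller (today, the Python bindings) supplies a meaningful root directory, ignore files and options.

    use std::path::PathBuf;

    use hg::dirstate::status::{status, StatusOptions}; // paths assumed
    use hg::matchers::AlwaysMatcher; // assumed: a matcher that matches every file
    use hg::DirstateMap;

    fn main() -> Result<(), hg::StatusError> {
        let dmap = DirstateMap::default(); // empty dirstate, for illustration only
        let matcher = AlwaysMatcher;
        let options = StatusOptions {
            last_normal_time: 0,
            check_exec: true,
            list_clean: false,
            list_unknown: true,
            list_ignored: false,
            collect_traversed_dirs: false,
        };

        let ((lookup, ds_status), _pattern_warnings) = status(
            &dmap,
            &matcher,
            PathBuf::from("."),
            vec![], // no hgignore files
            options,
        )?;

        println!(
            "{} unknown file(s), {} file(s) need a filelog lookup",
            ds_status.unknown.len(),
            lookup.len()
        );
        Ok(())
    }
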
@@ -1,133 +1,73 b'' | |||
|
1 | 1 | // dirstate_status.rs |
|
2 | 2 | // |
|
3 | 3 | // Copyright 2019, Raphaël Gomès <rgomes@octobus.net> |
|
4 | 4 | // |
|
5 | 5 | // This software may be used and distributed according to the terms of the |
|
6 | 6 | // GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | use crate::dirstate::status::{build_response, Dispatch, HgPathCow, Status}; |
|
9 | 9 | use crate::matchers::Matcher; |
|
10 | 10 | use crate::{DirstateStatus, StatusError}; |
|
11 | 11 | |
|
12 | 12 | /// A tuple of the paths that need to be checked in the filelog because it's |
|
13 | 13 | /// ambiguous whether they've changed, and the rest of the already dispatched |
|
14 | 14 | /// files. |
|
15 | 15 | pub type LookupAndStatus<'a> = (Vec<HgPathCow<'a>>, DirstateStatus<'a>); |
|
16 | 16 | |
|
17 | #[cfg(feature = "dirstate-tree")] | |
|
18 | impl<'a, M: Matcher + Sync> Status<'a, M> { | |
|
19 | pub(crate) fn run(&self) -> Result<LookupAndStatus<'a>, StatusError> { | |
|
20 | let (traversed_sender, traversed_receiver) = | |
|
21 | crossbeam_channel::unbounded(); | |
|
22 | ||
|
23 | // Step 1: check the files explicitly mentioned by the user | |
|
24 | let (work, mut results) = self.walk_explicit(traversed_sender.clone()); | |
|
25 | ||
|
26 | // Step 2: Check files in the dirstate | |
|
27 | if !self.matcher.is_exact() { | |
|
28 | self.extend_from_dmap(&mut results); | |
|
29 | } | |
|
30 | // Step 3: Check the working directory if listing unknowns | |
|
31 | if !work.is_empty() { | |
|
32 | // Hashmaps are quite a bit slower to build than vecs, so only | |
|
33 | // build it if needed. | |
|
34 | let mut old_results = None; | |
|
35 | ||
|
36 | // Step 2: recursively check the working directory for changes if | |
|
37 | // needed | |
|
38 | for (dir, dispatch) in work { | |
|
39 | match dispatch { | |
|
40 | Dispatch::Directory { was_file } => { | |
|
41 | if was_file { | |
|
42 | results.push((dir.to_owned(), Dispatch::Removed)); | |
|
43 | } | |
|
44 | if self.options.list_ignored | |
|
45 | || self.options.list_unknown | |
|
46 | && !self.dir_ignore(&dir) | |
|
47 | { | |
|
48 | if old_results.is_none() { | |
|
49 | old_results = | |
|
50 | Some(results.iter().cloned().collect()); | |
|
51 | } | |
|
52 | self.traverse( | |
|
53 | &dir, | |
|
54 | old_results | |
|
55 | .as_ref() | |
|
56 | .expect("old results should exist"), | |
|
57 | &mut results, | |
|
58 | traversed_sender.clone(), | |
|
59 | ); | |
|
60 | } | |
|
61 | } | |
|
62 | _ => { | |
|
63 | unreachable!("There can only be directories in `work`") | |
|
64 | } | |
|
65 | } | |
|
66 | } | |
|
67 | } | |
|
68 | ||
|
69 | drop(traversed_sender); | |
|
70 | let traversed = traversed_receiver.into_iter().collect(); | |
|
71 | ||
|
72 | Ok(build_response(results, traversed)) | |
|
73 | } | |
|
74 | } | |
|
75 | ||
|
76 | #[cfg(not(feature = "dirstate-tree"))] | |
|
77 | 17 | impl<'a, M: Matcher + Sync> Status<'a, M> { |
|
78 | 18 | pub(crate) fn run(&self) -> Result<LookupAndStatus<'a>, StatusError> { |
|
79 | 19 | let (traversed_sender, traversed_receiver) = |
|
80 | 20 | crossbeam_channel::unbounded(); |
|
81 | 21 | |
|
82 | 22 | // Step 1: check the files explicitly mentioned by the user |
|
83 | 23 | let (work, mut results) = self.walk_explicit(traversed_sender.clone()); |
|
84 | 24 | |
|
85 | 25 | if !work.is_empty() { |
|
86 | 26 | // Hashmaps are quite a bit slower to build than vecs, so only |
|
87 | 27 | // build it if needed. |
|
88 | 28 | let old_results = results.iter().cloned().collect(); |
|
89 | 29 | |
|
90 | 30 | // Step 2: recursively check the working directory for changes if |
|
91 | 31 | // needed |
|
92 | 32 | for (dir, dispatch) in work { |
|
93 | 33 | match dispatch { |
|
94 | 34 | Dispatch::Directory { was_file } => { |
|
95 | 35 | if was_file { |
|
96 | 36 | results.push((dir.to_owned(), Dispatch::Removed)); |
|
97 | 37 | } |
|
98 | 38 | if self.options.list_ignored |
|
99 | 39 | || self.options.list_unknown |
|
100 | 40 | && !self.dir_ignore(&dir) |
|
101 | 41 | { |
|
102 | 42 | self.traverse( |
|
103 | 43 | &dir, |
|
104 | 44 | &old_results, |
|
105 | 45 | &mut results, |
|
106 | 46 | traversed_sender.clone(), |
|
107 | 47 | ); |
|
108 | 48 | } |
|
109 | 49 | } |
|
110 | 50 | _ => { |
|
111 | 51 | unreachable!("There can only be directories in `work`") |
|
112 | 52 | } |
|
113 | 53 | } |
|
114 | 54 | } |
|
115 | 55 | } |
|
116 | 56 | |
|
117 | 57 | if !self.matcher.is_exact() { |
|
118 | 58 | if self.options.list_unknown { |
|
119 | 59 | self.handle_unknowns(&mut results); |
|
120 | 60 | } else { |
|
121 | 61 | // TODO this is incorrect, see issue6335 |
|
122 | 62 | // This requires a fix in both Python and Rust that can happen |
|
123 | 63 | // with other pending changes to `status`. |
|
124 | 64 | self.extend_from_dmap(&mut results); |
|
125 | 65 | } |
|
126 | 66 | } |
|
127 | 67 | |
|
128 | 68 | drop(traversed_sender); |
|
129 | 69 | let traversed = traversed_receiver.into_iter().collect(); |
|
130 | 70 | |
|
131 | 71 | Ok(build_response(results, traversed)) |
|
132 | 72 | } |
|
133 | 73 | } |
@@ -1,33 +1,32 b'' | |||
|
1 | 1 | [package] |
|
2 | 2 | name = "hg-cpython" |
|
3 | 3 | version = "0.1.0" |
|
4 | 4 | authors = ["Georges Racinet <gracinet@anybox.fr>"] |
|
5 | 5 | edition = "2018" |
|
6 | 6 | |
|
7 | 7 | [lib] |
|
8 | 8 | name='rusthg' |
|
9 | 9 | crate-type = ["cdylib"] |
|
10 | 10 | |
|
11 | 11 | [features] |
|
12 | 12 | default = ["python27"] |
|
13 | dirstate-tree = ["hg-core/dirstate-tree"] | |
|
14 | 13 | |
|
15 | 14 | # Features to build an extension module: |
|
16 | 15 | python27 = ["cpython/python27-sys", "cpython/extension-module-2-7"] |
|
17 | 16 | python3 = ["cpython/python3-sys", "cpython/extension-module"] |
|
18 | 17 | |
|
19 | 18 | # Enable one of these features to build a test executable linked to libpython: |
|
20 | 19 | # e.g. cargo test --no-default-features --features python27-bin |
|
21 | 20 | python27-bin = ["cpython/python27-sys"] |
|
22 | 21 | python3-bin = ["cpython/python3-sys"] |
|
23 | 22 | |
|
24 | 23 | [dependencies] |
|
25 | 24 | crossbeam-channel = "0.4" |
|
26 | 25 | hg-core = { path = "../hg-core"} |
|
27 | 26 | libc = '*' |
|
28 | 27 | log = "0.4.8" |
|
29 | 28 | env_logger = "0.7.1" |
|
30 | 29 | |
|
31 | 30 | [dependencies.cpython] |
|
32 | 31 | version = "0.5.2" |
|
33 | 32 | default-features = false |
@@ -1,608 +1,588 b'' | |||
|
1 | 1 | // dirstate_map.rs |
|
2 | 2 | // |
|
3 | 3 | // Copyright 2019 Raphaël Gomès <rgomes@octobus.net> |
|
4 | 4 | // |
|
5 | 5 | // This software may be used and distributed according to the terms of the |
|
6 | 6 | // GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | //! Bindings for the `hg::dirstate::dirstate_map` file provided by the |
|
9 | 9 | //! `hg-core` package. |
|
10 | 10 | |
|
11 | 11 | use std::cell::{Ref, RefCell}; |
|
12 | 12 | use std::convert::TryInto; |
|
13 | 13 | use std::time::Duration; |
|
14 | 14 | |
|
15 | 15 | use cpython::{ |
|
16 | 16 | exc, ObjectProtocol, PyBool, PyBytes, PyClone, PyDict, PyErr, PyList, |
|
17 | 17 | PyObject, PyResult, PyString, PyTuple, Python, PythonObject, ToPyObject, |
|
18 | 18 | UnsafePyLeaked, |
|
19 | 19 | }; |
|
20 | 20 | |
|
21 | 21 | use crate::{ |
|
22 | 22 | dirstate::copymap::{CopyMap, CopyMapItemsIterator, CopyMapKeysIterator}, |
|
23 | 23 | dirstate::non_normal_entries::{ |
|
24 | 24 | NonNormalEntries, NonNormalEntriesIterator, |
|
25 | 25 | }, |
|
26 | 26 | dirstate::{dirs_multiset::Dirs, make_dirstate_tuple}, |
|
27 | 27 | parsers::dirstate_parents_to_pytuple, |
|
28 | 28 | }; |
|
29 | 29 | use hg::{ |
|
30 | 30 | errors::HgError, |
|
31 | 31 | revlog::Node, |
|
32 | 32 | utils::hg_path::{HgPath, HgPathBuf}, |
|
33 | 33 | DirsMultiset, DirstateEntry, DirstateMap as RustDirstateMap, |
|
34 | 34 | DirstateMapError, DirstateParents, EntryState, StateMapIter, |
|
35 | 35 | }; |
|
36 | 36 | |
|
37 | 37 | // TODO |
|
38 | 38 | // This object needs to share references to multiple members of its Rust |
|
39 | 39 | // inner struct, namely `copy_map`, `dirs` and `all_dirs`. |
|
40 | 40 | // Right now `CopyMap` is done, but it needs to have an explicit reference |
|
41 | 41 | // to `RustDirstateMap` which itself needs to have an encapsulation for |
|
42 | 42 | // every method in `CopyMap` (copymapcopy, etc.). |
|
43 | 43 | // This is ugly and hard to maintain. |
|
44 | 44 | // The same logic applies to `dirs` and `all_dirs`, however the `Dirs` |
|
45 | 45 | // `py_class!` is already implemented and does not mention |
|
46 | 46 | // `RustDirstateMap`, rightfully so. |
|
47 | 47 | // All attributes also have to have a separate refcount data attribute for |
|
48 | 48 | // leaks, with all methods that go along for reference sharing. |
|
49 | 49 | py_class!(pub class DirstateMap |py| { |
|
50 | 50 | @shared data inner: RustDirstateMap; |
|
51 | 51 | |
|
52 | 52 | def __new__(_cls, _root: PyObject) -> PyResult<Self> { |
|
53 | 53 | let inner = RustDirstateMap::default(); |
|
54 | 54 | Self::create_instance(py, inner) |
|
55 | 55 | } |
|
56 | 56 | |
|
57 | 57 | def clear(&self) -> PyResult<PyObject> { |
|
58 | 58 | self.inner(py).borrow_mut().clear(); |
|
59 | 59 | Ok(py.None()) |
|
60 | 60 | } |
|
61 | 61 | |
|
62 | 62 | def get( |
|
63 | 63 | &self, |
|
64 | 64 | key: PyObject, |
|
65 | 65 | default: Option<PyObject> = None |
|
66 | 66 | ) -> PyResult<Option<PyObject>> { |
|
67 | 67 | let key = key.extract::<PyBytes>(py)?; |
|
68 | 68 | match self.inner(py).borrow().get(HgPath::new(key.data(py))) { |
|
69 | 69 | Some(entry) => { |
|
70 | 70 | Ok(Some(make_dirstate_tuple(py, entry)?)) |
|
71 | 71 | }, |
|
72 | 72 | None => Ok(default) |
|
73 | 73 | } |
|
74 | 74 | } |
|
75 | 75 | |
|
76 | 76 | def addfile( |
|
77 | 77 | &self, |
|
78 | 78 | f: PyObject, |
|
79 | 79 | oldstate: PyObject, |
|
80 | 80 | state: PyObject, |
|
81 | 81 | mode: PyObject, |
|
82 | 82 | size: PyObject, |
|
83 | 83 | mtime: PyObject |
|
84 | 84 | ) -> PyResult<PyObject> { |
|
85 | 85 | self.inner(py).borrow_mut().add_file( |
|
86 | 86 | HgPath::new(f.extract::<PyBytes>(py)?.data(py)), |
|
87 | 87 | oldstate.extract::<PyBytes>(py)?.data(py)[0] |
|
88 | 88 | .try_into() |
|
89 | 89 | .map_err(|e: HgError| { |
|
90 | 90 | PyErr::new::<exc::ValueError, _>(py, e.to_string()) |
|
91 | 91 | })?, |
|
92 | 92 | DirstateEntry { |
|
93 | 93 | state: state.extract::<PyBytes>(py)?.data(py)[0] |
|
94 | 94 | .try_into() |
|
95 | 95 | .map_err(|e: HgError| { |
|
96 | 96 | PyErr::new::<exc::ValueError, _>(py, e.to_string()) |
|
97 | 97 | })?, |
|
98 | 98 | mode: mode.extract(py)?, |
|
99 | 99 | size: size.extract(py)?, |
|
100 | 100 | mtime: mtime.extract(py)?, |
|
101 | 101 | }, |
|
102 | 102 | ).and(Ok(py.None())).or_else(|e: DirstateMapError| { |
|
103 | 103 | Err(PyErr::new::<exc::ValueError, _>(py, e.to_string())) |
|
104 | 104 | }) |
|
105 | 105 | } |
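`addfile` (like the other mutating methods) turns the one-byte `state` and `oldstate` arguments into an `EntryState` through `TryFrom`, rewrapping any `HgError` as a Python `ValueError`. Here is a self-contained sketch of that byte-to-enum step with a hypothetical `State` enum; the variant set and byte codes are assumptions about the dirstate format, not taken from this file.

use std::convert::TryFrom;

// Hypothetical mirror of hg::EntryState; the byte codes are an assumption.
#[derive(Debug, PartialEq)]
enum State {
    Normal,  // b'n'
    Added,   // b'a'
    Removed, // b'r'
    Merged,  // b'm'
    Unknown, // b'?'
}

impl TryFrom<u8> for State {
    type Error = String;
    fn try_from(value: u8) -> Result<Self, Self::Error> {
        match value {
            b'n' => Ok(State::Normal),
            b'a' => Ok(State::Added),
            b'r' => Ok(State::Removed),
            b'm' => Ok(State::Merged),
            b'?' => Ok(State::Unknown),
            _ => Err(format!("invalid state byte {:?}", value as char)),
        }
    }
}

fn main() {
    // `addfile` does essentially this with
    // `state.extract::<PyBytes>(py)?.data(py)[0]`, except the error is
    // rewrapped as a Python ValueError.
    let raw: &[u8] = b"a";
    let state = State::try_from(raw[0]).expect("valid state byte");
    assert_eq!(state, State::Added);
}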
|
106 | 106 | |
|
107 | 107 | def removefile( |
|
108 | 108 | &self, |
|
109 | 109 | f: PyObject, |
|
110 | 110 | oldstate: PyObject, |
|
111 | 111 | size: PyObject |
|
112 | 112 | ) -> PyResult<PyObject> { |
|
113 | 113 | self.inner(py).borrow_mut() |
|
114 | 114 | .remove_file( |
|
115 | 115 | HgPath::new(f.extract::<PyBytes>(py)?.data(py)), |
|
116 | 116 | oldstate.extract::<PyBytes>(py)?.data(py)[0] |
|
117 | 117 | .try_into() |
|
118 | 118 | .map_err(|e: HgError| { |
|
119 | 119 | PyErr::new::<exc::ValueError, _>(py, e.to_string()) |
|
120 | 120 | })?, |
|
121 | 121 | size.extract(py)?, |
|
122 | 122 | ) |
|
123 | 123 | .or_else(|_| { |
|
124 | 124 | Err(PyErr::new::<exc::OSError, _>( |
|
125 | 125 | py, |
|
126 | 126 | "Dirstate error".to_string(), |
|
127 | 127 | )) |
|
128 | 128 | })?; |
|
129 | 129 | Ok(py.None()) |
|
130 | 130 | } |
|
131 | 131 | |
|
132 | 132 | def dropfile( |
|
133 | 133 | &self, |
|
134 | 134 | f: PyObject, |
|
135 | 135 | oldstate: PyObject |
|
136 | 136 | ) -> PyResult<PyBool> { |
|
137 | 137 | self.inner(py).borrow_mut() |
|
138 | 138 | .drop_file( |
|
139 | 139 | HgPath::new(f.extract::<PyBytes>(py)?.data(py)), |
|
140 | 140 | oldstate.extract::<PyBytes>(py)?.data(py)[0] |
|
141 | 141 | .try_into() |
|
142 | 142 | .map_err(|e: HgError| { |
|
143 | 143 | PyErr::new::<exc::ValueError, _>(py, e.to_string()) |
|
144 | 144 | })?, |
|
145 | 145 | ) |
|
146 | 146 | .and_then(|b| Ok(b.to_py_object(py))) |
|
147 | 147 | .or_else(|e| { |
|
148 | 148 | Err(PyErr::new::<exc::OSError, _>( |
|
149 | 149 | py, |
|
150 | 150 | format!("Dirstate error: {}", e.to_string()), |
|
151 | 151 | )) |
|
152 | 152 | }) |
|
153 | 153 | } |
|
154 | 154 | |
|
155 | 155 | def clearambiguoustimes( |
|
156 | 156 | &self, |
|
157 | 157 | files: PyObject, |
|
158 | 158 | now: PyObject |
|
159 | 159 | ) -> PyResult<PyObject> { |
|
160 | 160 | let files: PyResult<Vec<HgPathBuf>> = files |
|
161 | 161 | .iter(py)? |
|
162 | 162 | .map(|filename| { |
|
163 | 163 | Ok(HgPathBuf::from_bytes( |
|
164 | 164 | filename?.extract::<PyBytes>(py)?.data(py), |
|
165 | 165 | )) |
|
166 | 166 | }) |
|
167 | 167 | .collect(); |
|
168 | 168 | self.inner(py).borrow_mut() |
|
169 | 169 | .clear_ambiguous_times(files?, now.extract(py)?); |
|
170 | 170 | Ok(py.None()) |
|
171 | 171 | } |
|
172 | 172 | |
|
173 | 173 | def other_parent_entries(&self) -> PyResult<PyObject> { |
|
174 | 174 | let mut inner_shared = self.inner(py).borrow_mut(); |
|
175 | 175 | let (_, other_parent) = |
|
176 | 176 | inner_shared.get_non_normal_other_parent_entries(); |
|
177 | 177 | |
|
178 | 178 | let locals = PyDict::new(py); |
|
179 | 179 | locals.set_item( |
|
180 | 180 | py, |
|
181 | 181 | "other_parent", |
|
182 | 182 | other_parent |
|
183 | 183 | .iter() |
|
184 | 184 | .map(|v| PyBytes::new(py, v.as_bytes())) |
|
185 | 185 | .collect::<Vec<PyBytes>>() |
|
186 | 186 | .to_py_object(py), |
|
187 | 187 | )?; |
|
188 | 188 | |
|
189 | 189 | py.eval("set(other_parent)", None, Some(&locals)) |
|
190 | 190 | } |
|
191 | 191 | |
|
192 | 192 | def non_normal_entries(&self) -> PyResult<NonNormalEntries> { |
|
193 | 193 | NonNormalEntries::from_inner(py, self.clone_ref(py)) |
|
194 | 194 | } |
|
195 | 195 | |
|
196 | 196 | def non_normal_entries_contains(&self, key: PyObject) -> PyResult<bool> { |
|
197 | 197 | let key = key.extract::<PyBytes>(py)?; |
|
198 | 198 | Ok(self |
|
199 | 199 | .inner(py) |
|
200 | 200 | .borrow_mut() |
|
201 | 201 | .get_non_normal_other_parent_entries().0 |
|
202 | 202 | .contains(HgPath::new(key.data(py)))) |
|
203 | 203 | } |
|
204 | 204 | |
|
205 | 205 | def non_normal_entries_display(&self) -> PyResult<PyString> { |
|
206 | 206 | Ok( |
|
207 | 207 | PyString::new( |
|
208 | 208 | py, |
|
209 | 209 | &format!( |
|
210 | 210 | "NonNormalEntries: {:?}", |
|
211 | 211 | self |
|
212 | 212 | .inner(py) |
|
213 | 213 | .borrow_mut() |
|
214 | 214 | .get_non_normal_other_parent_entries().0 |
|
215 | 215 | .iter().map(|o| o)) |
|
216 | 216 | ) |
|
217 | 217 | ) |
|
218 | 218 | } |
|
219 | 219 | |
|
220 | 220 | def non_normal_entries_remove(&self, key: PyObject) -> PyResult<PyObject> { |
|
221 | 221 | let key = key.extract::<PyBytes>(py)?; |
|
222 | 222 | self |
|
223 | 223 | .inner(py) |
|
224 | 224 | .borrow_mut() |
|
225 | 225 | .non_normal_entries_remove(HgPath::new(key.data(py))); |
|
226 | 226 | Ok(py.None()) |
|
227 | 227 | } |
|
228 | 228 | |
|
229 | 229 | def non_normal_entries_union(&self, other: PyObject) -> PyResult<PyList> { |
|
230 | 230 | let other: PyResult<_> = other.iter(py)? |
|
231 | 231 | .map(|f| { |
|
232 | 232 | Ok(HgPathBuf::from_bytes( |
|
233 | 233 | f?.extract::<PyBytes>(py)?.data(py), |
|
234 | 234 | )) |
|
235 | 235 | }) |
|
236 | 236 | .collect(); |
|
237 | 237 | |
|
238 | 238 | let res = self |
|
239 | 239 | .inner(py) |
|
240 | 240 | .borrow_mut() |
|
241 | 241 | .non_normal_entries_union(other?); |
|
242 | 242 | |
|
243 | 243 | let ret = PyList::new(py, &[]); |
|
244 | 244 | for filename in res.iter() { |
|
245 | 245 | let as_pystring = PyBytes::new(py, filename.as_bytes()); |
|
246 | 246 | ret.append(py, as_pystring.into_object()); |
|
247 | 247 | } |
|
248 | 248 | Ok(ret) |
|
249 | 249 | } |
|
250 | 250 | |
|
251 | 251 | def non_normal_entries_iter(&self) -> PyResult<NonNormalEntriesIterator> { |
|
252 | 252 | // Make sure the sets are defined before we no longer have a mutable |
|
253 | 253 | // reference to the dmap. |
|
254 | 254 | self.inner(py) |
|
255 | 255 | .borrow_mut() |
|
256 | 256 | .set_non_normal_other_parent_entries(false); |
|
257 | 257 | |
|
258 | 258 | let leaked_ref = self.inner(py).leak_immutable(); |
|
259 | 259 | |
|
260 | 260 | NonNormalEntriesIterator::from_inner(py, unsafe { |
|
261 | 261 | leaked_ref.map(py, |o| { |
|
262 | 262 | o.get_non_normal_other_parent_entries_panic().0.iter() |
|
263 | 263 | }) |
|
264 | 264 | }) |
|
265 | 265 | } |
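The comment inside `non_normal_entries_iter` encodes an ordering constraint: building the non-normal / other-parent sets needs a mutable borrow, so it has to happen before `leak_immutable()` hands the iterator a shared view that can no longer trigger the lazy initialization (the `_panic` accessor used afterwards presumably fails if the sets are still missing). Below is a plain-Rust sketch of the same constraint with a lazily cached field; the leaking machinery is left aside and the types are hypothetical.

use std::collections::HashSet;

// Hypothetical stand-in: a map with a lazily computed set, like the
// non-normal / other-parent sets on the Rust DirstateMap.
struct Map {
    entries: Vec<String>,
    non_normal: Option<HashSet<String>>,
}

impl Map {
    // Building the cache needs `&mut self`...
    fn ensure_non_normal(&mut self) {
        if self.non_normal.is_none() {
            self.non_normal = Some(self.entries.iter().cloned().collect());
        }
    }

    // ...while iteration only takes `&self`, and fails if the cache was
    // never materialized, much like the `_panic` accessor in the real code.
    fn iter_non_normal(&self) -> impl Iterator<Item = &String> {
        self.non_normal
            .as_ref()
            .expect("call ensure_non_normal() first")
            .iter()
    }
}

fn main() {
    let mut map = Map {
        entries: vec!["a".into(), "b".into()],
        non_normal: None,
    };
    map.ensure_non_normal(); // must happen while we still hold `&mut`
    let shared = &map;       // from here on, only shared borrows
    assert_eq!(shared.iter_non_normal().count(), 2);
}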
|
266 | 266 | |
|
267 | 267 | def hastrackeddir(&self, d: PyObject) -> PyResult<PyBool> { |
|
268 | 268 | let d = d.extract::<PyBytes>(py)?; |
|
269 | 269 | Ok(self.inner(py).borrow_mut() |
|
270 | 270 | .has_tracked_dir(HgPath::new(d.data(py))) |
|
271 | 271 | .map_err(|e| { |
|
272 | 272 | PyErr::new::<exc::ValueError, _>(py, e.to_string()) |
|
273 | 273 | })? |
|
274 | 274 | .to_py_object(py)) |
|
275 | 275 | } |
|
276 | 276 | |
|
277 | 277 | def hasdir(&self, d: PyObject) -> PyResult<PyBool> { |
|
278 | 278 | let d = d.extract::<PyBytes>(py)?; |
|
279 | 279 | Ok(self.inner(py).borrow_mut() |
|
280 | 280 | .has_dir(HgPath::new(d.data(py))) |
|
281 | 281 | .map_err(|e| { |
|
282 | 282 | PyErr::new::<exc::ValueError, _>(py, e.to_string()) |
|
283 | 283 | })? |
|
284 | 284 | .to_py_object(py)) |
|
285 | 285 | } |
|
286 | 286 | |
|
287 | 287 | def parents(&self, st: PyObject) -> PyResult<PyTuple> { |
|
288 | 288 | self.inner(py).borrow_mut() |
|
289 | 289 | .parents(st.extract::<PyBytes>(py)?.data(py)) |
|
290 | 290 | .map(|parents| dirstate_parents_to_pytuple(py, parents)) |
|
291 | 291 | .or_else(|_| { |
|
292 | 292 | Err(PyErr::new::<exc::OSError, _>( |
|
293 | 293 | py, |
|
294 | 294 | "Dirstate error".to_string(), |
|
295 | 295 | )) |
|
296 | 296 | }) |
|
297 | 297 | } |
|
298 | 298 | |
|
299 | 299 | def setparents(&self, p1: PyObject, p2: PyObject) -> PyResult<PyObject> { |
|
300 | 300 | let p1 = extract_node_id(py, &p1)?; |
|
301 | 301 | let p2 = extract_node_id(py, &p2)?; |
|
302 | 302 | |
|
303 | 303 | self.inner(py).borrow_mut() |
|
304 | 304 | .set_parents(&DirstateParents { p1, p2 }); |
|
305 | 305 | Ok(py.None()) |
|
306 | 306 | } |
|
307 | 307 | |
|
308 | 308 | def read(&self, st: PyObject) -> PyResult<Option<PyObject>> { |
|
309 | 309 | match self.inner(py).borrow_mut() |
|
310 | 310 | .read(st.extract::<PyBytes>(py)?.data(py)) |
|
311 | 311 | { |
|
312 | 312 | Ok(Some(parents)) => Ok(Some( |
|
313 | 313 | dirstate_parents_to_pytuple(py, parents) |
|
314 | 314 | .into_object() |
|
315 | 315 | )), |
|
316 | 316 | Ok(None) => Ok(Some(py.None())), |
|
317 | 317 | Err(_) => Err(PyErr::new::<exc::OSError, _>( |
|
318 | 318 | py, |
|
319 | 319 | "Dirstate error".to_string(), |
|
320 | 320 | )), |
|
321 | 321 | } |
|
322 | 322 | } |
|
323 | 323 | def write( |
|
324 | 324 | &self, |
|
325 | 325 | p1: PyObject, |
|
326 | 326 | p2: PyObject, |
|
327 | 327 | now: PyObject |
|
328 | 328 | ) -> PyResult<PyBytes> { |
|
329 | 329 | let now = Duration::new(now.extract(py)?, 0); |
|
330 | 330 | let parents = DirstateParents { |
|
331 | 331 | p1: extract_node_id(py, &p1)?, |
|
332 | 332 | p2: extract_node_id(py, &p2)?, |
|
333 | 333 | }; |
|
334 | 334 | |
|
335 | 335 | match self.inner(py).borrow_mut().pack(parents, now) { |
|
336 | 336 | Ok(packed) => Ok(PyBytes::new(py, &packed)), |
|
337 | 337 | Err(_) => Err(PyErr::new::<exc::OSError, _>( |
|
338 | 338 | py, |
|
339 | 339 | "Dirstate error".to_string(), |
|
340 | 340 | )), |
|
341 | 341 | } |
|
342 | 342 | } |
|
343 | 343 | |
|
344 | 344 | def filefoldmapasdict(&self) -> PyResult<PyDict> { |
|
345 | 345 | let dict = PyDict::new(py); |
|
346 | 346 | for (key, value) in |
|
347 | 347 | self.inner(py).borrow_mut().build_file_fold_map().iter() |
|
348 | 348 | { |
|
349 | 349 | dict.set_item( |
|
350 | 350 | py, |
|
351 | 351 | PyBytes::new(py, key.as_bytes()).into_object(), |
|
352 | 352 | PyBytes::new(py, value.as_bytes()).into_object(), |
|
353 | 353 | )?; |
|
354 | 354 | } |
|
355 | 355 | Ok(dict) |
|
356 | 356 | } |
|
357 | 357 | |
|
358 | 358 | def __len__(&self) -> PyResult<usize> { |
|
359 | 359 | Ok(self.inner(py).borrow().len()) |
|
360 | 360 | } |
|
361 | 361 | |
|
362 | 362 | def __contains__(&self, key: PyObject) -> PyResult<bool> { |
|
363 | 363 | let key = key.extract::<PyBytes>(py)?; |
|
364 | 364 | Ok(self.inner(py).borrow().contains_key(HgPath::new(key.data(py)))) |
|
365 | 365 | } |
|
366 | 366 | |
|
367 | 367 | def __getitem__(&self, key: PyObject) -> PyResult<PyObject> { |
|
368 | 368 | let key = key.extract::<PyBytes>(py)?; |
|
369 | 369 | let key = HgPath::new(key.data(py)); |
|
370 | 370 | match self.inner(py).borrow().get(key) { |
|
371 | 371 | Some(entry) => { |
|
372 | 372 | Ok(make_dirstate_tuple(py, entry)?) |
|
373 | 373 | }, |
|
374 | 374 | None => Err(PyErr::new::<exc::KeyError, _>( |
|
375 | 375 | py, |
|
376 | 376 | String::from_utf8_lossy(key.as_bytes()), |
|
377 | 377 | )), |
|
378 | 378 | } |
|
379 | 379 | } |
|
380 | 380 | |
|
381 | 381 | def keys(&self) -> PyResult<DirstateMapKeysIterator> { |
|
382 | 382 | let leaked_ref = self.inner(py).leak_immutable(); |
|
383 | 383 | DirstateMapKeysIterator::from_inner( |
|
384 | 384 | py, |
|
385 | 385 | unsafe { leaked_ref.map(py, |o| o.iter()) }, |
|
386 | 386 | ) |
|
387 | 387 | } |
|
388 | 388 | |
|
389 | 389 | def items(&self) -> PyResult<DirstateMapItemsIterator> { |
|
390 | 390 | let leaked_ref = self.inner(py).leak_immutable(); |
|
391 | 391 | DirstateMapItemsIterator::from_inner( |
|
392 | 392 | py, |
|
393 | 393 | unsafe { leaked_ref.map(py, |o| o.iter()) }, |
|
394 | 394 | ) |
|
395 | 395 | } |
|
396 | 396 | |
|
397 | 397 | def __iter__(&self) -> PyResult<DirstateMapKeysIterator> { |
|
398 | 398 | let leaked_ref = self.inner(py).leak_immutable(); |
|
399 | 399 | DirstateMapKeysIterator::from_inner( |
|
400 | 400 | py, |
|
401 | 401 | unsafe { leaked_ref.map(py, |o| o.iter()) }, |
|
402 | 402 | ) |
|
403 | 403 | } |
|
404 | 404 | |
|
405 | 405 | def getdirs(&self) -> PyResult<Dirs> { |
|
406 | 406 | // TODO don't copy, share the reference |
|
407 | 407 | self.inner(py).borrow_mut().set_dirs() |
|
408 | 408 | .map_err(|e| { |
|
409 | 409 | PyErr::new::<exc::ValueError, _>(py, e.to_string()) |
|
410 | 410 | })?; |
|
411 | 411 | Dirs::from_inner( |
|
412 | 412 | py, |
|
413 | 413 | DirsMultiset::from_dirstate( |
|
414 | 414 | &self.inner(py).borrow(), |
|
415 | 415 | Some(EntryState::Removed), |
|
416 | 416 | ) |
|
417 | 417 | .map_err(|e| { |
|
418 | 418 | PyErr::new::<exc::ValueError, _>(py, e.to_string()) |
|
419 | 419 | })?, |
|
420 | 420 | ) |
|
421 | 421 | } |
|
422 | 422 | def getalldirs(&self) -> PyResult<Dirs> { |
|
423 | 423 | // TODO don't copy, share the reference |
|
424 | 424 | self.inner(py).borrow_mut().set_all_dirs() |
|
425 | 425 | .map_err(|e| { |
|
426 | 426 | PyErr::new::<exc::ValueError, _>(py, e.to_string()) |
|
427 | 427 | })?; |
|
428 | 428 | Dirs::from_inner( |
|
429 | 429 | py, |
|
430 | 430 | DirsMultiset::from_dirstate( |
|
431 | 431 | &self.inner(py).borrow(), |
|
432 | 432 | None, |
|
433 | 433 | ).map_err(|e| { |
|
434 | 434 | PyErr::new::<exc::ValueError, _>(py, e.to_string()) |
|
435 | 435 | })?, |
|
436 | 436 | ) |
|
437 | 437 | } |
|
438 | 438 | |
|
439 | 439 | // TODO all copymap* methods, see docstring above |
|
440 | 440 | def copymapcopy(&self) -> PyResult<PyDict> { |
|
441 | 441 | let dict = PyDict::new(py); |
|
442 | 442 | for (key, value) in self.inner(py).borrow().copy_map.iter() { |
|
443 | 443 | dict.set_item( |
|
444 | 444 | py, |
|
445 | 445 | PyBytes::new(py, key.as_bytes()), |
|
446 | 446 | PyBytes::new(py, value.as_bytes()), |
|
447 | 447 | )?; |
|
448 | 448 | } |
|
449 | 449 | Ok(dict) |
|
450 | 450 | } |
|
451 | 451 | |
|
452 | 452 | def copymapgetitem(&self, key: PyObject) -> PyResult<PyBytes> { |
|
453 | 453 | let key = key.extract::<PyBytes>(py)?; |
|
454 | 454 | match self.inner(py).borrow().copy_map.get(HgPath::new(key.data(py))) { |
|
455 | 455 | Some(copy) => Ok(PyBytes::new(py, copy.as_bytes())), |
|
456 | 456 | None => Err(PyErr::new::<exc::KeyError, _>( |
|
457 | 457 | py, |
|
458 | 458 | String::from_utf8_lossy(key.data(py)), |
|
459 | 459 | )), |
|
460 | 460 | } |
|
461 | 461 | } |
|
462 | 462 | def copymap(&self) -> PyResult<CopyMap> { |
|
463 | 463 | CopyMap::from_inner(py, self.clone_ref(py)) |
|
464 | 464 | } |
|
465 | 465 | |
|
466 | 466 | def copymaplen(&self) -> PyResult<usize> { |
|
467 | 467 | Ok(self.inner(py).borrow().copy_map.len()) |
|
468 | 468 | } |
|
469 | 469 | def copymapcontains(&self, key: PyObject) -> PyResult<bool> { |
|
470 | 470 | let key = key.extract::<PyBytes>(py)?; |
|
471 | 471 | Ok(self |
|
472 | 472 | .inner(py) |
|
473 | 473 | .borrow() |
|
474 | 474 | .copy_map |
|
475 | 475 | .contains_key(HgPath::new(key.data(py)))) |
|
476 | 476 | } |
|
477 | 477 | def copymapget( |
|
478 | 478 | &self, |
|
479 | 479 | key: PyObject, |
|
480 | 480 | default: Option<PyObject> |
|
481 | 481 | ) -> PyResult<Option<PyObject>> { |
|
482 | 482 | let key = key.extract::<PyBytes>(py)?; |
|
483 | 483 | match self |
|
484 | 484 | .inner(py) |
|
485 | 485 | .borrow() |
|
486 | 486 | .copy_map |
|
487 | 487 | .get(HgPath::new(key.data(py))) |
|
488 | 488 | { |
|
489 | 489 | Some(copy) => Ok(Some( |
|
490 | 490 | PyBytes::new(py, copy.as_bytes()).into_object(), |
|
491 | 491 | )), |
|
492 | 492 | None => Ok(default), |
|
493 | 493 | } |
|
494 | 494 | } |
|
495 | 495 | def copymapsetitem( |
|
496 | 496 | &self, |
|
497 | 497 | key: PyObject, |
|
498 | 498 | value: PyObject |
|
499 | 499 | ) -> PyResult<PyObject> { |
|
500 | 500 | let key = key.extract::<PyBytes>(py)?; |
|
501 | 501 | let value = value.extract::<PyBytes>(py)?; |
|
502 | 502 | self.inner(py).borrow_mut().copy_map.insert( |
|
503 | 503 | HgPathBuf::from_bytes(key.data(py)), |
|
504 | 504 | HgPathBuf::from_bytes(value.data(py)), |
|
505 | 505 | ); |
|
506 | 506 | Ok(py.None()) |
|
507 | 507 | } |
|
508 | 508 | def copymappop( |
|
509 | 509 | &self, |
|
510 | 510 | key: PyObject, |
|
511 | 511 | default: Option<PyObject> |
|
512 | 512 | ) -> PyResult<Option<PyObject>> { |
|
513 | 513 | let key = key.extract::<PyBytes>(py)?; |
|
514 | 514 | match self |
|
515 | 515 | .inner(py) |
|
516 | 516 | .borrow_mut() |
|
517 | 517 | .copy_map |
|
518 | 518 | .remove(HgPath::new(key.data(py))) |
|
519 | 519 | { |
|
520 | 520 | Some(_) => Ok(None), |
|
521 | 521 | None => Ok(default), |
|
522 | 522 | } |
|
523 | 523 | } |
|
524 | 524 | |
|
525 | 525 | def copymapiter(&self) -> PyResult<CopyMapKeysIterator> { |
|
526 | 526 | let leaked_ref = self.inner(py).leak_immutable(); |
|
527 | 527 | CopyMapKeysIterator::from_inner( |
|
528 | 528 | py, |
|
529 | 529 | unsafe { leaked_ref.map(py, |o| o.copy_map.iter()) }, |
|
530 | 530 | ) |
|
531 | 531 | } |
|
532 | 532 | |
|
533 | 533 | def copymapitemsiter(&self) -> PyResult<CopyMapItemsIterator> { |
|
534 | 534 | let leaked_ref = self.inner(py).leak_immutable(); |
|
535 | 535 | CopyMapItemsIterator::from_inner( |
|
536 | 536 | py, |
|
537 | 537 | unsafe { leaked_ref.map(py, |o| o.copy_map.iter()) }, |
|
538 | 538 | ) |
|
539 | 539 | } |
|
540 | 540 | |
|
541 | 541 | }); |
|
542 | 542 | |
|
543 | 543 | impl DirstateMap { |
|
544 | 544 | pub fn get_inner<'a>( |
|
545 | 545 | &'a self, |
|
546 | 546 | py: Python<'a>, |
|
547 | 547 | ) -> Ref<'a, RustDirstateMap> { |
|
548 | 548 | self.inner(py).borrow() |
|
549 | 549 | } |
|
550 | #[cfg(not(feature = "dirstate-tree"))] | |
|
551 | 550 | fn translate_key( |
|
552 | 551 | py: Python, |
|
553 | 552 | res: (&HgPathBuf, &DirstateEntry), |
|
554 | 553 | ) -> PyResult<Option<PyBytes>> { |
|
555 | 554 | Ok(Some(PyBytes::new(py, res.0.as_bytes()))) |
|
556 | 555 | } |
|
557 | #[cfg(not(feature = "dirstate-tree"))] | |
|
558 | 556 | fn translate_key_value( |
|
559 | 557 | py: Python, |
|
560 | 558 | res: (&HgPathBuf, &DirstateEntry), |
|
561 | 559 | ) -> PyResult<Option<(PyBytes, PyObject)>> { |
|
562 | 560 | let (f, entry) = res; |
|
563 | 561 | Ok(Some(( |
|
564 | 562 | PyBytes::new(py, f.as_bytes()), |
|
565 | 563 | make_dirstate_tuple(py, &entry)?, |
|
566 | 564 | ))) |
|
567 | 565 | } |
|
568 | #[cfg(feature = "dirstate-tree")] | |
|
569 | fn translate_key( | |
|
570 | py: Python, | |
|
571 | res: (HgPathBuf, DirstateEntry), | |
|
572 | ) -> PyResult<Option<PyBytes>> { | |
|
573 | Ok(Some(PyBytes::new(py, res.0.as_bytes()))) | |
|
574 | } | |
|
575 | #[cfg(feature = "dirstate-tree")] | |
|
576 | fn translate_key_value( | |
|
577 | py: Python, | |
|
578 | res: (HgPathBuf, DirstateEntry), | |
|
579 | ) -> PyResult<Option<(PyBytes, PyObject)>> { | |
|
580 | let (f, entry) = res; | |
|
581 | Ok(Some(( | |
|
582 | PyBytes::new(py, f.as_bytes()), | |
|
583 | make_dirstate_tuple(py, &entry)?, | |
|
584 | ))) | |
|
585 | } | |
|
586 | 566 | } |
|
587 | 567 | |
|
588 | 568 | py_shared_iterator!( |
|
589 | 569 | DirstateMapKeysIterator, |
|
590 | 570 | UnsafePyLeaked<StateMapIter<'static>>, |
|
591 | 571 | DirstateMap::translate_key, |
|
592 | 572 | Option<PyBytes> |
|
593 | 573 | ); |
|
594 | 574 | |
|
595 | 575 | py_shared_iterator!( |
|
596 | 576 | DirstateMapItemsIterator, |
|
597 | 577 | UnsafePyLeaked<StateMapIter<'static>>, |
|
598 | 578 | DirstateMap::translate_key_value, |
|
599 | 579 | Option<(PyBytes, PyObject)> |
|
600 | 580 | ); |
|
601 | 581 | |
|
602 | 582 | fn extract_node_id(py: Python, obj: &PyObject) -> PyResult<Node> { |
|
603 | 583 | let bytes = obj.extract::<PyBytes>(py)?; |
|
604 | 584 | match bytes.data(py).try_into() { |
|
605 | 585 | Ok(s) => Ok(s), |
|
606 | 586 | Err(e) => Err(PyErr::new::<exc::ValueError, _>(py, e.to_string())), |
|
607 | 587 | } |
|
608 | 588 | } |
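`extract_node_id` leans on the `TryFrom<&[u8]>` conversion into `Node` and maps a failure (typically a length mismatch) to `ValueError`. A minimal stand-alone sketch of that slice-to-fixed-array conversion, assuming a 20-byte node id; `NodeId` here is a hypothetical stand-in for `hg::revlog::Node`.

use std::convert::TryInto;

// Hypothetical 20-byte node id, standing in for hg::revlog::Node.
#[derive(Debug, PartialEq)]
struct NodeId([u8; 20]);

fn extract_node_id(bytes: &[u8]) -> Result<NodeId, String> {
    // `try_into` fails when the slice is not exactly 20 bytes long;
    // the real binding maps that failure onto a Python ValueError.
    let array: [u8; 20] = bytes
        .try_into()
        .map_err(|_| format!("expected 20 bytes, got {}", bytes.len()))?;
    Ok(NodeId(array))
}

fn main() {
    assert!(extract_node_id(&[0u8; 20]).is_ok());
    assert!(extract_node_id(b"too short").is_err());
}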
|