@@ -55,19 +55,19 @@ impl<G: Graph> AncestorsIterator<G> {
         let filtered_initrevs = initrevs.into_iter().filter(|&r| r >= stoprev);
         if inclusive {
             let visit: BinaryHeap<Revision> = filtered_initrevs.collect();
-            let seen = visit.iter().
+            let seen = visit.iter().cloned().collect();
             return Ok(AncestorsIterator {
-                visit
-                seen
-                stoprev
-                graph
+                visit,
+                seen,
+                stoprev,
+                graph,
             });
         }
         let mut this = AncestorsIterator {
             visit: BinaryHeap::new(),
             seen: HashSet::new(),
-            stoprev
-            graph
+            stoprev,
+            graph,
         };
         this.seen.insert(NULL_REVISION);
         for rev in filtered_initrevs {
@@ -107,7 +107,7 @@ impl<G: Graph> AncestorsIterator<G> {
     }

     pub fn peek(&self) -> Option<Revision> {
-        self.visit.peek().
+        self.visit.peek().cloned()
     }

     /// Tell if the iterator is about an empty set
@@ -182,8 +182,8 @@ impl<G: Graph + Clone> LazyAncestors<G> 
                 inclusive,
             )?,
             initrevs: v,
-            stoprev
-            inclusive
+            stoprev,
+            inclusive,
         })
     }

@@ -211,7 +211,7 @@ impl<G: Graph + Clone> LazyAncestors<G> 
 impl<G: Graph> MissingAncestors<G> {
     pub fn new(graph: G, bases: impl IntoIterator<Item = Revision>) -> Self {
         let mut created = MissingAncestors {
-            graph
+            graph,
             bases: HashSet::new(),
             max_base: NULL_REVISION,
         };
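Most edits in these hunks are the field-init shorthand clippy recommends (redundant_field_names): when a local variable and a struct field share a name, `field: field` collapses to `field`. A minimal standalone sketch of the idiom; the `IterState` type below is illustrative, not from the patch:

    struct IterState {
        stoprev: i32,
        inclusive: bool,
    }

    fn main() {
        let stoprev = 0;
        let inclusive = true;
        // Shorthand: `stoprev, inclusive` instead of `stoprev: stoprev, inclusive: inclusive`.
        let state = IterState { stoprev, inclusive };
        assert!(state.inclusive && state.stoprev == 0);
    }
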
@@ -16,10 +16,10 @@ use super::{Graph, GraphError, Revision,
 use crate::ancestors::AncestorsIterator;
 use std::collections::{BTreeSet, HashSet};

-fn remove_parents(
+fn remove_parents<S: std::hash::BuildHasher>(
     graph: &impl Graph,
     rev: Revision,
-    set: &mut HashSet<Revision>,
+    set: &mut HashSet<Revision, S>,
 ) -> Result<(), GraphError> {
     for parent in graph.parents(rev)?.iter() {
         if *parent != NULL_REVISION {
@@ -65,9 +65,9 @@ pub fn heads<'a>(
 ///
 /// # Performance notes
 /// Internally, this function will store a full copy of `revs` in a `Vec`.
-pub fn retain_heads(
+pub fn retain_heads<S: std::hash::BuildHasher>(
     graph: &impl Graph,
-    revs: &mut HashSet<Revision>,
+    revs: &mut HashSet<Revision, S>,
 ) -> Result<(), GraphError> {
     revs.remove(&NULL_REVISION);
     // we need to construct an iterable copy of revs to avoid itering while
@@ -84,9 +84,9 @@ pub fn retain_heads(
 /// Roots of `revs`, passed as a `HashSet`
 ///
 /// They are returned in arbitrary order
-pub fn roots<G: Graph>(
+pub fn roots<G: Graph, S: std::hash::BuildHasher>(
     graph: &G,
-    revs: &HashSet<Revision>,
+    revs: &HashSet<Revision, S>,
 ) -> Result<Vec<Revision>, GraphError> {
     let mut roots: Vec<Revision> = Vec::new();
     for rev in revs {
@@ -229,7 +229,8 @@ mod tests {
         graph: &impl Graph,
         revs: &[Revision],
     ) -> Result<Vec<Revision>, GraphError> {
-        let
+        let set: HashSet<_> = revs.iter().cloned().collect();
+        let mut as_vec = roots(graph, &set)?;
         as_vec.sort();
         Ok(as_vec)
     }
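The `S: std::hash::BuildHasher` parameter added above is the usual answer to clippy's implicit_hasher lint: taking `&HashSet<Revision, S>` instead of `&HashSet<Revision>` lets callers pass sets built with a non-default hasher while default-hashed sets keep working. A minimal sketch, with a made-up `retain_even` helper standing in for the real functions:

    use std::collections::HashSet;
    use std::hash::BuildHasher;

    // Generic over the hasher, so both std and custom-hashed sets are accepted.
    fn retain_even<S: BuildHasher>(revs: &mut HashSet<i32, S>) {
        revs.retain(|r| r % 2 == 0);
    }

    fn main() {
        let mut revs: HashSet<i32> = [1, 2, 3, 4].iter().cloned().collect();
        retain_even(&mut revs); // the default hasher still works unchanged
        assert_eq!(revs.len(), 2);
    }
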
@@ -108,7 +108,7 @@ impl DirsMultiset {
         for subpath in files::find_dirs(path.as_ref()) {
             match self.inner.entry(subpath.to_owned()) {
                 Entry::Occupied(mut entry) => {
-                    let val = entry.get()
+                    let val = *entry.get();
                     if val > 1 {
                         entry.insert(val - 1);
                         break;
@@ -137,6 +137,10 @@ impl DirsMultiset {
     pub fn len(&self) -> usize {
         self.inner.len()
     }
+
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
 }

 /// This is basically a reimplementation of `DirsMultiset` that stores the
@@ -156,7 +160,7 @@ impl<'a> DirsChildrenMultiset<'a> {
         let mut new = Self {
             inner: HashMap::default(),
             only_include: only_include
-                .map(|s| s.iter().map(
+                .map(|s| s.iter().map(AsRef::as_ref).collect()),
         };

         for path in paths {
@@ -223,7 +223,7 @@ impl DirstateMap {
         self.get_non_normal_other_parent_entries()
             .0
             .union(&other)
-            .map(
+            .map(ToOwned::to_owned)
             .collect()
     }

@@ -135,7 +135,7 @@ pub fn pack_dirstate(
         }
         let mut new_filename = new_filename.into_vec();
         if let Some(copy) = copy_map.get(filename) {
-            new_filename.push('\0'
+            new_filename.push(b'\0');
             new_filename.extend(copy.bytes());
         }

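The change above pushes a byte literal instead of casting a character (clippy's char_lit_as_u8): the vector holds `u8`, and `b'\0'` already is one. A small self-contained illustration; the buffer contents are invented:

    fn main() {
        let mut packed: Vec<u8> = b"destination".to_vec();
        // b'\0' is already a u8, so no `'\0' as u8` cast is needed.
        packed.push(b'\0');
        packed.extend(b"source".iter());
        assert_eq!(packed.len(), "destination".len() + 1 + "source".len());
    }
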
@@ -127,7 +127,7 @@ fn list_directory(
         if skip_dot_hg && filename.as_bytes() == b".hg" && file_type.is_dir() {
             return Ok(vec![]);
         } else {
-            results.push((
+            results.push((filename, entry))
         }
     }

@@ -164,14 +164,15 @@ fn dispatch_found(
         (mode ^ st_mode as i32) & 0o100 != 0o000 && options.check_exec;
     let metadata_changed = size >= 0 && (size_changed || mode_changed);
     let other_parent = size == SIZE_FROM_OTHER_PARENT;
+
     if metadata_changed
         || other_parent
         || copy_map.contains_key(filename.as_ref())
     {
         Dispatch::Modified
-    } else if mod_compare(mtime, st_mtime as i32)
-        Dispatch::Unsure
-    } else if st_mtime == options.last_normal_time {
+    } else if mod_compare(mtime, st_mtime as i32)
+        || st_mtime == options.last_normal_time
+    {
         // the file may have just been marked as normal and
         // it may have changed in the same second without
         // changing its size. This can happen if we quickly
@@ -226,9 +227,9 @@ fn walk_explicit<'a>(
     files
         .unwrap_or(&DEFAULT_WORK)
         .par_iter()
-        .map(move |filename| {
+        .map(move |&filename| {
            // TODO normalization
-            let normalized = filename
+            let normalized = filename;

            let buf = match hg_path_to_path_buf(normalized) {
                Ok(x) => x,
@@ -254,33 +255,31 @@ fn walk_explicit<'a>(
                    )));
                }
                Some(Ok((normalized, Dispatch::Unknown)))
+            } else if file_type.is_dir() {
+                if options.collect_traversed_dirs {
+                    traversed_sender
+                        .send(normalized.to_owned())
+                        .expect("receiver should outlive sender");
+                }
+                Some(Ok((
+                    normalized,
+                    Dispatch::Directory {
+                        was_file: in_dmap.is_some(),
+                    },
+                )))
            } else {
-                traversed_sender
-                    .expect("receiver should outlive sender");
-                }
-                Some(Ok((
-            } else {
-                Some(Ok((
-                    normalized,
-                    Dispatch::Bad(BadMatch::BadType(
-                        // TODO do more than unknown
-                        // Support for all `BadType` variant
-                        // varies greatly between platforms.
-                        // So far, no tests check the type and
-                        // this should be good enough for most
-                        // users.
-                        BadType::Unknown,
-                    )),
-                )))
-            }
+                Some(Ok((
+                    normalized,
+                    Dispatch::Bad(BadMatch::BadType(
+                        // TODO do more than unknown
+                        // Support for all `BadType` variant
+                        // varies greatly between platforms.
+                        // So far, no tests check the type and
+                        // this should be good enough for most
+                        // users.
+                        BadType::Unknown,
+                    )),
+                )))
            };
        }
        Err(_) => {
@@ -381,12 +380,10 @@ fn handle_traversed_entry<'a>(
                .send(Ok((filename.to_owned(), Dispatch::Ignored)))
                .unwrap();
            }
-        } else {
-            if options.list_unknown {
-                files_sender
-                    .send(Ok((filename.to_owned(), Dispatch::Unknown)))
-                    .unwrap();
-            }
+        } else if options.list_unknown {
+            files_sender
+                .send(Ok((filename.to_owned(), Dispatch::Unknown)))
+                .unwrap();
        }
    } else if ignore_fn(&filename) && options.list_ignored {
        files_sender
@@ -181,8 +181,8 @@ impl<G: Graph + Clone> PartialDiscovery<
             common: MissingAncestors::new(graph, vec![]),
             missing: HashSet::new(),
             rng: Rng::from_seed(seed),
-            respect_size
-            randomize
+            respect_size,
+            randomize,
         }
     }

@@ -284,7 +284,7 @@ impl<G: Graph + Clone> PartialDiscovery<

     /// Did we acquire full knowledge of our Revisions that the peer has?
     pub fn is_complete(&self) -> bool {
-        self.undecided.as_ref().map_or(false,
+        self.undecided.as_ref().map_or(false, HashSet::is_empty)
     }

     /// Return the heads of the currently known common set of revisions.
@@ -332,7 +332,7 @@ impl<G: Graph + Clone> PartialDiscovery<
             FastHashMap::default();
         for &rev in self.undecided.as_ref().unwrap() {
             for p in ParentsIterator::graph_parents(&self.graph, rev)? {
-                children.entry(p).or_insert_with(
+                children.entry(p).or_insert_with(Vec::new).push(rev);
             }
         }
         self.children_cache = Some(children);
@@ -342,7 +342,7 @@ impl<G: Graph + Clone> PartialDiscovery<
     /// Provide statistics about the current state of the discovery process
     pub fn stats(&self) -> DiscoveryStats {
         DiscoveryStats {
-            undecided: self.undecided.as_ref().map(
+            undecided: self.undecided.as_ref().map(HashSet::len),
         }
     }

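`map_or(false, HashSet::is_empty)` and `map(HashSet::len)` above pass a method path directly rather than wrapping it in a closure such as `|s| s.is_empty()`, which is the shape clippy's redundant_closure lint pushes toward. A standalone sketch of the same idiom (the `undecided` variable here is invented for the example):

    use std::collections::HashSet;

    fn main() {
        let undecided: Option<HashSet<i32>> = Some(HashSet::new());
        // The method path replaces a `|s| s.is_empty()` closure.
        let complete = undecided.as_ref().map_or(false, HashSet::is_empty);
        let size = undecided.as_ref().map(HashSet::len);
        assert!(complete);
        assert_eq!(size, Some(0));
    }
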
@@ -324,6 +324,8 @@ pub fn parse_pattern_file_contents<P: As
     warn: bool,
 ) -> Result<(Vec<IgnorePattern>, Vec<PatternFileWarning>), PatternError> {
     let comment_regex = Regex::new(r"((?:^|[^\\])(?:\\\\)*)#.*").unwrap();
+
+    #[allow(clippy::trivial_regex)]
     let comment_escape_regex = Regex::new(r"\\#").unwrap();
     let mut inputs: Vec<IgnorePattern> = vec![];
     let mut warnings: Vec<PatternFileWarning> = vec![];
@@ -458,9 +460,7 @@ pub fn get_patterns_from_file(
         .into_iter()
         .flat_map(|entry| -> PatternResult<_> {
             let IgnorePattern {
-                syntax,
-                pattern,
-                source: _,
+                syntax, pattern, ..
             } = &entry;
             Ok(match syntax {
                 PatternSyntax::Include => {
@@ -504,10 +504,11 @@ impl SubInclude {
             normalize_path_bytes(&get_bytes_from_path(source));

         let source_root = get_path_from_bytes(&normalized_source);
-        let source_root = source_root.parent().unwrap_or(source_root.deref());
+        let source_root =
+            source_root.parent().unwrap_or_else(|| source_root.deref());

         let path = source_root.join(get_path_from_bytes(pattern));
-        let new_root = path.parent().unwrap_or(path.deref());
+        let new_root = path.parent().unwrap_or_else(|| path.deref());

         let prefix = canonical_path(&root_dir, &root_dir, new_root)?;

@@ -164,7 +164,7 @@ impl<'a> FileMatcher<'a> {
         files: &'a [impl AsRef<HgPath>],
     ) -> Result<Self, DirstateMapError> {
         Ok(Self {
-            files: HashSet::from_iter(files.iter().map(
+            files: HashSet::from_iter(files.iter().map(AsRef::as_ref)),
             dirs: DirsMultiset::from_manifest(files)?,
         })
     }
@@ -190,10 +190,10 @@ impl<'a> Matcher for FileMatcher<'a> {
         if self.files.is_empty() || !self.dirs.contains(&directory) {
             return VisitChildrenSet::Empty;
         }
-        let dirs_as_set = self.dirs.iter().map(
+        let dirs_as_set = self.dirs.iter().map(Deref::deref).collect();

         let mut candidates: HashSet<&HgPath> =
-            self.files.union(&dirs_as_set).
+            self.files.union(&dirs_as_set).cloned().collect();
         candidates.remove(HgPath::new(b""));

         if !directory.as_ref().is_empty() {
@@ -470,7 +470,7 @@ fn roots_dirs_and_parents(
                 _ => unreachable!(),
             })?
             .iter()
-            .map(
+            .map(ToOwned::to_owned),
     );
     parents.extend(
         DirsMultiset::from_manifest(&roots)
@@ -479,7 +479,7 @@ fn roots_dirs_and_parents(
                 _ => unreachable!(),
             })?
             .iter()
-            .map(
+            .map(ToOwned::to_owned),
     );

     Ok(RootsDirsAndParents {
@@ -523,7 +523,7 @@ fn build_match<'a, 'b>(
     let match_subinclude = move |filename: &HgPath| {
         for prefix in prefixes.iter() {
             if let Some(rel) = filename.relative_to(prefix) {
-                if (submatchers
+                if (submatchers[prefix])(rel) {
                     return true;
                 }
             }
@@ -25,6 +25,7 @@ pub const NULL_REVISION: Revision = -1;
 ///
 /// This is also equal to `i32::max_value()`, but it's better to spell
 /// it out explicitely, same as in `mercurial.node`
+#[allow(clippy::unreadable_literal)]
 pub const WORKING_DIRECTORY_REVISION: Revision = 0x7fffffff;

 /// The simplest expression of what we need of Mercurial DAGs.
@@ -49,6 +50,10 @@ pub trait RevlogIndex {
     /// Total number of Revisions referenced in this index
     fn len(&self) -> usize;

+    fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
     /// Return a reference to the Node or `None` if rev is out of bounds
     ///
     /// `NULL_REVISION` is not considered to be out of bounds.
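Adding `is_empty` next to `len` (here, on `DirsMultiset` earlier, and on `NodePrefixRef` below) is the standard response to clippy's len_without_is_empty lint; on a trait it can be a default method so implementors get it for free. A minimal sketch using an invented `Container` trait, not the crate's `RevlogIndex`:

    trait Container {
        fn len(&self) -> usize;

        // Default method: implementors only need to provide `len`.
        fn is_empty(&self) -> bool {
            self.len() == 0
        }
    }

    struct Fixed;

    impl Container for Fixed {
        fn len(&self) -> usize {
            3
        }
    }

    fn main() {
        assert!(!Fixed.is_empty());
    }
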
@@ -208,6 +208,10 @@ impl<'a> NodePrefixRef<'a> {
         }
     }

+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
     pub fn is_prefix_of(&self, node: &Node) -> bool {
         if self.is_odd {
             let buf = self.buf;
@@ -242,13 +246,13 @@ impl<'a> NodePrefixRef<'a> {
         } else {
             buf.len()
         };
-        for i in 0..until {
-            if
-                if
+        for (i, item) in buf.iter().enumerate().take(until) {
+            if *item != node.data[i] {
+                return if *item & 0xf0 == node.data[i] & 0xf0 {
+                    Some(2 * i + 1)
                 } else {
-                }
+                    Some(2 * i)
+                };
             }
         }
         if self.is_odd && buf[until] & 0xf0 != node.data[until] & 0xf0 {
@@ -218,7 +218,7 @@ pub struct Block([u8; BLOCK_SIZE]);
 /// Not derivable for arrays of length >32 until const generics are stable
 impl PartialEq for Block {
     fn eq(&self, other: &Self) -> bool {
+        self.0[..] == other.0[..]
     }
 }

@@ -343,14 +343,11 @@ impl NodeTree {
     ///
     /// We keep `readonly` and clone its root block if it isn't empty.
     fn new(readonly: Box<dyn Deref<Target = [Block]> + Send>) -> Self {
-        let root = readonly
-            .last()
-            .map(|b| b.clone())
-            .unwrap_or_else(|| Block::new());
+        let root = readonly.last().cloned().unwrap_or_else(Block::new);
         NodeTree {
-            readonly
+            readonly,
             growable: Vec::new(),
-            root
+            root,
             masked_inner_blocks: 0,
         }
     }
@@ -461,7 +458,7 @@ impl NodeTree {
     ) -> NodeTreeVisitor<'n, 'p> {
         NodeTreeVisitor {
             nt: self,
-            prefix
+            prefix,
             visit: self.len() - 1,
             nybble_idx: 0,
             done: false,
@@ -486,8 +483,7 @@ impl NodeTree {
         let glen = self.growable.len();
         if idx < ro_len {
             self.masked_inner_blocks += 1;
-            // TODO OPTIM I think this makes two copies
-            self.growable.push(ro_blocks[idx].clone());
+            self.growable.push(ro_blocks[idx]);
             (glen + ro_len, &mut self.growable[glen], glen + 1)
         } else if glen + ro_len == idx {
             (idx, &mut self.root, glen)
@@ -674,8 +670,8 @@ impl<'n, 'p> Iterator for NodeTreeVisito

         Some(NodeTreeVisitItem {
             block_idx: visit,
-            nybble
-            element
+            nybble,
+            element,
         })
     }
 }
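Two recurring fixes show up in the `NodeTree::new` hunk: `.map(|b| b.clone())` becomes `.cloned()`, and `.unwrap_or_else(|| Block::new())` drops the redundant closure in favour of the constructor path. A small sketch with a stand-in `Block` type, not the crate's real one:

    #[derive(Clone, Debug, PartialEq)]
    struct Block([u8; 4]);

    impl Block {
        fn new() -> Self {
            Block([0; 4])
        }
    }

    fn main() {
        let readonly: Vec<Block> = vec![];
        // `.cloned()` instead of `.map(|b| b.clone())`, and the constructor
        // path instead of the `|| Block::new()` closure.
        let root = readonly.last().cloned().unwrap_or_else(Block::new);
        assert_eq!(root, Block::new());
    }
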
@@ -68,6 +68,7 @@ pub trait SliceExt {
     fn drop_prefix(&self, needle: &Self) -> Option<&Self>;
 }

+#[allow(clippy::trivially_copy_pass_by_ref)]
 fn is_not_whitespace(c: &u8) -> bool {
     !(*c as char).is_whitespace()
 }
@@ -75,7 +76,7 @@ fn is_not_whitespace(c: &u8) -> bool {
 impl SliceExt for [u8] {
     fn trim_end(&self) -> &[u8] {
         if let Some(last) = self.iter().rposition(is_not_whitespace) {
-            &self[..last
+            &self[..=last]
         } else {
             &[]
         }
@@ -151,7 +152,7 @@ impl Escaped for u8 {

 impl<'a, T: Escaped> Escaped for &'a [T] {
     fn escaped_bytes(&self) -> Vec<u8> {
-        self.iter().flat_map(
+        self.iter().flat_map(Escaped::escaped_bytes).collect()
     }
 }

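The `trim_end` change uses an inclusive range (`..=last`), the form clippy's range_plus_one lint suggests over an exclusive `..last + 1`. A free-standing version of the same function over plain byte slices, assuming ASCII whitespace for simplicity:

    fn trim_end(s: &[u8]) -> &[u8] {
        if let Some(last) = s.iter().rposition(|c| !c.is_ascii_whitespace()) {
            // `..=last` keeps the last non-whitespace byte without the `+ 1`.
            &s[..=last]
        } else {
            &[]
        }
    }

    fn main() {
        assert_eq!(trim_end(b"hg  "), b"hg");
    }
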
@@ -98,7 +98,7 @@ impl<'a> FusedIterator for AncestorsWith
 ///
 /// The path itself isn't included unless it is b"" (meaning the root
 /// directory.)
-pub fn find_dirs
+pub fn find_dirs(path: &HgPath) -> Ancestors {
     let mut dirs = Ancestors { next: Some(path) };
     if !path.is_empty() {
         dirs.next(); // skip itself
@@ -113,9 +113,7 @@ pub fn find_dirs<'a>(path: &'a HgPath) -
 ///
 /// The path itself isn't included unless it is b"" (meaning the root
 /// directory.)
-pub(crate) fn find_dirs_with_base
-    path: &'a HgPath,
-) -> AncestorsWithBase<'a> {
+pub(crate) fn find_dirs_with_base(path: &HgPath) -> AncestorsWithBase {
     let mut dirs = AncestorsWithBase {
         next: Some((path, HgPath::new(b""))),
     };
@@ -214,9 +212,9 @@ pub fn canonical_path(
     if name != root && name.starts_with(&root) {
         let name = name.strip_prefix(&root).unwrap();
         auditor.audit_path(path_to_hg_path_buf(name)?)?;
+        Ok(name.to_owned())
     } else if name == root {
+        Ok("".into())
     } else {
         // Determine whether `name' is in the hierarchy at or beneath `root',
         // by iterating name=name.parent() until it returns `None` (can't
@@ -208,7 +208,7 @@ impl HgPath {
     }
     pub fn join<T: ?Sized + AsRef<Self>>(&self, other: &T) -> HgPathBuf {
         let mut inner = self.inner.to_owned();
-        if inner.
+        if !inner.is_empty() && inner.last() != Some(&b'/') {
             inner.push(b'/');
         }
         inner.extend(other.as_ref().bytes());
@@ -315,7 +315,7 @@ impl HgPath {
     /// This generates fine-grained errors useful for debugging.
     /// To simply check if the path is valid during tests, use `is_valid`.
     pub fn check_state(&self) -> Result<(), HgPathError> {
-        if self.
+        if self.is_empty() {
             return Ok(());
         }
         let bytes = self.as_bytes();
@@ -366,14 +366,14 @@ impl fmt::Display for HgPath {
     }
 }

-#[derive(Eq, Ord, Clone, PartialEq, PartialOrd, Hash)]
+#[derive(Default, Eq, Ord, Clone, PartialEq, PartialOrd, Hash)]
 pub struct HgPathBuf {
     inner: Vec<u8>,
 }

 impl HgPathBuf {
     pub fn new() -> Self {
-        Self { inner: Vec::new() }
+        Default::default()
     }
     pub fn push(&mut self, byte: u8) {
         self.inner.push(byte);
@@ -384,9 +384,6 @@ impl HgPathBuf {
     pub fn into_vec(self) -> Vec<u8> {
         self.inner
     }
-    pub fn as_ref(&self) -> &[u8] {
-        self.inner.as_ref()
-    }
 }

 impl fmt::Debug for HgPathBuf {
@@ -112,7 +112,7 @@ impl PathAuditor {
         // accidentally traverse a symlink into some other filesystem (which
         // is potentially expensive to access).
         for index in 0..parts.len() {
-            let prefix = &parts[..index
+            let prefix = &parts[..=index].join(&b'/');
             let prefix = HgPath::new(prefix);
             if self.audited_dirs.read().unwrap().contains(prefix) {
                 continue;
@@ -90,10 +90,7 @@ impl Index {
                 ),
             ));
         }
-        Ok(Index {
-            index: index,
-            capi: capi,
-        })
+        Ok(Index { index, capi })
     }

     /// return a reference to the CPython Index object in this Struct
@@ -158,7 +155,7 @@ impl RevlogIndex for Index {
         unsafe { (self.capi.index_length)(self.index.as_ptr()) as usize }
     }

-    fn node
+    fn node(&self, rev: Revision) -> Option<&Node> {
         let raw = unsafe {
             (self.capi.index_node)(self.index.as_ptr(), rev as c_int)
         };
@@ -89,7 +89,7 @@ impl CopyMap {
         py: Python,
         res: (&HgPathBuf, &HgPathBuf),
     ) -> PyResult<Option<PyBytes>> {
-        Ok(Some(PyBytes::new(py, res.0.as_
+        Ok(Some(PyBytes::new(py, res.0.as_bytes())))
     }
     fn translate_key_value(
         py: Python,
@@ -97,8 +97,8 @@ impl CopyMap {
     ) -> PyResult<Option<(PyBytes, PyBytes)>> {
         let (k, v) = res;
         Ok(Some((
-            PyBytes::new(py, k.as_
-            PyBytes::new(py, v.as_
+            PyBytes::new(py, k.as_bytes()),
+            PyBytes::new(py, v.as_bytes()),
         )))
     }
 }
@@ -128,7 +128,7 @@ impl Dirs {
         py: Python,
         res: &HgPathBuf,
     ) -> PyResult<Option<PyBytes>> {
-        Ok(Some(PyBytes::new(py, res.as_
+        Ok(Some(PyBytes::new(py, res.as_bytes())))
     }
 }

@@ -179,7 +179,7 @@ py_class!(pub class DirstateMap |py| {
         "other_parent",
         other_parent
             .iter()
-            .map(|v| PyBytes::new(py, v.as_
+            .map(|v| PyBytes::new(py, v.as_bytes()))
             .collect::<Vec<PyBytes>>()
             .to_py_object(py),
     )?;
@@ -348,7 +348,11 @@ py_class!(pub class DirstateMap |py| {
         for (key, value) in
             self.inner(py).borrow_mut().build_file_fold_map().iter()
         {
-            dict.set_item(py, key.as_ref().to_vec(), value.as_ref().to_vec())?;
+            dict.set_item(
+                py,
+                key.as_bytes().to_vec(),
+                value.as_bytes().to_vec(),
+            )?;
         }
         Ok(dict)
     }
@@ -440,8 +444,8 @@ py_class!(pub class DirstateMap |py| {
         for (key, value) in self.inner(py).borrow().copy_map.iter() {
             dict.set_item(
                 py,
-                PyBytes::new(py, key.as_
-                PyBytes::new(py, value.as_
+                PyBytes::new(py, key.as_bytes()),
+                PyBytes::new(py, value.as_bytes()),
             )?;
         }
         Ok(dict)
@@ -450,7 +454,7 @@ py_class!(pub class DirstateMap |py| {
     def copymapgetitem(&self, key: PyObject) -> PyResult<PyBytes> {
         let key = key.extract::<PyBytes>(py)?;
         match self.inner(py).borrow().copy_map.get(HgPath::new(key.data(py))) {
-            Some(copy) => Ok(PyBytes::new(py, copy.as_
+            Some(copy) => Ok(PyBytes::new(py, copy.as_bytes())),
             None => Err(PyErr::new::<exc::KeyError, _>(
                 py,
                 String::from_utf8_lossy(key.data(py)),
@@ -485,7 +489,7 @@ py_class!(pub class DirstateMap |py| {
             .get(HgPath::new(key.data(py)))
         {
             Some(copy) => Ok(Some(
-                PyBytes::new(py, copy.as_
+                PyBytes::new(py, copy.as_bytes()).into_object(),
             )),
             None => Ok(default),
         }
@@ -549,7 +553,7 @@ impl DirstateMap {
         py: Python,
         res: (&HgPathBuf, &DirstateEntry),
     ) -> PyResult<Option<PyBytes>> {
-        Ok(Some(PyBytes::new(py, res.0.as_
+        Ok(Some(PyBytes::new(py, res.0.as_bytes())))
     }
     fn translate_key_value(
         py: Python,
@@ -557,7 +561,7 @@ impl DirstateMap {
     ) -> PyResult<Option<(PyBytes, PyObject)>> {
         let (f, entry) = res;
         Ok(Some((
-            PyBytes::new(py, f.as_
+            PyBytes::new(py, f.as_bytes()),
             make_dirstate_tuple(py, entry)?,
         )))
     }
@@ -62,7 +62,7 @@ impl NonNormalEntries {
         py: Python,
         key: &HgPathBuf,
     ) -> PyResult<Option<PyBytes>> {
-        Ok(Some(PyBytes::new(py, key.as_
+        Ok(Some(PyBytes::new(py, key.as_bytes())))
     }
 }

@@ -236,12 +236,10 @@ pub fn status_wrapper(

             build_response(py, lookup, status_res, all_warnings)
         }
-        e => {
-            return Err(PyErr::new::<ValueError, _>(
-                py,
-                format!("Unsupported matcher {}", e),
-            ));
-        }
+        e => Err(PyErr::new::<ValueError, _>(
+            py,
+            format!("Unsupported matcher {}", e),
+        )),
     }
 }

@@ -37,15 +37,15 @@ fn parse_dirstate_wrapper(
     for (filename, entry) in &dirstate_map {
         dmap.set_item(
             py,
-            PyBytes::new(py, filename.as_
+            PyBytes::new(py, filename.as_bytes()),
             make_dirstate_tuple(py, entry)?,
         )?;
     }
     for (path, copy_path) in copies {
         copymap.set_item(
             py,
-            PyBytes::new(py, path.as_
-            PyBytes::new(py, copy_path.as_
+            PyBytes::new(py, path.as_bytes()),
+            PyBytes::new(py, copy_path.as_bytes()),
         )?;
     }
     Ok(
@@ -116,7 +116,7 @@ fn pack_dirstate_wrapper(
     for (filename, entry) in &dirstate_map {
         dmap.set_item(
             py,
-            PyBytes::new(py, filename.as_
+            PyBytes::new(py, filename.as_bytes()),
             make_dirstate_tuple(py, entry)?,
         )?;
     }
@@ -32,10 +32,7 @@ pub fn node_from_py_object<'a>(

 /// Clone incoming Python bytes given as `PyBytes` as a `Node`,
 /// doing the necessary checks.
-pub fn node_from_py_bytes<'a>(
-    py: Python,
-    bytes: &'a PyBytes,
-) -> PyResult<Node> {
+pub fn node_from_py_bytes(py: Python, bytes: &PyBytes) -> PyResult<Node> {
     <NodeData>::try_from(bytes.data(py))
         .map_err(|_| {
             PyErr::new::<ValueError, _>(
@@ -43,5 +40,5 @@ pub fn node_from_py_bytes<'a>(
                 format!("{}-byte hash required", NODE_BYTES_LENGTH),
             )
         })
-        .map(
+        .map(Into::into)
 }