# HG changeset patch
# User Raphaël Gomès
# Date 2020-06-15 16:26:40
# Node ID 26114bd6ec60cb48b5dc52946c2f51a5fb698c7a
# Parent ef8dcee272acbe22af5829e31c55b694f345ceba
rust: do a clippy pass

This is the result of running `cargo clippy` on hg-core/hg-cpython and fixing
the lints that do not require too much code churn (those would warrant a
separate commit or a complete refactor) and that come only from our code
(a lot of the warnings in hg-cpython come from `rust-cpython`).

Most of these were good lints; for two of them the linter was simply not smart
enough (or the compiler has not caught up to `clippy`'s level, depending on
how you see it).

Maybe in the future we could make `clippy` part of the CI.

Differential Revision: https://phab.mercurial-scm.org/D8635

diff --git a/rust/hg-core/src/ancestors.rs b/rust/hg-core/src/ancestors.rs --- a/rust/hg-core/src/ancestors.rs +++ b/rust/hg-core/src/ancestors.rs @@ -55,19 +55,19 @@ impl AncestorsIterator { let filtered_initrevs = initrevs.into_iter().filter(|&r| r >= stoprev); if inclusive { let visit: BinaryHeap = filtered_initrevs.collect(); - let seen = visit.iter().map(|&x| x).collect(); + let seen = visit.iter().cloned().collect(); return Ok(AncestorsIterator { - visit: visit, - seen: seen, - stoprev: stoprev, - graph: graph, + visit, + seen, + stoprev, + graph, }); } let mut this = AncestorsIterator { visit: BinaryHeap::new(), seen: HashSet::new(), - stoprev: stoprev, - graph: graph, + stoprev, + graph, }; this.seen.insert(NULL_REVISION); for rev in filtered_initrevs { @@ -107,7 +107,7 @@ impl AncestorsIterator { } pub fn peek(&self) -> Option { - self.visit.peek().map(|&r| r) + self.visit.peek().cloned() } /// Tell if the iterator is about an empty set @@ -182,8 +182,8 @@ impl LazyAncestors inclusive, )?, initrevs: v, - stoprev: stoprev, - inclusive: inclusive, + stoprev, + inclusive, }) } @@ -211,7 +211,7 @@ impl LazyAncestors impl MissingAncestors { pub fn new(graph: G, bases: impl IntoIterator) -> Self { let mut created = MissingAncestors { - graph: graph, + graph, bases: HashSet::new(), max_base: NULL_REVISION, }; diff --git a/rust/hg-core/src/dagops.rs b/rust/hg-core/src/dagops.rs --- a/rust/hg-core/src/dagops.rs +++ b/rust/hg-core/src/dagops.rs @@ -16,10 +16,10 @@ use super::{Graph, GraphError, Revision, use crate::ancestors::AncestorsIterator; use std::collections::{BTreeSet, HashSet}; -fn remove_parents( graph: &impl Graph, rev: Revision, - set: &mut HashSet, + set: &mut HashSet, ) -> Result<(), GraphError> { for parent in graph.parents(rev)?.iter() { if *parent != NULL_REVISION { @@ -65,9 +65,9 @@ pub fn heads<'a>( /// /// # Performance notes /// Internally, this function will store a full copy of `revs` in a `Vec`.
-pub fn retain_heads( +pub fn retain_heads( graph: &impl Graph, - revs: &mut HashSet, + revs: &mut HashSet, ) -> Result<(), GraphError> { revs.remove(&NULL_REVISION); // we need to construct an iterable copy of revs to avoid itering while @@ -84,9 +84,9 @@ pub fn retain_heads( /// Roots of `revs`, passed as a `HashSet` /// /// They are returned in arbitrary order -pub fn roots( +pub fn roots( graph: &G, - revs: &HashSet, + revs: &HashSet, ) -> Result, GraphError> { let mut roots: Vec = Vec::new(); for rev in revs { @@ -229,7 +229,8 @@ mod tests { graph: &impl Graph, revs: &[Revision], ) -> Result, GraphError> { - let mut as_vec = roots(graph, &revs.iter().cloned().collect())?; + let set: HashSet<_> = revs.iter().cloned().collect(); + let mut as_vec = roots(graph, &set)?; as_vec.sort(); Ok(as_vec) } diff --git a/rust/hg-core/src/dirstate/dirs_multiset.rs b/rust/hg-core/src/dirstate/dirs_multiset.rs --- a/rust/hg-core/src/dirstate/dirs_multiset.rs +++ b/rust/hg-core/src/dirstate/dirs_multiset.rs @@ -108,7 +108,7 @@ impl DirsMultiset { for subpath in files::find_dirs(path.as_ref()) { match self.inner.entry(subpath.to_owned()) { Entry::Occupied(mut entry) => { - let val = entry.get().clone(); + let val = *entry.get(); if val > 1 { entry.insert(val - 1); break; @@ -137,6 +137,10 @@ impl DirsMultiset { pub fn len(&self) -> usize { self.inner.len() } + + pub fn is_empty(&self) -> bool { + self.len() == 0 + } } /// This is basically a reimplementation of `DirsMultiset` that stores the @@ -156,7 +160,7 @@ impl<'a> DirsChildrenMultiset<'a> { let mut new = Self { inner: HashMap::default(), only_include: only_include - .map(|s| s.iter().map(|p| p.as_ref()).collect()), + .map(|s| s.iter().map(AsRef::as_ref).collect()), }; for path in paths { diff --git a/rust/hg-core/src/dirstate/dirstate_map.rs b/rust/hg-core/src/dirstate/dirstate_map.rs --- a/rust/hg-core/src/dirstate/dirstate_map.rs +++ b/rust/hg-core/src/dirstate/dirstate_map.rs @@ -223,7 +223,7 @@ impl DirstateMap { self.get_non_normal_other_parent_entries() .0 .union(&other) - .map(|e| e.to_owned()) + .map(ToOwned::to_owned) .collect() } diff --git a/rust/hg-core/src/dirstate/parsers.rs b/rust/hg-core/src/dirstate/parsers.rs --- a/rust/hg-core/src/dirstate/parsers.rs +++ b/rust/hg-core/src/dirstate/parsers.rs @@ -135,7 +135,7 @@ pub fn pack_dirstate( } let mut new_filename = new_filename.into_vec(); if let Some(copy) = copy_map.get(filename) { - new_filename.push('\0' as u8); + new_filename.push(b'\0'); new_filename.extend(copy.bytes()); } diff --git a/rust/hg-core/src/dirstate/status.rs b/rust/hg-core/src/dirstate/status.rs --- a/rust/hg-core/src/dirstate/status.rs +++ b/rust/hg-core/src/dirstate/status.rs @@ -127,7 +127,7 @@ fn list_directory( if skip_dot_hg && filename.as_bytes() == b".hg" && file_type.is_dir() { return Ok(vec![]); } else { - results.push((HgPathBuf::from(filename), entry)) + results.push((filename, entry)) } } @@ -164,14 +164,15 @@ fn dispatch_found( (mode ^ st_mode as i32) & 0o100 != 0o000 && options.check_exec; let metadata_changed = size >= 0 && (size_changed || mode_changed); let other_parent = size == SIZE_FROM_OTHER_PARENT; + if metadata_changed || other_parent || copy_map.contains_key(filename.as_ref()) { Dispatch::Modified - } else if mod_compare(mtime, st_mtime as i32) { - Dispatch::Unsure - } else if st_mtime == options.last_normal_time { + } else if mod_compare(mtime, st_mtime as i32) + || st_mtime == options.last_normal_time + { // the file may have just been marked as normal and // it may have changed in the same 
second without // changing its size. This can happen if we quickly @@ -226,9 +227,9 @@ fn walk_explicit<'a>( files .unwrap_or(&DEFAULT_WORK) .par_iter() - .map(move |filename| { + .map(move |&filename| { // TODO normalization - let normalized = filename.as_ref(); + let normalized = filename; let buf = match hg_path_to_path_buf(normalized) { Ok(x) => x, @@ -254,33 +255,31 @@ fn walk_explicit<'a>( ))); } Some(Ok((normalized, Dispatch::Unknown))) + } else if file_type.is_dir() { + if options.collect_traversed_dirs { + traversed_sender + .send(normalized.to_owned()) + .expect("receiver should outlive sender"); + } + Some(Ok(( + normalized, + Dispatch::Directory { + was_file: in_dmap.is_some(), + }, + ))) } else { - if file_type.is_dir() { - if options.collect_traversed_dirs { - traversed_sender - .send(normalized.to_owned()) - .expect("receiver should outlive sender"); - } - Some(Ok(( - normalized, - Dispatch::Directory { - was_file: in_dmap.is_some(), - }, - ))) - } else { - Some(Ok(( - normalized, - Dispatch::Bad(BadMatch::BadType( - // TODO do more than unknown - // Support for all `BadType` variant - // varies greatly between platforms. - // So far, no tests check the type and - // this should be good enough for most - // users. - BadType::Unknown, - )), - ))) - } + Some(Ok(( + normalized, + Dispatch::Bad(BadMatch::BadType( + // TODO do more than unknown + // Support for all `BadType` variant + // varies greatly between platforms. + // So far, no tests check the type and + // this should be good enough for most + // users. + BadType::Unknown, + )), + ))) }; } Err(_) => { @@ -381,12 +380,10 @@ fn handle_traversed_entry<'a>( .send(Ok((filename.to_owned(), Dispatch::Ignored))) .unwrap(); } - } else { - if options.list_unknown { - files_sender - .send(Ok((filename.to_owned(), Dispatch::Unknown))) - .unwrap(); - } + } else if options.list_unknown { + files_sender + .send(Ok((filename.to_owned(), Dispatch::Unknown))) + .unwrap(); } } else if ignore_fn(&filename) && options.list_ignored { files_sender diff --git a/rust/hg-core/src/discovery.rs b/rust/hg-core/src/discovery.rs --- a/rust/hg-core/src/discovery.rs +++ b/rust/hg-core/src/discovery.rs @@ -181,8 +181,8 @@ impl PartialDiscovery< common: MissingAncestors::new(graph, vec![]), missing: HashSet::new(), rng: Rng::from_seed(seed), - respect_size: respect_size, - randomize: randomize, + respect_size, + randomize, } } @@ -284,7 +284,7 @@ impl PartialDiscovery< /// Did we acquire full knowledge of our Revisions that the peer has? pub fn is_complete(&self) -> bool { - self.undecided.as_ref().map_or(false, |s| s.is_empty()) + self.undecided.as_ref().map_or(false, HashSet::is_empty) } /// Return the heads of the currently known common set of revisions. @@ -332,7 +332,7 @@ impl PartialDiscovery< FastHashMap::default(); for &rev in self.undecided.as_ref().unwrap() { for p in ParentsIterator::graph_parents(&self.graph, rev)? 
{ - children.entry(p).or_insert_with(|| Vec::new()).push(rev); + children.entry(p).or_insert_with(Vec::new).push(rev); } } self.children_cache = Some(children); @@ -342,7 +342,7 @@ impl PartialDiscovery< /// Provide statistics about the current state of the discovery process pub fn stats(&self) -> DiscoveryStats { DiscoveryStats { - undecided: self.undecided.as_ref().map(|s| s.len()), + undecided: self.undecided.as_ref().map(HashSet::len), } } diff --git a/rust/hg-core/src/filepatterns.rs b/rust/hg-core/src/filepatterns.rs --- a/rust/hg-core/src/filepatterns.rs +++ b/rust/hg-core/src/filepatterns.rs @@ -324,6 +324,8 @@ pub fn parse_pattern_file_contents Result<(Vec, Vec), PatternError> { let comment_regex = Regex::new(r"((?:^|[^\\])(?:\\\\)*)#.*").unwrap(); + + #[allow(clippy::trivial_regex)] let comment_escape_regex = Regex::new(r"\\#").unwrap(); let mut inputs: Vec = vec![]; let mut warnings: Vec = vec![]; @@ -458,9 +460,7 @@ pub fn get_patterns_from_file( .into_iter() .flat_map(|entry| -> PatternResult<_> { let IgnorePattern { - syntax, - pattern, - source: _, + syntax, pattern, .. } = &entry; Ok(match syntax { PatternSyntax::Include => { @@ -504,10 +504,11 @@ impl SubInclude { normalize_path_bytes(&get_bytes_from_path(source)); let source_root = get_path_from_bytes(&normalized_source); - let source_root = source_root.parent().unwrap_or(source_root.deref()); + let source_root = + source_root.parent().unwrap_or_else(|| source_root.deref()); let path = source_root.join(get_path_from_bytes(pattern)); - let new_root = path.parent().unwrap_or(path.deref()); + let new_root = path.parent().unwrap_or_else(|| path.deref()); let prefix = canonical_path(&root_dir, &root_dir, new_root)?; diff --git a/rust/hg-core/src/matchers.rs b/rust/hg-core/src/matchers.rs --- a/rust/hg-core/src/matchers.rs +++ b/rust/hg-core/src/matchers.rs @@ -164,7 +164,7 @@ impl<'a> FileMatcher<'a> { files: &'a [impl AsRef], ) -> Result { Ok(Self { - files: HashSet::from_iter(files.iter().map(|f| f.as_ref())), + files: HashSet::from_iter(files.iter().map(AsRef::as_ref)), dirs: DirsMultiset::from_manifest(files)?, }) } @@ -190,10 +190,10 @@ impl<'a> Matcher for FileMatcher<'a> { if self.files.is_empty() || !self.dirs.contains(&directory) { return VisitChildrenSet::Empty; } - let dirs_as_set = self.dirs.iter().map(|k| k.deref()).collect(); + let dirs_as_set = self.dirs.iter().map(Deref::deref).collect(); let mut candidates: HashSet<&HgPath> = - self.files.union(&dirs_as_set).map(|k| *k).collect(); + self.files.union(&dirs_as_set).cloned().collect(); candidates.remove(HgPath::new(b"")); if !directory.as_ref().is_empty() { @@ -470,7 +470,7 @@ fn roots_dirs_and_parents( _ => unreachable!(), })? .iter() - .map(|k| k.to_owned()), + .map(ToOwned::to_owned), ); parents.extend( DirsMultiset::from_manifest(&roots) @@ -479,7 +479,7 @@ fn roots_dirs_and_parents( _ => unreachable!(), })? 
.iter() - .map(|k| k.to_owned()), + .map(ToOwned::to_owned), ); Ok(RootsDirsAndParents { @@ -523,7 +523,7 @@ fn build_match<'a, 'b>( let match_subinclude = move |filename: &HgPath| { for prefix in prefixes.iter() { if let Some(rel) = filename.relative_to(prefix) { - if (submatchers.get(prefix).unwrap())(rel) { + if (submatchers[prefix])(rel) { return true; } } diff --git a/rust/hg-core/src/revlog.rs b/rust/hg-core/src/revlog.rs --- a/rust/hg-core/src/revlog.rs +++ b/rust/hg-core/src/revlog.rs @@ -25,6 +25,7 @@ pub const NULL_REVISION: Revision = -1; /// /// This is also equal to `i32::max_value()`, but it's better to spell /// it out explicitely, same as in `mercurial.node` +#[allow(clippy::unreadable_literal)] pub const WORKING_DIRECTORY_REVISION: Revision = 0x7fffffff; /// The simplest expression of what we need of Mercurial DAGs. @@ -49,6 +50,10 @@ pub trait RevlogIndex { /// Total number of Revisions referenced in this index fn len(&self) -> usize; + fn is_empty(&self) -> bool { + self.len() == 0 + } + /// Return a reference to the Node or `None` if rev is out of bounds /// /// `NULL_REVISION` is not considered to be out of bounds. diff --git a/rust/hg-core/src/revlog/node.rs b/rust/hg-core/src/revlog/node.rs --- a/rust/hg-core/src/revlog/node.rs +++ b/rust/hg-core/src/revlog/node.rs @@ -208,6 +208,10 @@ impl<'a> NodePrefixRef<'a> { } } + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + pub fn is_prefix_of(&self, node: &Node) -> bool { if self.is_odd { let buf = self.buf; @@ -242,13 +246,13 @@ impl<'a> NodePrefixRef<'a> { } else { buf.len() }; - for i in 0..until { - if buf[i] != node.data[i] { - if buf[i] & 0xf0 == node.data[i] & 0xf0 { - return Some(2 * i + 1); + for (i, item) in buf.iter().enumerate().take(until) { + if *item != node.data[i] { + return if *item & 0xf0 == node.data[i] & 0xf0 { + Some(2 * i + 1) } else { - return Some(2 * i); - } + Some(2 * i) + }; } } if self.is_odd && buf[until] & 0xf0 != node.data[until] & 0xf0 { diff --git a/rust/hg-core/src/revlog/nodemap.rs b/rust/hg-core/src/revlog/nodemap.rs --- a/rust/hg-core/src/revlog/nodemap.rs +++ b/rust/hg-core/src/revlog/nodemap.rs @@ -218,7 +218,7 @@ pub struct Block([u8; BLOCK_SIZE]); /// Not derivable for arrays of length >32 until const generics are stable impl PartialEq for Block { fn eq(&self, other: &Self) -> bool { - &self.0[..] == &other.0[..] + self.0[..] == other.0[..] } } @@ -343,14 +343,11 @@ impl NodeTree { /// /// We keep `readonly` and clone its root block if it isn't empty. 
fn new(readonly: Box + Send>) -> Self { - let root = readonly - .last() - .map(|b| b.clone()) - .unwrap_or_else(|| Block::new()); + let root = readonly.last().cloned().unwrap_or_else(Block::new); NodeTree { - readonly: readonly, + readonly, growable: Vec::new(), - root: root, + root, masked_inner_blocks: 0, } } @@ -461,7 +458,7 @@ impl NodeTree { ) -> NodeTreeVisitor<'n, 'p> { NodeTreeVisitor { nt: self, - prefix: prefix, + prefix, visit: self.len() - 1, nybble_idx: 0, done: false, @@ -486,8 +483,7 @@ impl NodeTree { let glen = self.growable.len(); if idx < ro_len { self.masked_inner_blocks += 1; - // TODO OPTIM I think this makes two copies - self.growable.push(ro_blocks[idx].clone()); + self.growable.push(ro_blocks[idx]); (glen + ro_len, &mut self.growable[glen], glen + 1) } else if glen + ro_len == idx { (idx, &mut self.root, glen) @@ -674,8 +670,8 @@ impl<'n, 'p> Iterator for NodeTreeVisito Some(NodeTreeVisitItem { block_idx: visit, - nybble: nybble, - element: element, + nybble, + element, }) } } diff --git a/rust/hg-core/src/utils.rs b/rust/hg-core/src/utils.rs --- a/rust/hg-core/src/utils.rs +++ b/rust/hg-core/src/utils.rs @@ -68,6 +68,7 @@ pub trait SliceExt { fn drop_prefix(&self, needle: &Self) -> Option<&Self>; } +#[allow(clippy::trivially_copy_pass_by_ref)] fn is_not_whitespace(c: &u8) -> bool { !(*c as char).is_whitespace() } @@ -75,7 +76,7 @@ fn is_not_whitespace(c: &u8) -> bool { impl SliceExt for [u8] { fn trim_end(&self) -> &[u8] { if let Some(last) = self.iter().rposition(is_not_whitespace) { - &self[..last + 1] + &self[..=last] } else { &[] } @@ -151,7 +152,7 @@ impl Escaped for u8 { impl<'a, T: Escaped> Escaped for &'a [T] { fn escaped_bytes(&self) -> Vec { - self.iter().flat_map(|item| item.escaped_bytes()).collect() + self.iter().flat_map(Escaped::escaped_bytes).collect() } } diff --git a/rust/hg-core/src/utils/files.rs b/rust/hg-core/src/utils/files.rs --- a/rust/hg-core/src/utils/files.rs +++ b/rust/hg-core/src/utils/files.rs @@ -98,7 +98,7 @@ impl<'a> FusedIterator for AncestorsWith /// /// The path itself isn't included unless it is b"" (meaning the root /// directory.) -pub fn find_dirs<'a>(path: &'a HgPath) -> Ancestors<'a> { +pub fn find_dirs(path: &HgPath) -> Ancestors { let mut dirs = Ancestors { next: Some(path) }; if !path.is_empty() { dirs.next(); // skip itself @@ -113,9 +113,7 @@ pub fn find_dirs<'a>(path: &'a HgPath) - /// /// The path itself isn't included unless it is b"" (meaning the root /// directory.) 
-pub(crate) fn find_dirs_with_base<'a>( - path: &'a HgPath, -) -> AncestorsWithBase<'a> { +pub(crate) fn find_dirs_with_base(path: &HgPath) -> AncestorsWithBase { let mut dirs = AncestorsWithBase { next: Some((path, HgPath::new(b""))), }; @@ -214,9 +212,9 @@ pub fn canonical_path( if name != root && name.starts_with(&root) { let name = name.strip_prefix(&root).unwrap(); auditor.audit_path(path_to_hg_path_buf(name)?)?; - return Ok(name.to_owned()); + Ok(name.to_owned()) } else if name == root { - return Ok("".into()); + Ok("".into()) } else { // Determine whether `name' is in the hierarchy at or beneath `root', // by iterating name=name.parent() until it returns `None` (can't diff --git a/rust/hg-core/src/utils/hg_path.rs b/rust/hg-core/src/utils/hg_path.rs --- a/rust/hg-core/src/utils/hg_path.rs +++ b/rust/hg-core/src/utils/hg_path.rs @@ -208,7 +208,7 @@ impl HgPath { } pub fn join>(&self, other: &T) -> HgPathBuf { let mut inner = self.inner.to_owned(); - if inner.len() != 0 && inner.last() != Some(&b'/') { + if !inner.is_empty() && inner.last() != Some(&b'/') { inner.push(b'/'); } inner.extend(other.as_ref().bytes()); @@ -315,7 +315,7 @@ impl HgPath { /// This generates fine-grained errors useful for debugging. /// To simply check if the path is valid during tests, use `is_valid`. pub fn check_state(&self) -> Result<(), HgPathError> { - if self.len() == 0 { + if self.is_empty() { return Ok(()); } let bytes = self.as_bytes(); @@ -366,14 +366,14 @@ impl fmt::Display for HgPath { } } -#[derive(Eq, Ord, Clone, PartialEq, PartialOrd, Hash)] +#[derive(Default, Eq, Ord, Clone, PartialEq, PartialOrd, Hash)] pub struct HgPathBuf { inner: Vec, } impl HgPathBuf { pub fn new() -> Self { - Self { inner: Vec::new() } + Default::default() } pub fn push(&mut self, byte: u8) { self.inner.push(byte); @@ -384,9 +384,6 @@ impl HgPathBuf { pub fn into_vec(self) -> Vec { self.inner } - pub fn as_ref(&self) -> &[u8] { - self.inner.as_ref() - } } impl fmt::Debug for HgPathBuf { diff --git a/rust/hg-core/src/utils/path_auditor.rs b/rust/hg-core/src/utils/path_auditor.rs --- a/rust/hg-core/src/utils/path_auditor.rs +++ b/rust/hg-core/src/utils/path_auditor.rs @@ -112,7 +112,7 @@ impl PathAuditor { // accidentally traverse a symlink into some other filesystem (which // is potentially expensive to access). 
for index in 0..parts.len() { - let prefix = &parts[..index + 1].join(&b'/'); + let prefix = &parts[..=index].join(&b'/'); let prefix = HgPath::new(prefix); if self.audited_dirs.read().unwrap().contains(prefix) { continue; diff --git a/rust/hg-cpython/src/cindex.rs b/rust/hg-cpython/src/cindex.rs --- a/rust/hg-cpython/src/cindex.rs +++ b/rust/hg-cpython/src/cindex.rs @@ -90,10 +90,7 @@ impl Index { ), )); } - Ok(Index { - index: index, - capi: capi, - }) + Ok(Index { index, capi }) } /// return a reference to the CPython Index object in this Struct @@ -158,7 +155,7 @@ impl RevlogIndex for Index { unsafe { (self.capi.index_length)(self.index.as_ptr()) as usize } } - fn node<'a>(&'a self, rev: Revision) -> Option<&'a Node> { + fn node(&self, rev: Revision) -> Option<&Node> { let raw = unsafe { (self.capi.index_node)(self.index.as_ptr(), rev as c_int) }; diff --git a/rust/hg-cpython/src/dirstate/copymap.rs b/rust/hg-cpython/src/dirstate/copymap.rs --- a/rust/hg-cpython/src/dirstate/copymap.rs +++ b/rust/hg-cpython/src/dirstate/copymap.rs @@ -89,7 +89,7 @@ impl CopyMap { py: Python, res: (&HgPathBuf, &HgPathBuf), ) -> PyResult> { - Ok(Some(PyBytes::new(py, res.0.as_ref()))) + Ok(Some(PyBytes::new(py, res.0.as_bytes()))) } fn translate_key_value( py: Python, @@ -97,8 +97,8 @@ impl CopyMap { ) -> PyResult> { let (k, v) = res; Ok(Some(( - PyBytes::new(py, k.as_ref()), - PyBytes::new(py, v.as_ref()), + PyBytes::new(py, k.as_bytes()), + PyBytes::new(py, v.as_bytes()), ))) } } diff --git a/rust/hg-cpython/src/dirstate/dirs_multiset.rs b/rust/hg-cpython/src/dirstate/dirs_multiset.rs --- a/rust/hg-cpython/src/dirstate/dirs_multiset.rs +++ b/rust/hg-cpython/src/dirstate/dirs_multiset.rs @@ -128,7 +128,7 @@ impl Dirs { py: Python, res: &HgPathBuf, ) -> PyResult> { - Ok(Some(PyBytes::new(py, res.as_ref()))) + Ok(Some(PyBytes::new(py, res.as_bytes()))) } } diff --git a/rust/hg-cpython/src/dirstate/dirstate_map.rs b/rust/hg-cpython/src/dirstate/dirstate_map.rs --- a/rust/hg-cpython/src/dirstate/dirstate_map.rs +++ b/rust/hg-cpython/src/dirstate/dirstate_map.rs @@ -179,7 +179,7 @@ py_class!(pub class DirstateMap |py| { "other_parent", other_parent .iter() - .map(|v| PyBytes::new(py, v.as_ref())) + .map(|v| PyBytes::new(py, v.as_bytes())) .collect::>() .to_py_object(py), )?; @@ -348,7 +348,11 @@ py_class!(pub class DirstateMap |py| { for (key, value) in self.inner(py).borrow_mut().build_file_fold_map().iter() { - dict.set_item(py, key.as_ref().to_vec(), value.as_ref().to_vec())?; + dict.set_item( + py, + key.as_bytes().to_vec(), + value.as_bytes().to_vec(), + )?; } Ok(dict) } @@ -440,8 +444,8 @@ py_class!(pub class DirstateMap |py| { for (key, value) in self.inner(py).borrow().copy_map.iter() { dict.set_item( py, - PyBytes::new(py, key.as_ref()), - PyBytes::new(py, value.as_ref()), + PyBytes::new(py, key.as_bytes()), + PyBytes::new(py, value.as_bytes()), )?; } Ok(dict) @@ -450,7 +454,7 @@ py_class!(pub class DirstateMap |py| { def copymapgetitem(&self, key: PyObject) -> PyResult { let key = key.extract::(py)?; match self.inner(py).borrow().copy_map.get(HgPath::new(key.data(py))) { - Some(copy) => Ok(PyBytes::new(py, copy.as_ref())), + Some(copy) => Ok(PyBytes::new(py, copy.as_bytes())), None => Err(PyErr::new::( py, String::from_utf8_lossy(key.data(py)), @@ -485,7 +489,7 @@ py_class!(pub class DirstateMap |py| { .get(HgPath::new(key.data(py))) { Some(copy) => Ok(Some( - PyBytes::new(py, copy.as_ref()).into_object(), + PyBytes::new(py, copy.as_bytes()).into_object(), )), None => Ok(default), } @@ -549,7 
+553,7 @@ impl DirstateMap { py: Python, res: (&HgPathBuf, &DirstateEntry), ) -> PyResult> { - Ok(Some(PyBytes::new(py, res.0.as_ref()))) + Ok(Some(PyBytes::new(py, res.0.as_bytes()))) } fn translate_key_value( py: Python, @@ -557,7 +561,7 @@ impl DirstateMap { ) -> PyResult> { let (f, entry) = res; Ok(Some(( - PyBytes::new(py, f.as_ref()), + PyBytes::new(py, f.as_bytes()), make_dirstate_tuple(py, entry)?, ))) } diff --git a/rust/hg-cpython/src/dirstate/non_normal_entries.rs b/rust/hg-cpython/src/dirstate/non_normal_entries.rs --- a/rust/hg-cpython/src/dirstate/non_normal_entries.rs +++ b/rust/hg-cpython/src/dirstate/non_normal_entries.rs @@ -62,7 +62,7 @@ impl NonNormalEntries { py: Python, key: &HgPathBuf, ) -> PyResult> { - Ok(Some(PyBytes::new(py, key.as_ref()))) + Ok(Some(PyBytes::new(py, key.as_bytes()))) } } diff --git a/rust/hg-cpython/src/dirstate/status.rs b/rust/hg-cpython/src/dirstate/status.rs --- a/rust/hg-cpython/src/dirstate/status.rs +++ b/rust/hg-cpython/src/dirstate/status.rs @@ -236,12 +236,10 @@ pub fn status_wrapper( build_response(py, lookup, status_res, all_warnings) } - e => { - return Err(PyErr::new::( - py, - format!("Unsupported matcher {}", e), - )); - } + e => Err(PyErr::new::( + py, + format!("Unsupported matcher {}", e), + )), } } diff --git a/rust/hg-cpython/src/parsers.rs b/rust/hg-cpython/src/parsers.rs --- a/rust/hg-cpython/src/parsers.rs +++ b/rust/hg-cpython/src/parsers.rs @@ -37,15 +37,15 @@ fn parse_dirstate_wrapper( for (filename, entry) in &dirstate_map { dmap.set_item( py, - PyBytes::new(py, filename.as_ref()), + PyBytes::new(py, filename.as_bytes()), make_dirstate_tuple(py, entry)?, )?; } for (path, copy_path) in copies { copymap.set_item( py, - PyBytes::new(py, path.as_ref()), - PyBytes::new(py, copy_path.as_ref()), + PyBytes::new(py, path.as_bytes()), + PyBytes::new(py, copy_path.as_bytes()), )?; } Ok( @@ -116,7 +116,7 @@ fn pack_dirstate_wrapper( for (filename, entry) in &dirstate_map { dmap.set_item( py, - PyBytes::new(py, filename.as_ref()), + PyBytes::new(py, filename.as_bytes()), make_dirstate_tuple(py, entry)?, )?; } diff --git a/rust/hg-cpython/src/utils.rs b/rust/hg-cpython/src/utils.rs --- a/rust/hg-cpython/src/utils.rs +++ b/rust/hg-cpython/src/utils.rs @@ -32,10 +32,7 @@ pub fn node_from_py_object<'a>( /// Clone incoming Python bytes given as `PyBytes` as a `Node`, /// doing the necessary checks. -pub fn node_from_py_bytes<'a>( - py: Python, - bytes: &'a PyBytes, -) -> PyResult { +pub fn node_from_py_bytes(py: Python, bytes: &PyBytes) -> PyResult { ::try_from(bytes.data(py)) .map_err(|_| { PyErr::new::( @@ -43,5 +40,5 @@ pub fn node_from_py_bytes<'a>( format!("{}-byte hash required", NODE_BYTES_LENGTH), ) }) - .map(|n| n.into()) + .map(Into::into) }