rust-clippy: fix most warnings in `hg-core`...
Raphaël Gomès
r50825:e98fd81b default
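
The hunks below apply `cargo clippy` suggestions throughout `hg-core`. As orientation, here is a minimal, self-contained sketch (invented `Origin`/`Layer` types, not hg-core code) of the rewrite patterns that recur most often in this changeset: `matches!` instead of a hand-written boolean `if let`, dropping needless borrows, and `sort_unstable` on vectors of primitives.

// Illustrative only; `Origin`/`Layer` are invented stand-ins, not hg-core types.

enum Origin {
    CommandLine,
    File,
}

struct Layer {
    origin: Origin,
}

impl Layer {
    // clippy::match_like_matches_macro: replace
    // `if let ConfigOrigin::CommandLine = self.origin { true } else { false }`
    // with a single `matches!` call.
    fn is_from_command_line(&self) -> bool {
        matches!(self.origin, Origin::CommandLine)
    }
}

// clippy::needless_borrow: `section` is already `&[u8]`, so callers pass it
// directly instead of re-borrowing it as `&section`.
fn first_byte(section: &[u8]) -> Option<u8> {
    section.first().copied()
}

fn main() {
    let cli = Layer { origin: Origin::CommandLine };
    let file = Layer { origin: Origin::File };
    assert!(cli.is_from_command_line());
    assert!(!file.is_from_command_line());

    let section: &[u8] = b"ui";
    assert_eq!(first_byte(section), Some(b'u'));

    // clippy::stable_sort_primitive: an unstable sort of primitives gives the
    // same result as `sort` without the stable sort's extra allocation.
    let mut revs = vec![5, 3, 1, 3];
    revs.sort_unstable();
    assert_eq!(revs, [1, 3, 3, 5]);
}
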
@@ -175,7 +175,7 b' impl<G: Graph> MissingAncestors<G> {'
175 175 ///
176 176 /// This is useful in unit tests, but also setdiscovery.py does
177 177 /// read the bases attribute of a ancestor.missingancestors instance.
178 pub fn get_bases<'a>(&'a self) -> &'a HashSet<Revision> {
178 pub fn get_bases(&self) -> &HashSet<Revision> {
179 179 &self.bases
180 180 }
181 181
@@ -288,7 +288,7 b' impl<G: Graph> MissingAncestors<G> {'
288 288 .collect();
289 289 let revs_visit = &mut revs;
290 290 let mut both_visit: HashSet<Revision> =
291 revs_visit.intersection(&bases_visit).cloned().collect();
291 revs_visit.intersection(bases_visit).cloned().collect();
292 292 if revs_visit.is_empty() {
293 293 return Ok(Vec::new());
294 294 }
@@ -503,18 +503,18 b' mod tests {'
503 503 MissingAncestors::new(SampleGraph, [5, 3, 1, 3].iter().cloned());
504 504 let mut as_vec: Vec<Revision> =
505 505 missing_ancestors.get_bases().iter().cloned().collect();
506 as_vec.sort();
506 as_vec.sort_unstable();
507 507 assert_eq!(as_vec, [1, 3, 5]);
508 508 assert_eq!(missing_ancestors.max_base, 5);
509 509
510 510 missing_ancestors.add_bases([3, 7, 8].iter().cloned());
511 511 as_vec = missing_ancestors.get_bases().iter().cloned().collect();
512 as_vec.sort();
512 as_vec.sort_unstable();
513 513 assert_eq!(as_vec, [1, 3, 5, 7, 8]);
514 514 assert_eq!(missing_ancestors.max_base, 8);
515 515
516 516 as_vec = missing_ancestors.bases_heads()?.iter().cloned().collect();
517 as_vec.sort();
517 as_vec.sort_unstable();
518 518 assert_eq!(as_vec, [3, 5, 7, 8]);
519 519 Ok(())
520 520 }
@@ -531,7 +531,7 b' mod tests {'
531 531 .remove_ancestors_from(&mut revset)
532 532 .unwrap();
533 533 let mut as_vec: Vec<Revision> = revset.into_iter().collect();
534 as_vec.sort();
534 as_vec.sort_unstable();
535 535 assert_eq!(as_vec.as_slice(), expected);
536 536 }
537 537
@@ -572,6 +572,7 b' mod tests {'
572 572 /// the one in test-ancestor.py. An early version of Rust MissingAncestors
573 573 /// failed this, yet none of the integration tests of the whole suite
574 574 /// catched it.
575 #[allow(clippy::unnecessary_cast)]
575 576 #[test]
576 577 fn test_remove_ancestors_from_case1() {
577 578 let graph: VecGraph = vec![
@@ -117,7 +117,7 b' fn should_ignore(plain: &PlainInfo, sect'
117 117 }
118 118 let sections_to_delete: &[&[u8]] =
119 119 &[b"defaults", b"commands", b"command-templates"];
120 return sections_to_delete.contains(&section);
120 sections_to_delete.contains(&section)
121 121 }
122 122
123 123 impl Config {
@@ -207,7 +207,7 b' impl Config {'
207 207 file_paths.sort();
208 208 for file_path in &file_paths {
209 209 if file_path.extension() == Some(std::ffi::OsStr::new("rc")) {
210 self.add_trusted_file(&file_path)?
210 self.add_trusted_file(file_path)?
211 211 }
212 212 }
213 213 }
@@ -259,7 +259,7 b' impl Config {'
259 259 // `mercurial/helptext/config.txt` suggests it should be reversed
260 260 if let Some(installation_prefix) = hg.parent().and_then(Path::parent) {
261 261 if installation_prefix != root {
262 add_for_prefix(&installation_prefix)?
262 add_for_prefix(installation_prefix)?
263 263 }
264 264 }
265 265 add_for_prefix(root)?;
@@ -348,7 +348,7 b' impl Config {'
348 348 expected_type: &'static str,
349 349 parse: impl Fn(&'config [u8]) -> Option<T>,
350 350 ) -> Result<Option<T>, ConfigValueParseError> {
351 match self.get_inner(&section, &item) {
351 match self.get_inner(section, item) {
352 352 Some((layer, v)) => match parse(&v.bytes) {
353 353 Some(b) => Ok(Some(b)),
354 354 None => Err(ConfigValueParseError {
@@ -463,7 +463,7 b' impl Config {'
463 463 ) -> Option<(&ConfigLayer, &ConfigValue)> {
464 464 // Filter out the config items that are hidden by [PLAIN].
465 465 // This differs from python hg where we delete them from the config.
466 let should_ignore = should_ignore(&self.plain, &section, &item);
466 let should_ignore = should_ignore(&self.plain, section, item);
467 467 for layer in self.layers.iter().rev() {
468 468 if !layer.trusted {
469 469 continue;
@@ -480,8 +480,8 b' impl Config {'
480 480 {
481 481 continue;
482 482 }
483 if let Some(v) = layer.get(&section, &item) {
484 return Some((&layer, v));
483 if let Some(v) = layer.get(section, item) {
484 return Some((layer, v));
485 485 }
486 486 }
487 487 None
@@ -561,7 +561,7 b' impl Config {'
561 561 fn get_all(&self, section: &[u8], item: &[u8]) -> Vec<&[u8]> {
562 562 let mut res = vec![];
563 563 for layer in self.layers.iter().rev() {
564 if let Some(v) = layer.get(&section, &item) {
564 if let Some(v) = layer.get(section, item) {
565 565 res.push(v.bytes.as_ref());
566 566 }
567 567 }
@@ -592,11 +592,11 b' impl Config {'
592 592 add(b"git", b"git", b"1");
593 593 add(b"git", b"showfunc", b"1");
594 594 add(b"git", b"word-diff", b"1");
595 return layer;
595 layer
596 596 }
597 597
598 598 // introduce the tweaked defaults as implied by ui.tweakdefaults
599 pub fn tweakdefaults<'a>(&mut self) -> () {
599 pub fn tweakdefaults(&mut self) {
600 600 self.layers.insert(0, Config::tweakdefaults_layer());
601 601 }
602 602 }
@@ -94,11 +94,7 b' impl ConfigLayer {'
94 94
95 95 /// Returns whether this layer comes from `--config` CLI arguments
96 96 pub(crate) fn is_from_command_line(&self) -> bool {
97 if let ConfigOrigin::CommandLine = self.origin {
98 true
99 } else {
100 false
101 }
97 matches!(self.origin, ConfigOrigin::CommandLine)
102 98 }
103 99
104 100 /// Add an entry to the config, overwriting the old one if already present.
@@ -111,13 +107,13 b' impl ConfigLayer {'
111 107 ) {
112 108 self.sections
113 109 .entry(section)
114 .or_insert_with(|| HashMap::new())
110 .or_insert_with(HashMap::new)
115 111 .insert(item, ConfigValue { bytes: value, line });
116 112 }
117 113
118 114 /// Returns the config value in `<section>.<item>` if it exists
119 115 pub fn get(&self, section: &[u8], item: &[u8]) -> Option<&ConfigValue> {
120 Some(self.sections.get(section)?.get(item)?)
116 self.sections.get(section)?.get(item)
121 117 }
122 118
123 119 /// Returns the keys defined in the given section
@@ -171,7 +167,7 b' impl ConfigLayer {'
171 167
172 168 while let Some((index, bytes)) = lines_iter.next() {
173 169 let line = Some(index + 1);
174 if let Some(m) = INCLUDE_RE.captures(&bytes) {
170 if let Some(m) = INCLUDE_RE.captures(bytes) {
175 171 let filename_bytes = &m[1];
176 172 let filename_bytes = crate::utils::expand_vars(filename_bytes);
177 173 // `Path::parent` only fails for the root directory,
@@ -205,18 +201,18 b' impl ConfigLayer {'
205 201 }
206 202 }
207 203 }
208 } else if let Some(_) = EMPTY_RE.captures(&bytes) {
209 } else if let Some(m) = SECTION_RE.captures(&bytes) {
204 } else if EMPTY_RE.captures(bytes).is_some() {
205 } else if let Some(m) = SECTION_RE.captures(bytes) {
210 206 section = m[1].to_vec();
211 } else if let Some(m) = ITEM_RE.captures(&bytes) {
207 } else if let Some(m) = ITEM_RE.captures(bytes) {
212 208 let item = m[1].to_vec();
213 209 let mut value = m[2].to_vec();
214 210 loop {
215 211 match lines_iter.peek() {
216 212 None => break,
217 213 Some((_, v)) => {
218 if let Some(_) = COMMENT_RE.captures(&v) {
219 } else if let Some(_) = CONT_RE.captures(&v) {
214 if COMMENT_RE.captures(v).is_some() {
215 } else if CONT_RE.captures(v).is_some() {
220 216 value.extend(b"\n");
221 217 value.extend(&m[1]);
222 218 } else {
@@ -227,7 +223,7 b' impl ConfigLayer {'
227 223 lines_iter.next();
228 224 }
229 225 current_layer.add(section.clone(), item, value, line);
230 } else if let Some(m) = UNSET_RE.captures(&bytes) {
226 } else if let Some(m) = UNSET_RE.captures(bytes) {
231 227 if let Some(map) = current_layer.sections.get_mut(&section) {
232 228 map.remove(&m[1]);
233 229 }
@@ -261,7 +257,7 b' impl DisplayBytes for ConfigLayer {'
261 257 sections.sort_by(|e0, e1| e0.0.cmp(e1.0));
262 258
263 259 for (section, items) in sections.into_iter() {
264 let mut items: Vec<_> = items.into_iter().collect();
260 let mut items: Vec<_> = items.iter().collect();
265 261 items.sort_by(|e0, e1| e0.0.cmp(e1.0));
266 262
267 263 for (item, config_entry) in items {
@@ -200,11 +200,7 b' fn parse_list_without_trim_start(input: '
200 200
201 201 // https://docs.python.org/3/library/stdtypes.html?#bytes.isspace
202 202 fn is_space(byte: u8) -> bool {
203 if let b' ' | b'\t' | b'\n' | b'\r' | b'\x0b' | b'\x0c' = byte {
204 true
205 } else {
206 false
207 }
203 matches!(byte, b' ' | b'\t' | b'\n' | b'\r' | b'\x0b' | b'\x0c')
208 204 }
209 205 }
210 206
@@ -59,7 +59,7 b' impl CopySource {'
59 59 Self {
60 60 rev,
61 61 path: winner.path,
62 overwritten: overwritten,
62 overwritten,
63 63 }
64 64 }
65 65
@@ -489,7 +489,7 b" fn chain_changes<'a>("
489 489 if cs1 == cs2 {
490 490 cs1.mark_delete(current_rev);
491 491 } else {
492 cs1.mark_delete_with_pair(current_rev, &cs2);
492 cs1.mark_delete_with_pair(current_rev, cs2);
493 493 }
494 494 e2.insert(cs1.clone());
495 495 }
@@ -513,15 +513,14 b' fn add_one_copy('
513 513 ) {
514 514 let dest = path_map.tokenize(path_dest);
515 515 let source = path_map.tokenize(path_source);
516 let entry;
517 if let Some(v) = base_copies.get(&source) {
518 entry = match &v.path {
516 let entry = if let Some(v) = base_copies.get(&source) {
517 match &v.path {
519 518 Some(path) => Some((*(path)).to_owned()),
520 519 None => Some(source.to_owned()),
521 520 }
522 521 } else {
523 entry = Some(source.to_owned());
524 }
522 Some(source.to_owned())
523 };
525 524 // Each new entry is introduced by the children, we
526 525 // record this information as we will need it to take
527 526 // the right decision when merging conflicting copy
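
The `add_one_copy` hunk above replaces a declare-then-assign pattern (`let entry;` followed by an assignment in every branch) with a single `let entry = if ... { ... } else { ... };` expression, as suggested by clippy's `needless_late_init`. A minimal standalone sketch of the same rewrite, with invented names:

// Hypothetical example, not hg-core code: clippy::needless_late_init
// prefers binding directly from the `if`/`match` expression instead of
// declaring the variable first and assigning it in every branch.
fn lookup(base: Option<&str>, fallback: &str) -> String {
    // Before:
    //     let entry;
    //     if let Some(v) = base {
    //         entry = v.to_owned();
    //     } else {
    //         entry = fallback.to_owned();
    //     }
    // After: the branches become the expression's value.
    let entry = if let Some(v) = base {
        v.to_owned()
    } else {
        fallback.to_owned()
    };
    entry
}

fn main() {
    assert_eq!(lookup(Some("copied"), "source"), "copied");
    assert_eq!(lookup(None, "source"), "source");
}
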
@@ -563,17 +562,15 b' fn merge_copies_dict('
563 562 MergePick::Major | MergePick::Any => (src_major, src_minor),
564 563 MergePick::Minor => (src_minor, src_major),
565 564 };
566 MergeResult::UseNewValue(CopySource::new_from_merge(
565 MergeResult::NewValue(CopySource::new_from_merge(
567 566 current_merge,
568 567 winner,
569 568 loser,
570 569 ))
571 570 } else {
572 571 match pick {
573 MergePick::Any | MergePick::Major => {
574 MergeResult::UseRightValue
575 }
576 MergePick::Minor => MergeResult::UseLeftValue,
572 MergePick::Any | MergePick::Major => MergeResult::RightValue,
573 MergePick::Minor => MergeResult::LeftValue,
577 574 }
578 575 }
579 576 })
@@ -181,7 +181,7 b' mod tests {'
181 181 let mut revs: HashSet<Revision> = revs.iter().cloned().collect();
182 182 retain_heads(graph, &mut revs)?;
183 183 let mut as_vec: Vec<Revision> = revs.iter().cloned().collect();
184 as_vec.sort();
184 as_vec.sort_unstable();
185 185 Ok(as_vec)
186 186 }
187 187
@@ -206,7 +206,7 b' mod tests {'
206 206 ) -> Result<Vec<Revision>, GraphError> {
207 207 let heads = heads(graph, revs.iter())?;
208 208 let mut as_vec: Vec<Revision> = heads.iter().cloned().collect();
209 as_vec.sort();
209 as_vec.sort_unstable();
210 210 Ok(as_vec)
211 211 }
212 212
@@ -231,7 +231,7 b' mod tests {'
231 231 ) -> Result<Vec<Revision>, GraphError> {
232 232 let set: HashSet<_> = revs.iter().cloned().collect();
233 233 let mut as_vec = roots(graph, &set)?;
234 as_vec.sort();
234 as_vec.sort_unstable();
235 235 Ok(as_vec)
236 236 }
237 237
@@ -32,7 +32,7 b' impl DirstateParents {'
32 32 };
33 33
34 34 pub fn is_merge(&self) -> bool {
35 return !(self.p2 == NULL_NODE);
35 !(self.p2 == NULL_NODE)
36 36 }
37 37 }
38 38
@@ -232,7 +232,7 b' mod tests {'
232 232 #[test]
233 233 fn test_delete_path_empty_path() {
234 234 let mut map =
235 DirsMultiset::from_manifest(&vec![HgPathBuf::new()]).unwrap();
235 DirsMultiset::from_manifest(&[HgPathBuf::new()]).unwrap();
236 236 let path = HgPath::new(b"");
237 237 assert_eq!(Ok(()), map.delete_path(path));
238 238 assert_eq!(
@@ -180,11 +180,7 b' impl TruncatedTimestamp {'
180 180 if self.truncated_seconds != other.truncated_seconds {
181 181 false
182 182 } else if self.nanoseconds == 0 || other.nanoseconds == 0 {
183 if self.second_ambiguous {
184 false
185 } else {
186 true
187 }
183 !self.second_ambiguous
188 184 } else {
189 185 self.nanoseconds == other.nanoseconds
190 186 }
@@ -706,9 +702,9 b' impl TryFrom<u8> for EntryState {'
706 702 }
707 703 }
708 704
709 impl Into<u8> for EntryState {
710 fn into(self) -> u8 {
711 match self {
705 impl From<EntryState> for u8 {
706 fn from(val: EntryState) -> Self {
707 match val {
712 708 EntryState::Normal => b'n',
713 709 EntryState::Added => b'a',
714 710 EntryState::Removed => b'r',
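
Switching from `impl Into<u8> for EntryState` to `impl From<EntryState> for u8` follows clippy's `from_over_into`: the standard library provides a blanket `impl<T, U> Into<U> for T where U: From<T>`, so implementing `From` yields both conversions. A minimal sketch with a hypothetical enum (not the hg-core type):

enum State {
    Normal,
    Added,
}

// Implementing `From` on the target type...
impl From<State> for u8 {
    fn from(val: State) -> Self {
        match val {
            State::Normal => b'n',
            State::Added => b'a',
        }
    }
}

fn main() {
    // ...makes both directions of the conversion API available:
    assert_eq!(u8::from(State::Normal), b'n');
    let byte: u8 = State::Added.into(); // via the blanket Into impl
    assert_eq!(byte, b'a');
}
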
@@ -320,9 +320,7 b" impl<'tree, 'on_disk> NodeRef<'tree, 'on"
320 320 on_disk: &'on_disk [u8],
321 321 ) -> Result<Option<&'tree HgPath>, DirstateV2ParseError> {
322 322 match self {
323 NodeRef::InMemory(_path, node) => {
324 Ok(node.copy_source.as_ref().map(|s| &**s))
325 }
323 NodeRef::InMemory(_path, node) => Ok(node.copy_source.as_deref()),
326 324 NodeRef::OnDisk(node) => node.copy_source(on_disk),
327 325 }
328 326 }
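
In the hunk above, `copy_source.as_ref().map(|s| &**s)` collapses to `as_deref()`: `Option::as_deref` turns an `Option<P>` where `P: Deref` into `Option<&P::Target>`. A small illustration with plain `String`/`str` rather than the `HgPath` types used above:

fn main() {
    let copy_source: Option<String> = Some(String::from("dir/old_name"));

    // Equivalent spellings; clippy prefers the second:
    let verbose: Option<&str> = copy_source.as_ref().map(|s| &**s);
    let concise: Option<&str> = copy_source.as_deref();

    assert_eq!(verbose, concise);
    assert_eq!(concise, Some("dir/old_name"));
}
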
@@ -340,9 +338,9 b" impl<'tree, 'on_disk> NodeRef<'tree, 'on"
340 338 Cow::Owned(in_memory) => BorrowedPath::InMemory(in_memory),
341 339 })
342 340 }
343 NodeRef::OnDisk(node) => node
344 .copy_source(on_disk)?
345 .map(|source| BorrowedPath::OnDisk(source)),
341 NodeRef::OnDisk(node) => {
342 node.copy_source(on_disk)?.map(BorrowedPath::OnDisk)
343 }
346 344 })
347 345 }
348 346
@@ -418,10 +416,7 b' impl Default for NodeData {'
418 416
419 417 impl NodeData {
420 418 fn has_entry(&self) -> bool {
421 match self {
422 NodeData::Entry(_) => true,
423 _ => false,
424 }
419 matches!(self, NodeData::Entry(_))
425 420 }
426 421
427 422 fn as_entry(&self) -> Option<&DirstateEntry> {
@@ -509,7 +504,7 b" impl<'on_disk> DirstateMap<'on_disk> {"
509 504 Ok(())
510 505 },
511 506 )?;
512 let parents = Some(parents.clone());
507 let parents = Some(*parents);
513 508
514 509 Ok((map, parents))
515 510 }
@@ -681,10 +676,8 b" impl<'on_disk> DirstateMap<'on_disk> {"
681 676 .checked_sub(1)
682 677 .expect("tracked count to be >= 0");
683 678 }
684 } else {
685 if wc_tracked {
686 ancestor.tracked_descendants_count += 1;
687 }
679 } else if wc_tracked {
680 ancestor.tracked_descendants_count += 1;
688 681 }
689 682 })?;
690 683
@@ -734,7 +727,7 b" impl<'on_disk> DirstateMap<'on_disk> {"
734 727 ancestor.tracked_descendants_count += tracked_count_increment;
735 728 })?;
736 729 if let Some(old_entry) = old_entry_opt {
737 let mut e = old_entry.clone();
730 let mut e = old_entry;
738 731 if e.tracked() {
739 732 // XXX
740 733 // This is probably overkill for more case, but we need this to
@@ -775,7 +768,7 b" impl<'on_disk> DirstateMap<'on_disk> {"
775 768 .expect("tracked_descendants_count should be >= 0");
776 769 })?
777 770 .expect("node should exist");
778 let mut new_entry = old_entry.clone();
771 let mut new_entry = old_entry;
779 772 new_entry.set_untracked();
780 773 node.data = NodeData::Entry(new_entry);
781 774 Ok(())
@@ -803,7 +796,7 b" impl<'on_disk> DirstateMap<'on_disk> {"
803 796 }
804 797 })?
805 798 .expect("node should exist");
806 let mut new_entry = old_entry.clone();
799 let mut new_entry = old_entry;
807 800 new_entry.set_clean(mode, size, mtime);
808 801 node.data = NodeData::Entry(new_entry);
809 802 Ok(())
@@ -1364,7 +1357,7 b' impl OwningDirstateMap {'
1364 1357 value: &HgPath,
1365 1358 ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
1366 1359 self.with_dmap_mut(|map| {
1367 let node = map.get_or_insert_node(&key, |_ancestor| {})?;
1360 let node = map.get_or_insert_node(key, |_ancestor| {})?;
1368 1361 let had_copy_source = node.copy_source.is_none();
1369 1362 let old = node
1370 1363 .copy_source
@@ -1864,11 +1857,8 b' mod tests {'
1864 1857 map.set_untracked(p(b"some/nested/removed"))?;
1865 1858 assert_eq!(map.get_map().unreachable_bytes, 0);
1866 1859
1867 match map.get_map().root {
1868 ChildNodes::InMemory(_) => {
1869 panic!("root should not have been mutated")
1870 }
1871 _ => (),
1860 if let ChildNodes::InMemory(_) = map.get_map().root {
1861 panic!("root should not have been mutated")
1872 1862 }
1873 1863 // We haven't mutated enough (nothing, actually), we should still be in
1874 1864 // the append strategy
@@ -1879,9 +1869,8 b' mod tests {'
1879 1869 let unreachable_bytes = map.get_map().unreachable_bytes;
1880 1870 assert!(unreachable_bytes > 0);
1881 1871
1882 match map.get_map().root {
1883 ChildNodes::OnDisk(_) => panic!("root should have been mutated"),
1884 _ => (),
1872 if let ChildNodes::OnDisk(_) = map.get_map().root {
1873 panic!("root should have been mutated")
1885 1874 }
1886 1875
1887 1876 // This should not mutate the structure either, since `root` has
@@ -1889,22 +1878,20 b' mod tests {'
1889 1878 map.set_untracked(p(b"merged"))?;
1890 1879 assert_eq!(map.get_map().unreachable_bytes, unreachable_bytes);
1891 1880
1892 match map.get_map().get_node(p(b"other/added_with_p2"))?.unwrap() {
1893 NodeRef::InMemory(_, _) => {
1894 panic!("'other/added_with_p2' should not have been mutated")
1895 }
1896 _ => (),
1881 if let NodeRef::InMemory(_, _) =
1882 map.get_map().get_node(p(b"other/added_with_p2"))?.unwrap()
1883 {
1884 panic!("'other/added_with_p2' should not have been mutated")
1897 1885 }
1898 1886 // But this should, since it's in a different path
1899 1887 // than `<root>some/nested/add`
1900 1888 map.set_untracked(p(b"other/added_with_p2"))?;
1901 1889 assert!(map.get_map().unreachable_bytes > unreachable_bytes);
1902 1890
1903 match map.get_map().get_node(p(b"other/added_with_p2"))?.unwrap() {
1904 NodeRef::OnDisk(_) => {
1905 panic!("'other/added_with_p2' should have been mutated")
1906 }
1907 _ => (),
1891 if let NodeRef::OnDisk(_) =
1892 map.get_map().get_node(p(b"other/added_with_p2"))?.unwrap()
1893 {
1894 panic!("'other/added_with_p2' should have been mutated")
1908 1895 }
1909 1896
1910 1897 // We have rewritten most of the tree, we should create a new file
@@ -246,11 +246,9 b" impl<'on_disk> Docket<'on_disk> {"
246 246 pub fn parents(&self) -> DirstateParents {
247 247 use crate::Node;
248 248 let p1 = Node::try_from(&self.header.parent_1[..USED_NODE_ID_BYTES])
249 .unwrap()
250 .clone();
249 .unwrap();
251 250 let p2 = Node::try_from(&self.header.parent_2[..USED_NODE_ID_BYTES])
252 .unwrap()
253 .clone();
251 .unwrap();
254 252 DirstateParents { p1, p2 }
255 253 }
256 254
@@ -322,7 +320,7 b' impl Node {'
322 320 read_hg_path(on_disk, self.full_path)
323 321 }
324 322
325 pub(super) fn base_name_start<'on_disk>(
323 pub(super) fn base_name_start(
326 324 &self,
327 325 ) -> Result<usize, DirstateV2ParseError> {
328 326 let start = self.base_name_start.get();
@@ -355,7 +353,7 b' impl Node {'
355 353 ))
356 354 }
357 355
358 pub(super) fn has_copy_source<'on_disk>(&self) -> bool {
356 pub(super) fn has_copy_source(&self) -> bool {
359 357 self.copy_source.start.get() != 0
360 358 }
361 359
@@ -414,12 +412,12 b' impl Node {'
414 412 } else {
415 413 libc::S_IFREG
416 414 };
417 let permisions = if self.flags().contains(Flags::MODE_EXEC_PERM) {
415 let permissions = if self.flags().contains(Flags::MODE_EXEC_PERM) {
418 416 0o755
419 417 } else {
420 418 0o644
421 419 };
422 (file_type | permisions).into()
420 file_type | permissions
423 421 }
424 422
425 423 fn mtime(&self) -> Result<TruncatedTimestamp, DirstateV2ParseError> {
@@ -24,7 +24,7 b' impl OwningDirstateMap {'
24 24
25 25 OwningDirstateMapBuilder {
26 26 on_disk,
27 map_builder: |bytes| DirstateMap::empty(&bytes),
27 map_builder: |bytes| DirstateMap::empty(bytes),
28 28 }
29 29 .build()
30 30 }
@@ -42,7 +42,7 b' impl OwningDirstateMap {'
42 42 OwningDirstateMapTryBuilder {
43 43 on_disk,
44 44 map_builder: |bytes| {
45 DirstateMap::new_v1(&bytes).map(|(dmap, p)| {
45 DirstateMap::new_v1(bytes).map(|(dmap, p)| {
46 46 parents = p.unwrap_or(DirstateParents::NULL);
47 47 dmap
48 48 })
@@ -66,7 +66,7 b' impl OwningDirstateMap {'
66 66 OwningDirstateMapTryBuilder {
67 67 on_disk,
68 68 map_builder: |bytes| {
69 DirstateMap::new_v2(&bytes, data_size, metadata)
69 DirstateMap::new_v2(bytes, data_size, metadata)
70 70 },
71 71 }
72 72 .try_build()
@@ -154,7 +154,7 b" pub fn status<'dirstate>("
154 154 hg_path,
155 155 &DirEntry {
156 156 hg_path: Cow::Borrowed(HgPath::new(b"")),
157 fs_path: Cow::Borrowed(&root_dir),
157 fs_path: Cow::Borrowed(root_dir),
158 158 symlink_metadata: None,
159 159 file_type: FakeFileType::Directory,
160 160 },
@@ -245,7 +245,7 b" impl<'a> HasIgnoredAncestor<'a> {"
245 245 None => false,
246 246 Some(parent) => {
247 247 *(parent.cache.get_or_init(|| {
248 parent.force(ignore_fn) || ignore_fn(&self.path)
248 parent.force(ignore_fn) || ignore_fn(self.path)
249 249 }))
250 250 }
251 251 }
@@ -402,7 +402,7 b" impl<'a, 'tree, 'on_disk> StatusCommon<'"
402 402 let entry = DirEntry {
403 403 hg_path: Cow::Borrowed(
404 404 dirstate_node
405 .full_path(&self.dmap.on_disk)?,
405 .full_path(self.dmap.on_disk)?,
406 406 ),
407 407 fs_path: Cow::Borrowed(&fs_path),
408 408 symlink_metadata: Some(fs_metadata),
@@ -420,7 +420,8 b" impl<'a, 'tree, 'on_disk> StatusCommon<'"
420 420 Err(error) => {
421 421 let hg_path =
422 422 dirstate_node.full_path(self.dmap.on_disk)?;
423 Ok(self.io_error(error, hg_path))
423 self.io_error(error, hg_path);
424 Ok(())
424 425 }
425 426 }
426 427 })
@@ -472,28 +473,25 b" impl<'a, 'tree, 'on_disk> StatusCommon<'"
472 473 .par_bridge()
473 474 .map(|pair| {
474 475 use itertools::EitherOrBoth::*;
475 let has_dirstate_node_or_is_ignored;
476 match pair {
476 let has_dirstate_node_or_is_ignored = match pair {
477 477 Both(dirstate_node, fs_entry) => {
478 478 self.traverse_fs_and_dirstate(
479 &fs_entry,
479 fs_entry,
480 480 dirstate_node,
481 481 has_ignored_ancestor,
482 482 )?;
483 has_dirstate_node_or_is_ignored = true
483 true
484 484 }
485 485 Left(dirstate_node) => {
486 486 self.traverse_dirstate_only(dirstate_node)?;
487 has_dirstate_node_or_is_ignored = true;
487 true
488 488 }
489 Right(fs_entry) => {
490 has_dirstate_node_or_is_ignored = self.traverse_fs_only(
491 has_ignored_ancestor.force(&self.ignore_fn),
492 directory_hg_path,
493 fs_entry,
494 )
495 }
496 }
489 Right(fs_entry) => self.traverse_fs_only(
490 has_ignored_ancestor.force(&self.ignore_fn),
491 directory_hg_path,
492 fs_entry,
493 ),
494 };
497 495 Ok(has_dirstate_node_or_is_ignored)
498 496 })
499 497 .try_reduce(|| true, |a, b| Ok(a && b))
@@ -524,7 +522,7 b" impl<'a, 'tree, 'on_disk> StatusCommon<'"
524 522 .push(hg_path.detach_from_tree())
525 523 }
526 524 let is_ignored = HasIgnoredAncestor::create(
527 Some(&has_ignored_ancestor),
525 Some(has_ignored_ancestor),
528 526 hg_path,
529 527 );
530 528 let is_at_repo_root = false;
@@ -544,14 +542,14 b" impl<'a, 'tree, 'on_disk> StatusCommon<'"
544 542 outdated_dircache,
545 543 )?
546 544 } else {
547 if file_or_symlink && self.matcher.matches(&hg_path) {
545 if file_or_symlink && self.matcher.matches(hg_path) {
548 546 if let Some(entry) = dirstate_node.entry()? {
549 547 if !entry.any_tracked() {
550 548 // Forward-compat if we start tracking unknown/ignored
551 549 // files for caching reasons
552 550 self.mark_unknown_or_ignored(
553 551 has_ignored_ancestor.force(&self.ignore_fn),
554 &hg_path,
552 hg_path,
555 553 );
556 554 }
557 555 if entry.added() {
@@ -620,12 +618,13 b" impl<'a, 'tree, 'on_disk> StatusCommon<'"
620 618 Ok(meta) => meta,
621 619 Err(_) => return Ok(()),
622 620 };
623 let directory_mtime = if let Ok(option) =
624 TruncatedTimestamp::for_reliable_mtime_of(&metadata, status_start)
625 {
626 if let Some(directory_mtime) = option {
627 directory_mtime
628 } else {
621
622 let directory_mtime = match TruncatedTimestamp::for_reliable_mtime_of(
623 &metadata,
624 status_start,
625 ) {
626 Ok(Some(directory_mtime)) => directory_mtime,
627 Ok(None) => {
629 628 // The directory was modified too recently,
630 629 // don’t cache its `read_dir` results.
631 630 //
@@ -643,9 +642,10 b" impl<'a, 'tree, 'on_disk> StatusCommon<'"
643 642 // by the same script.
644 643 return Ok(());
645 644 }
646 } else {
647 // OS/libc does not support mtime?
648 return Ok(());
645 Err(_) => {
646 // OS/libc does not support mtime?
647 return Ok(());
648 }
649 649 };
650 650 // We’ve observed (through `status_start`) that time has
651 651 // “progressed” since `directory_mtime`, so any further
@@ -713,8 +713,9 b" impl<'a, 'tree, 'on_disk> StatusCommon<'"
713 713 {
714 714 self.push_outcome(Outcome::Modified, dirstate_node)?
715 715 } else {
716 let mtime_looks_clean;
717 if let Some(dirstate_mtime) = entry.truncated_mtime() {
716 let mtime_looks_clean = if let Some(dirstate_mtime) =
717 entry.truncated_mtime()
718 {
718 719 let fs_mtime = TruncatedTimestamp::for_mtime_of(&fs_metadata)
719 720 .expect("OS/libc does not support mtime?");
720 721 // There might be a change in the future if for example the
@@ -722,10 +723,10 b" impl<'a, 'tree, 'on_disk> StatusCommon<'"
722 723 // case where the issues the user would face
723 724 // would be a lot worse and there is nothing we
724 725 // can really do.
725 mtime_looks_clean = fs_mtime.likely_equal(dirstate_mtime)
726 fs_mtime.likely_equal(dirstate_mtime)
726 727 } else {
727 728 // No mtime in the dirstate entry
728 mtime_looks_clean = false
729 false
729 730 };
730 731 if !mtime_looks_clean {
731 732 self.push_outcome(Outcome::Unsure, dirstate_node)?
@@ -769,7 +770,7 b" impl<'a, 'tree, 'on_disk> StatusCommon<'"
769 770 if entry.removed() {
770 771 self.push_outcome(Outcome::Removed, dirstate_node)?
771 772 } else {
772 self.push_outcome(Outcome::Deleted, &dirstate_node)?
773 self.push_outcome(Outcome::Deleted, dirstate_node)?
773 774 }
774 775 }
775 776 }
@@ -816,26 +817,24 b" impl<'a, 'tree, 'on_disk> StatusCommon<'"
816 817 }
817 818 }
818 819 is_ignored
820 } else if file_or_symlink {
821 if self.matcher.matches(&hg_path) {
822 self.mark_unknown_or_ignored(
823 has_ignored_ancestor,
824 &BorrowedPath::InMemory(&hg_path),
825 )
826 } else {
827 // We haven’t computed whether this path is ignored. It
828 // might not be, and a future run of status might have a
829 // different matcher that matches it. So treat it as not
830 // ignored. That is, inhibit readdir caching of the parent
831 // directory.
832 false
833 }
819 834 } else {
820 if file_or_symlink {
821 if self.matcher.matches(&hg_path) {
822 self.mark_unknown_or_ignored(
823 has_ignored_ancestor,
824 &BorrowedPath::InMemory(&hg_path),
825 )
826 } else {
827 // We haven’t computed whether this path is ignored. It
828 // might not be, and a future run of status might have a
829 // different matcher that matches it. So treat it as not
830 // ignored. That is, inhibit readdir caching of the parent
831 // directory.
832 false
833 }
834 } else {
835 // This is neither a directory, a plain file, or a symlink.
836 // Treat it like an ignored file.
837 true
838 }
835 // This is neither a directory, a plain file, or a symlink.
836 // Treat it like an ignored file.
837 true
839 838 }
840 839 }
841 840
@@ -845,7 +844,7 b" impl<'a, 'tree, 'on_disk> StatusCommon<'"
845 844 has_ignored_ancestor: bool,
846 845 hg_path: &BorrowedPath<'_, 'on_disk>,
847 846 ) -> bool {
848 let is_ignored = has_ignored_ancestor || (self.ignore_fn)(&hg_path);
847 let is_ignored = has_ignored_ancestor || (self.ignore_fn)(hg_path);
849 848 if is_ignored {
850 849 if self.options.list_ignored {
851 850 self.push_outcome_without_copy_source(
@@ -853,13 +852,8 b" impl<'a, 'tree, 'on_disk> StatusCommon<'"
853 852 hg_path,
854 853 )
855 854 }
856 } else {
857 if self.options.list_unknown {
858 self.push_outcome_without_copy_source(
859 Outcome::Unknown,
860 hg_path,
861 )
862 }
855 } else if self.options.list_unknown {
856 self.push_outcome_without_copy_source(Outcome::Unknown, hg_path)
863 857 }
864 858 is_ignored
865 859 }
@@ -194,7 +194,7 b' impl<G: Graph + Clone> PartialDiscovery<'
194 194 size: usize,
195 195 ) -> Vec<Revision> {
196 196 if !self.randomize {
197 sample.sort();
197 sample.sort_unstable();
198 198 sample.truncate(size);
199 199 return sample;
200 200 }
@@ -513,14 +513,14 b' mod tests {'
513 513 ) -> Vec<Revision> {
514 514 let mut as_vec: Vec<Revision> =
515 515 disco.undecided.as_ref().unwrap().iter().cloned().collect();
516 as_vec.sort();
516 as_vec.sort_unstable();
517 517 as_vec
518 518 }
519 519
520 520 fn sorted_missing(disco: &PartialDiscovery<SampleGraph>) -> Vec<Revision> {
521 521 let mut as_vec: Vec<Revision> =
522 522 disco.missing.iter().cloned().collect();
523 as_vec.sort();
523 as_vec.sort_unstable();
524 524 as_vec
525 525 }
526 526
@@ -529,7 +529,7 b' mod tests {'
529 529 ) -> Result<Vec<Revision>, GraphError> {
530 530 let mut as_vec: Vec<Revision> =
531 531 disco.common_heads()?.iter().cloned().collect();
532 as_vec.sort();
532 as_vec.sort_unstable();
533 533 Ok(as_vec)
534 534 }
535 535
@@ -621,7 +621,7 b' mod tests {'
621 621 disco.undecided = Some((1..=13).collect());
622 622
623 623 let mut sample_vec = disco.take_quick_sample(vec![], 4)?;
624 sample_vec.sort();
624 sample_vec.sort_unstable();
625 625 assert_eq!(sample_vec, vec![10, 11, 12, 13]);
626 626 Ok(())
627 627 }
@@ -632,7 +632,7 b' mod tests {'
632 632 disco.ensure_undecided()?;
633 633
634 634 let mut sample_vec = disco.take_quick_sample(vec![12], 4)?;
635 sample_vec.sort();
635 sample_vec.sort_unstable();
636 636 // r12's only parent is r9, whose unique grand-parent through the
637 637 // diamond shape is r4. This ends there because the distance from r4
638 638 // to the root is only 3.
@@ -650,11 +650,11 b' mod tests {'
650 650 assert_eq!(cache.get(&10).cloned(), None);
651 651
652 652 let mut children_4 = cache.get(&4).cloned().unwrap();
653 children_4.sort();
653 children_4.sort_unstable();
654 654 assert_eq!(children_4, vec![5, 6, 7]);
655 655
656 656 let mut children_7 = cache.get(&7).cloned().unwrap();
657 children_7.sort();
657 children_7.sort_unstable();
658 658 assert_eq!(children_7, vec![9, 11]);
659 659
660 660 Ok(())
@@ -684,7 +684,7 b' mod tests {'
684 684 let (sample_set, size) = disco.bidirectional_sample(7)?;
685 685 assert_eq!(size, 7);
686 686 let mut sample: Vec<Revision> = sample_set.into_iter().collect();
687 sample.sort();
687 sample.sort_unstable();
688 688 // our DAG is a bit too small for the results to be really interesting
689 689 // at least it shows that
690 690 // - we went both ways
@@ -313,7 +313,7 b' pub fn build_single_regex('
313 313 PatternSyntax::RootGlob
314 314 | PatternSyntax::Path
315 315 | PatternSyntax::RelGlob
316 | PatternSyntax::RootFiles => normalize_path_bytes(&pattern),
316 | PatternSyntax::RootFiles => normalize_path_bytes(pattern),
317 317 PatternSyntax::Include | PatternSyntax::SubInclude => {
318 318 return Err(PatternError::NonRegexPattern(entry.clone()))
319 319 }
@@ -368,7 +368,7 b' pub fn parse_pattern_file_contents('
368 368 let mut warnings: Vec<PatternFileWarning> = vec![];
369 369
370 370 let mut current_syntax =
371 default_syntax_override.unwrap_or(b"relre:".as_ref());
371 default_syntax_override.unwrap_or_else(|| b"relre:".as_ref());
372 372
373 373 for (line_number, mut line) in lines.split(|c| *c == b'\n').enumerate() {
374 374 let line_number = line_number + 1;
@@ -402,7 +402,7 b' pub fn parse_pattern_file_contents('
402 402 continue;
403 403 }
404 404
405 let mut line_syntax: &[u8] = &current_syntax;
405 let mut line_syntax: &[u8] = current_syntax;
406 406
407 407 for (s, rels) in SYNTAXES.iter() {
408 408 if let Some(rest) = line.drop_prefix(rels) {
@@ -418,7 +418,7 b' pub fn parse_pattern_file_contents('
418 418 }
419 419
420 420 inputs.push(IgnorePattern::new(
421 parse_pattern_syntax(&line_syntax).map_err(|e| match e {
421 parse_pattern_syntax(line_syntax).map_err(|e| match e {
422 422 PatternError::UnsupportedSyntax(syntax) => {
423 423 PatternError::UnsupportedSyntaxInFile(
424 424 syntax,
@@ -428,7 +428,7 b' pub fn parse_pattern_file_contents('
428 428 }
429 429 _ => e,
430 430 })?,
431 &line,
431 line,
432 432 file_path,
433 433 ));
434 434 }
@@ -502,7 +502,7 b' pub fn get_patterns_from_file('
502 502 }
503 503 PatternSyntax::SubInclude => {
504 504 let mut sub_include = SubInclude::new(
505 &root_dir,
505 root_dir,
506 506 &entry.pattern,
507 507 &entry.source,
508 508 )?;
@@ -564,11 +564,11 b' impl SubInclude {'
564 564 let prefix = canonical_path(root_dir, root_dir, new_root)?;
565 565
566 566 Ok(Self {
567 prefix: path_to_hg_path_buf(prefix).and_then(|mut p| {
567 prefix: path_to_hg_path_buf(prefix).map(|mut p| {
568 568 if !p.is_empty() {
569 569 p.push_byte(b'/');
570 570 }
571 Ok(p)
571 p
572 572 })?,
573 573 path: path.to_owned(),
574 574 root: new_root.to_owned(),
@@ -107,7 +107,7 b' fn unlock(hg_vfs: Vfs, lock_filename: &s'
107 107 fn lock_should_be_broken(data: &Option<String>) -> bool {
108 108 (|| -> Option<bool> {
109 109 let (prefix, pid) = data.as_ref()?.split_once(':')?;
110 if prefix != &*LOCK_PREFIX {
110 if prefix != *LOCK_PREFIX {
111 111 return Some(false);
112 112 }
113 113 let process_is_running;
@@ -144,6 +144,8 b' lazy_static::lazy_static! {'
144 144
145 145 /// Same as https://github.com/python/cpython/blob/v3.10.0/Modules/socketmodule.c#L5414
146 146 const BUFFER_SIZE: usize = 1024;
147 // This cast is *needed* for platforms with signed chars
148 #[allow(clippy::unnecessary_cast)]
147 149 let mut buffer = [0 as libc::c_char; BUFFER_SIZE];
148 150 let hostname_bytes = unsafe {
149 151 let result = libc::gethostname(buffer.as_mut_ptr(), BUFFER_SIZE);
@@ -302,11 +302,11 b" impl<'a> Matcher for IncludeMatcher<'a> "
302 302 }
303 303
304 304 fn matches(&self, filename: &HgPath) -> bool {
305 (self.match_fn)(filename.as_ref())
305 (self.match_fn)(filename)
306 306 }
307 307
308 308 fn visit_children_set(&self, directory: &HgPath) -> VisitChildrenSet {
309 let dir = directory.as_ref();
309 let dir = directory;
310 310 if self.prefix && self.roots.contains(dir) {
311 311 return VisitChildrenSet::Recursive;
312 312 }
@@ -318,11 +318,11 b" impl<'a> Matcher for IncludeMatcher<'a> "
318 318 return VisitChildrenSet::This;
319 319 }
320 320
321 if self.parents.contains(directory.as_ref()) {
321 if self.parents.contains(dir.as_ref()) {
322 322 let multiset = self.get_all_parents_children();
323 323 if let Some(children) = multiset.get(dir) {
324 324 return VisitChildrenSet::Set(
325 children.into_iter().map(HgPathBuf::from).collect(),
325 children.iter().map(HgPathBuf::from).collect(),
326 326 );
327 327 }
328 328 }
@@ -446,7 +446,7 b' impl Matcher for IntersectionMatcher {'
446 446 VisitChildrenSet::This
447 447 }
448 448 (VisitChildrenSet::Set(m1), VisitChildrenSet::Set(m2)) => {
449 let set: HashSet<_> = m1.intersection(&m2).cloned().collect();
449 let set: HashSet<_> = m1.intersection(m2).cloned().collect();
450 450 if set.is_empty() {
451 451 VisitChildrenSet::Empty
452 452 } else {
@@ -699,10 +699,9 b' fn roots_and_dirs('
699 699 PatternSyntax::RootGlob | PatternSyntax::Glob => {
700 700 let mut root = HgPathBuf::new();
701 701 for p in pattern.split(|c| *c == b'/') {
702 if p.iter().any(|c| match *c {
703 b'[' | b'{' | b'*' | b'?' => true,
704 _ => false,
705 }) {
702 if p.iter()
703 .any(|c| matches!(*c, b'[' | b'{' | b'*' | b'?'))
704 {
706 705 break;
707 706 }
708 707 root.push(HgPathBuf::from_bytes(p).as_ref());
@@ -780,10 +779,10 b' fn roots_dirs_and_parents('
780 779
781 780 /// Returns a function that checks whether a given file (in the general sense)
782 781 /// should be matched.
783 fn build_match<'a, 'b>(
782 fn build_match<'a>(
784 783 ignore_patterns: Vec<IgnorePattern>,
785 ) -> PatternResult<(Vec<u8>, IgnoreFnType<'b>)> {
786 let mut match_funcs: Vec<IgnoreFnType<'b>> = vec![];
784 ) -> PatternResult<(Vec<u8>, IgnoreFnType<'a>)> {
785 let mut match_funcs: Vec<IgnoreFnType<'a>> = vec![];
787 786 // For debugging and printing
788 787 let mut patterns = vec![];
789 788
@@ -921,9 +920,8 b" impl<'a> IncludeMatcher<'a> {"
921 920 dirs,
922 921 parents,
923 922 } = roots_dirs_and_parents(&ignore_patterns)?;
924 let prefix = ignore_patterns.iter().all(|k| match k.syntax {
925 PatternSyntax::Path | PatternSyntax::RelPath => true,
926 _ => false,
923 let prefix = ignore_patterns.iter().all(|k| {
924 matches!(k.syntax, PatternSyntax::Path | PatternSyntax::RelPath)
927 925 });
928 926 let (patterns, match_fn) = build_match(ignore_patterns)?;
929 927
@@ -37,9 +37,11 b' pub fn matcher('
37 37 }
38 38 // Treat "narrowspec does not exist" the same as "narrowspec file exists
39 39 // and is empty".
40 let store_spec = repo.store_vfs().try_read(FILENAME)?.unwrap_or(vec![]);
41 let working_copy_spec =
42 repo.hg_vfs().try_read(DIRSTATE_FILENAME)?.unwrap_or(vec![]);
40 let store_spec = repo.store_vfs().try_read(FILENAME)?.unwrap_or_default();
41 let working_copy_spec = repo
42 .hg_vfs()
43 .try_read(DIRSTATE_FILENAME)?
44 .unwrap_or_default();
43 45 if store_spec != working_copy_spec {
44 46 return Err(HgError::abort(
45 47 "working copy's narrowspec is stale",
@@ -70,7 +70,7 b" fn find_files_in_manifest<'query>("
70 70 Some(item) => res.push((file, item)),
71 71 }
72 72 }
73 return Ok((res, missing));
73 Ok((res, missing))
74 74 }
75 75
76 76 /// Output the given revision of files
@@ -94,10 +94,8 b" pub fn cat<'a>("
94 94
95 95 files.sort_unstable();
96 96
97 let (found, missing) = find_files_in_manifest(
98 &manifest,
99 files.into_iter().map(|f| f.as_ref()),
100 )?;
97 let (found, missing) =
98 find_files_in_manifest(&manifest, files.into_iter())?;
101 99
102 100 for (file_path, file_node) in found {
103 101 found_any = true;
@@ -68,9 +68,9 b' impl Repo {'
68 68 return Ok(ancestor.to_path_buf());
69 69 }
70 70 }
71 return Err(RepoError::NotFound {
71 Err(RepoError::NotFound {
72 72 at: current_directory,
73 });
73 })
74 74 }
75 75
76 76 /// Find a repository, either at the given path (which must contain a `.hg`
@@ -87,13 +87,11 b' impl Repo {'
87 87 ) -> Result<Self, RepoError> {
88 88 if let Some(root) = explicit_path {
89 89 if is_dir(root.join(".hg"))? {
90 Self::new_at_path(root.to_owned(), config)
90 Self::new_at_path(root, config)
91 91 } else if is_file(&root)? {
92 92 Err(HgError::unsupported("bundle repository").into())
93 93 } else {
94 Err(RepoError::NotFound {
95 at: root.to_owned(),
96 })
94 Err(RepoError::NotFound { at: root })
97 95 }
98 96 } else {
99 97 let root = Self::find_repo_root()?;
@@ -108,9 +106,8 b' impl Repo {'
108 106 ) -> Result<Self, RepoError> {
109 107 let dot_hg = working_directory.join(".hg");
110 108
111 let mut repo_config_files = Vec::new();
112 repo_config_files.push(dot_hg.join("hgrc"));
113 repo_config_files.push(dot_hg.join("hgrc-not-shared"));
109 let mut repo_config_files =
110 vec![dot_hg.join("hgrc"), dot_hg.join("hgrc-not-shared")];
114 111
115 112 let hg_vfs = Vfs { base: &dot_hg };
116 113 let mut reqs = requirements::load_if_exists(hg_vfs)?;
@@ -254,7 +251,7 b' impl Repo {'
254 251 .hg_vfs()
255 252 .read("dirstate")
256 253 .io_not_found_as_none()?
257 .unwrap_or(Vec::new()))
254 .unwrap_or_default())
258 255 }
259 256
260 257 pub fn dirstate_parents(&self) -> Result<DirstateParents, HgError> {
@@ -277,8 +274,7 b' impl Repo {'
277 274 .set(Some(docket.uuid.to_owned()));
278 275 docket.parents()
279 276 } else {
280 crate::dirstate::parsers::parse_dirstate_parents(&dirstate)?
281 .clone()
277 *crate::dirstate::parsers::parse_dirstate_parents(&dirstate)?
282 278 };
283 279 self.dirstate_parents.set(parents);
284 280 Ok(parents)
@@ -165,7 +165,7 b" impl<'changelog> ChangelogRevisionData<'"
165 165 pub fn files(&self) -> impl Iterator<Item = &HgPath> {
166 166 self.bytes[self.timestamp_end + 1..self.files_end]
167 167 .split(|b| b == &b'\n')
168 .map(|path| HgPath::new(path))
168 .map(HgPath::new)
169 169 }
170 170
171 171 /// The change description.
@@ -49,7 +49,7 b' impl Filelog {'
49 49 file_rev: Revision,
50 50 ) -> Result<FilelogRevisionData, RevlogError> {
51 51 let data: Vec<u8> = self.revlog.get_rev_data(file_rev)?.into_owned();
52 Ok(FilelogRevisionData(data.into()))
52 Ok(FilelogRevisionData(data))
53 53 }
54 54
55 55 /// The given node ID is that of the file as found in a filelog, not of a
@@ -161,7 +161,7 b" impl FilelogEntry<'_> {"
161 161 // this `FilelogEntry` does not have such metadata:
162 162 let file_data_len = uncompressed_len;
163 163
164 return file_data_len != other_len;
164 file_data_len != other_len
165 165 }
166 166
167 167 pub fn data(&self) -> Result<FilelogRevisionData, HgError> {
@@ -21,11 +21,11 b' pub struct IndexHeaderFlags {'
21 21 impl IndexHeaderFlags {
22 22 /// Corresponds to FLAG_INLINE_DATA in python
23 23 pub fn is_inline(self) -> bool {
24 return self.flags & 1 != 0;
24 self.flags & 1 != 0
25 25 }
26 26 /// Corresponds to FLAG_GENERALDELTA in python
27 27 pub fn uses_generaldelta(self) -> bool {
28 return self.flags & 2 != 0;
28 self.flags & 2 != 0
29 29 }
30 30 }
31 31
@@ -35,9 +35,9 b' impl IndexHeader {'
35 35 fn format_flags(&self) -> IndexHeaderFlags {
36 36 // No "unknown flags" check here, unlike in python. Maybe there should
37 37 // be.
38 return IndexHeaderFlags {
38 IndexHeaderFlags {
39 39 flags: BigEndian::read_u16(&self.header_bytes[0..2]),
40 };
40 }
41 41 }
42 42
43 43 /// The only revlog version currently supported by rhg.
@@ -45,7 +45,7 b' impl IndexHeader {'
45 45
46 46 /// Corresponds to `_format_version` in Python.
47 47 fn format_version(&self) -> u16 {
48 return BigEndian::read_u16(&self.header_bytes[2..4]);
48 BigEndian::read_u16(&self.header_bytes[2..4])
49 49 }
50 50
51 51 const EMPTY_INDEX_HEADER: IndexHeader = IndexHeader {
@@ -59,7 +59,7 b' impl IndexHeader {'
59 59 };
60 60
61 61 fn parse(index_bytes: &[u8]) -> Result<IndexHeader, HgError> {
62 if index_bytes.len() == 0 {
62 if index_bytes.is_empty() {
63 63 return Ok(IndexHeader::EMPTY_INDEX_HEADER);
64 64 }
65 65 if index_bytes.len() < 4 {
@@ -67,13 +67,13 b' impl IndexHeader {'
67 67 "corrupted revlog: can't read the index format header",
68 68 ));
69 69 }
70 return Ok(IndexHeader {
70 Ok(IndexHeader {
71 71 header_bytes: {
72 72 let bytes: [u8; 4] =
73 73 index_bytes[0..4].try_into().expect("impossible");
74 74 bytes
75 75 },
76 });
76 })
77 77 }
78 78 }
79 79
@@ -127,8 +127,7 b' impl Index {'
127 127 uses_generaldelta,
128 128 })
129 129 } else {
130 Err(HgError::corrupted("unexpected inline revlog length")
131 .into())
130 Err(HgError::corrupted("unexpected inline revlog length"))
132 131 }
133 132 } else {
134 133 Ok(Self {
@@ -466,8 +465,8 b' mod tests {'
466 465 .with_inline(false)
467 466 .build();
468 467
469 assert_eq!(is_inline(&bytes), false);
470 assert_eq!(uses_generaldelta(&bytes), false);
468 assert!(!is_inline(&bytes));
469 assert!(!uses_generaldelta(&bytes));
471 470 }
472 471
473 472 #[test]
@@ -478,8 +477,8 b' mod tests {'
478 477 .with_inline(true)
479 478 .build();
480 479
481 assert_eq!(is_inline(&bytes), true);
482 assert_eq!(uses_generaldelta(&bytes), false);
480 assert!(is_inline(&bytes));
481 assert!(!uses_generaldelta(&bytes));
483 482 }
484 483
485 484 #[test]
@@ -490,8 +489,8 b' mod tests {'
490 489 .with_inline(true)
491 490 .build();
492 491
493 assert_eq!(is_inline(&bytes), true);
494 assert_eq!(uses_generaldelta(&bytes), true);
492 assert!(is_inline(&bytes));
493 assert!(uses_generaldelta(&bytes));
495 494 }
496 495
497 496 #[test]
@@ -71,7 +71,7 b' pub trait NodeMap {'
71 71 ///
72 72 /// If several Revisions match the given prefix, a [`MultipleResults`]
73 73 /// error is returned.
74 fn find_bin<'a>(
74 fn find_bin(
75 75 &self,
76 76 idx: &impl RevlogIndex,
77 77 prefix: NodePrefix,
@@ -88,7 +88,7 b' pub trait NodeMap {'
88 88 ///
89 89 /// If several Revisions match the given prefix, a [`MultipleResults`]
90 90 /// error is returned.
91 fn unique_prefix_len_bin<'a>(
91 fn unique_prefix_len_bin(
92 92 &self,
93 93 idx: &impl RevlogIndex,
94 94 node_prefix: NodePrefix,
@@ -249,7 +249,7 b' fn has_prefix_or_none('
249 249 rev: Revision,
250 250 ) -> Result<Option<Revision>, NodeMapError> {
251 251 idx.node(rev)
252 .ok_or_else(|| NodeMapError::RevisionNotInIndex(rev))
252 .ok_or(NodeMapError::RevisionNotInIndex(rev))
253 253 .map(|node| {
254 254 if prefix.is_prefix_of(node) {
255 255 Some(rev)
@@ -468,7 +468,7 b' impl NodeTree {'
468 468 if let Element::Rev(old_rev) = deepest.element {
469 469 let old_node = index
470 470 .node(old_rev)
471 .ok_or_else(|| NodeMapError::RevisionNotInIndex(old_rev))?;
471 .ok_or(NodeMapError::RevisionNotInIndex(old_rev))?;
472 472 if old_node == node {
473 473 return Ok(()); // avoid creating lots of useless blocks
474 474 }
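
Both directions of the eager/lazy trade-off appear in this changeset: here `ok_or_else(|| NodeMapError::RevisionNotInIndex(rev))` becomes `ok_or(...)` because building the error value is trivial (clippy's `unnecessary_lazy_evaluations`), while the later `Revlog` hunk goes the other way, wrapping a `HgError::corrupted(...)` construction in `ok_or_else` so the costlier error value is only built on the error path. A hedged sketch of the trade-off, with invented error types:

// Illustrative only; `Error` and `make_expensive_message` are invented.
#[derive(Debug, PartialEq)]
enum Error {
    NotInIndex(u32),
    Corrupted(String),
}

fn make_expensive_message() -> String {
    // Stand-in for formatting/allocating an error message.
    "revlog references a revision not in the index".to_string()
}

fn main() {
    let rev: Option<u32> = Some(7);

    // Cheap payload: `ok_or` is fine, the error value costs nothing to build.
    let a: Result<u32, Error> = rev.ok_or(Error::NotInIndex(7));
    assert_eq!(a, Ok(7));

    // Costly payload: `ok_or_else` defers the allocation to the `None` case.
    let b: Result<u32, Error> =
        rev.ok_or_else(|| Error::Corrupted(make_expensive_message()));
    assert_eq!(b, Ok(7));
}
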
@@ -865,7 +865,7 b' mod tests {'
865 865 hex: &str,
866 866 ) -> Result<(), NodeMapError> {
867 867 let node = pad_node(hex);
868 self.index.insert(rev, node.clone());
868 self.index.insert(rev, node);
869 869 self.nt.insert(&self.index, &node, rev)?;
870 870 Ok(())
871 871 }
@@ -887,13 +887,13 b' mod tests {'
887 887 /// Drain `added` and restart a new one
888 888 fn commit(self) -> Self {
889 889 let mut as_vec: Vec<Block> =
890 self.nt.readonly.iter().map(|block| block.clone()).collect();
890 self.nt.readonly.iter().copied().collect();
891 891 as_vec.extend(self.nt.growable);
892 892 as_vec.push(self.nt.root);
893 893
894 894 Self {
895 895 index: self.index,
896 nt: NodeTree::from(as_vec).into(),
896 nt: NodeTree::from(as_vec),
897 897 }
898 898 }
899 899 }
@@ -967,15 +967,15 b' mod tests {'
967 967 let idx = &mut nt_idx.index;
968 968
969 969 let node0_hex = hex_pad_right("444444");
970 let mut node1_hex = hex_pad_right("444444").clone();
970 let mut node1_hex = hex_pad_right("444444");
971 971 node1_hex.pop();
972 972 node1_hex.push('5');
973 973 let node0 = Node::from_hex(&node0_hex).unwrap();
974 974 let node1 = Node::from_hex(&node1_hex).unwrap();
975 975
976 idx.insert(0, node0.clone());
976 idx.insert(0, node0);
977 977 nt.insert(idx, &node0, 0)?;
978 idx.insert(1, node1.clone());
978 idx.insert(1, node1);
979 979 nt.insert(idx, &node1, 1)?;
980 980
981 981 assert_eq!(nt.find_bin(idx, (&node0).into())?, Some(0));
@@ -2,6 +2,7 b' use sha1::{Digest, Sha1};'
2 2
3 3 #[derive(PartialEq, Debug)]
4 4 #[allow(non_camel_case_types)]
5 #[allow(clippy::upper_case_acronyms)]
5 6 enum path_state {
6 7 START, /* first byte of a path component */
7 8 A, /* "AUX" */
@@ -27,6 +28,7 b' enum path_state {'
27 28
28 29 /* state machine for dir-encoding */
29 30 #[allow(non_camel_case_types)]
31 #[allow(clippy::upper_case_acronyms)]
30 32 enum dir_state {
31 33 DDOT,
32 34 DH,
@@ -61,7 +63,7 b" fn rewrap_option<'a, 'b: 'a>("
61 63 }
62 64 }
63 65
64 fn hexencode<'a>(mut dest: Option<&'a mut [u8]>, destlen: &mut usize, c: u8) {
66 fn hexencode(mut dest: Option<&mut [u8]>, destlen: &mut usize, c: u8) {
65 67 let hexdigit = b"0123456789abcdef";
66 68 charcopy(
67 69 rewrap_option(&mut dest),
@@ -534,10 +536,7 b' fn hash_mangle(src: &[u8], sha: &[u8]) -'
534 536 let last_slash = src.iter().rposition(|b| *b == b'/');
535 537 let last_dot: Option<usize> = {
536 538 let s = last_slash.unwrap_or(0);
537 src[s..]
538 .iter()
539 .rposition(|b| *b == b'.')
540 .and_then(|i| Some(i + s))
539 src[s..].iter().rposition(|b| *b == b'.').map(|i| i + s)
541 540 };
542 541
543 542 let mut dest = vec![0; MAXSTOREPATHLEN];
@@ -545,8 +544,8 b' fn hash_mangle(src: &[u8], sha: &[u8]) -'
545 544
546 545 {
547 546 let mut first = true;
548 for slice in src[..last_slash.unwrap_or_else(|| src.len())]
549 .split(|b| *b == b'/')
547 for slice in
548 src[..last_slash.unwrap_or(src.len())].split(|b| *b == b'/')
550 549 {
551 550 let slice = &slice[..std::cmp::min(slice.len(), dirprefixlen)];
552 551 if destlen + (slice.len() + if first { 0 } else { 1 })
@@ -641,6 +640,6 b' pub fn path_encode(path: &[u8]) -> Vec<u'
641 640 res
642 641 }
643 642 } else {
644 hash_encode(&path)
643 hash_encode(path)
645 644 }
646 645 }
@@ -174,10 +174,11 b' impl Revlog {'
174 174 // optimize these cases.
175 175 let mut found_by_prefix = None;
176 176 for rev in (0..self.len() as Revision).rev() {
177 let index_entry =
178 self.index.get_entry(rev).ok_or(HgError::corrupted(
177 let index_entry = self.index.get_entry(rev).ok_or_else(|| {
178 HgError::corrupted(
179 179 "revlog references a revision not in the index",
180 ))?;
180 )
181 })?;
181 182 if node == *index_entry.hash() {
182 183 return Ok(rev);
183 184 }
@@ -230,7 +231,7 b' impl Revlog {'
230 231 None => &NULL_NODE,
231 232 };
232 233
233 &hash(data, h1.as_bytes(), h2.as_bytes()) == expected
234 hash(data, h1.as_bytes(), h2.as_bytes()) == expected
234 235 }
235 236
236 237 /// Build the full data of a revision out its snapshot
@@ -253,8 +254,8 b' impl Revlog {'
253 254
254 255 /// Return the revlog data.
255 256 fn data(&self) -> &[u8] {
256 match self.data_bytes {
257 Some(ref data_bytes) => &data_bytes,
257 match &self.data_bytes {
258 Some(data_bytes) => data_bytes,
258 259 None => panic!(
259 260 "forgot to load the data or trying to access inline data"
260 261 ),
@@ -21,7 +21,7 b' pub fn resolve_single('
21 21 match input {
22 22 "." => {
23 23 let p1 = repo.dirstate_parents()?.p1;
24 return Ok(changelog.revlog.rev_from_node(p1.into())?);
24 return changelog.revlog.rev_from_node(p1.into());
25 25 }
26 26 "null" => return Ok(NULL_REVISION),
27 27 _ => {}
@@ -33,7 +33,7 b' pub fn resolve_single('
33 33 let msg = format!("cannot parse revset '{}'", input);
34 34 Err(HgError::unsupported(msg).into())
35 35 }
36 result => return result,
36 result => result,
37 37 }
38 38 }
39 39
@@ -164,7 +164,7 b' pub(crate) fn parse_config('
164 164 fn read_temporary_includes(
165 165 repo: &Repo,
166 166 ) -> Result<Vec<Vec<u8>>, SparseConfigError> {
167 let raw = repo.hg_vfs().try_read("tempsparse")?.unwrap_or(vec![]);
167 let raw = repo.hg_vfs().try_read("tempsparse")?.unwrap_or_default();
168 168 if raw.is_empty() {
169 169 return Ok(vec![]);
170 170 }
@@ -179,7 +179,7 b' fn patterns_for_rev('
179 179 if !repo.has_sparse() {
180 180 return Ok(None);
181 181 }
182 let raw = repo.hg_vfs().try_read("sparse")?.unwrap_or(vec![]);
182 let raw = repo.hg_vfs().try_read("sparse")?.unwrap_or_default();
183 183
184 184 if raw.is_empty() {
185 185 return Ok(None);
@@ -200,9 +200,10 b' fn patterns_for_rev('
200 200 let output =
201 201 cat(repo, &rev.to_string(), vec![HgPath::new(&profile)])
202 202 .map_err(|_| {
203 HgError::corrupted(format!(
203 HgError::corrupted(
204 204 "dirstate points to non-existent parent node"
205 ))
205 .to_string(),
206 )
206 207 })?;
207 208 if output.results.is_empty() {
208 209 config.warnings.push(SparseWarning::ProfileNotFound {
@@ -252,9 +253,9 b' pub fn matcher('
252 253 repo.changelog()?
253 254 .rev_from_node(parents.p1.into())
254 255 .map_err(|_| {
255 HgError::corrupted(format!(
256 "dirstate points to non-existent parent node"
257 ))
256 HgError::corrupted(
257 "dirstate points to non-existent parent node".to_string(),
258 )
258 259 })?;
259 260 if p1_rev != NULL_REVISION {
260 261 revs.push(p1_rev)
@@ -263,9 +264,9 b' pub fn matcher('
263 264 repo.changelog()?
264 265 .rev_from_node(parents.p2.into())
265 266 .map_err(|_| {
266 HgError::corrupted(format!(
267 "dirstate points to non-existent parent node"
268 ))
267 HgError::corrupted(
268 "dirstate points to non-existent parent node".to_string(),
269 )
269 270 })?;
270 271 if p2_rev != NULL_REVISION {
271 272 revs.push(p2_rev)
@@ -325,7 +326,7 b' fn force_include_matcher('
325 326 }
326 327 let forced_include_matcher = IncludeMatcher::new(
327 328 temp_includes
328 .into_iter()
329 .iter()
329 330 .map(|include| {
330 331 IgnorePattern::new(PatternSyntax::Path, include, Path::new(""))
331 332 })
@@ -137,11 +137,8 b' impl SliceExt for [u8] {'
137 137 }
138 138
139 139 fn split_2_by_slice(&self, separator: &[u8]) -> Option<(&[u8], &[u8])> {
140 if let Some(pos) = find_slice_in_slice(self, separator) {
141 Some((&self[..pos], &self[pos + separator.len()..]))
142 } else {
143 None
144 }
140 find_slice_in_slice(self, separator)
141 .map(|pos| (&self[..pos], &self[pos + separator.len()..]))
145 142 }
146 143 }
147 144
@@ -369,7 +366,7 b' where'
369 366 MergeResult::RightValue => {
370 367 left.insert(key, right_value);
371 368 }
372 MergeResult::UseNewValue(new_value) => {
369 MergeResult::NewValue(new_value) => {
373 370 left.insert(key, new_value);
374 371 }
375 372 },
@@ -230,7 +230,7 b' pub fn canonical_path('
230 230 // TODO hint to the user about using --cwd
231 231 // Bubble up the responsibility to Python for now
232 232 Err(HgPathError::NotUnderRoot {
233 path: original_name.to_owned(),
233 path: original_name,
234 234 root: root.to_owned(),
235 235 })
236 236 }
@@ -424,7 +424,7 b' mod tests {'
424 424 assert_eq!(
425 425 canonical_path(&root, Path::new(""), &beneath_repo),
426 426 Err(HgPathError::NotUnderRoot {
427 path: beneath_repo.to_owned(),
427 path: beneath_repo,
428 428 root: root.to_owned()
429 429 })
430 430 );
@@ -205,7 +205,7 b' impl HgPath {'
205 205 /// ```
206 206 pub fn split_filename(&self) -> (&Self, &Self) {
207 207 match &self.inner.iter().rposition(|c| *c == b'/') {
208 None => (HgPath::new(""), &self),
208 None => (HgPath::new(""), self),
209 209 Some(size) => (
210 210 HgPath::new(&self.inner[..*size]),
211 211 HgPath::new(&self.inner[*size + 1..]),
@@ -326,7 +326,7 b' impl HgPath {'
326 326 #[cfg(unix)]
327 327 /// Split a pathname into drive and path. On Posix, drive is always empty.
328 328 pub fn split_drive(&self) -> (&HgPath, &HgPath) {
329 (HgPath::new(b""), &self)
329 (HgPath::new(b""), self)
330 330 }
331 331
332 332 /// Checks for errors in the path, short-circuiting at the first one.
@@ -396,7 +396,7 b' impl HgPathBuf {'
396 396 Default::default()
397 397 }
398 398
399 pub fn push<T: ?Sized + AsRef<HgPath>>(&mut self, other: &T) -> () {
399 pub fn push<T: ?Sized + AsRef<HgPath>>(&mut self, other: &T) {
400 400 if !self.inner.is_empty() && self.inner.last() != Some(&b'/') {
401 401 self.inner.push(b'/');
402 402 }
@@ -431,7 +431,7 b' impl Deref for HgPathBuf {'
431 431
432 432 #[inline]
433 433 fn deref(&self) -> &HgPath {
434 &HgPath::new(&self.inner)
434 HgPath::new(&self.inner)
435 435 }
436 436 }
437 437
@@ -441,15 +441,15 b' impl<T: ?Sized + AsRef<HgPath>> From<&T>'
441 441 }
442 442 }
443 443
444 impl Into<Vec<u8>> for HgPathBuf {
445 fn into(self) -> Vec<u8> {
446 self.inner
444 impl From<HgPathBuf> for Vec<u8> {
445 fn from(val: HgPathBuf) -> Self {
446 val.inner
447 447 }
448 448 }
449 449
450 450 impl Borrow<HgPath> for HgPathBuf {
451 451 fn borrow(&self) -> &HgPath {
452 &HgPath::new(self.as_bytes())
452 HgPath::new(self.as_bytes())
453 453 }
454 454 }
455 455
@@ -491,7 +491,7 b' pub fn hg_path_to_os_string<P: AsRef<HgP'
491 491 #[cfg(unix)]
492 492 {
493 493 use std::os::unix::ffi::OsStrExt;
494 os_str = std::ffi::OsStr::from_bytes(&hg_path.as_ref().as_bytes());
494 os_str = std::ffi::OsStr::from_bytes(hg_path.as_ref().as_bytes());
495 495 }
496 496 // TODO Handle other platforms
497 497 // TODO: convert from WTF8 to Windows MBCS (ANSI encoding).
@@ -511,7 +511,7 b' pub fn os_string_to_hg_path_buf<S: AsRef'
511 511 #[cfg(unix)]
512 512 {
513 513 use std::os::unix::ffi::OsStrExt;
514 buf = HgPathBuf::from_bytes(&os_string.as_ref().as_bytes());
514 buf = HgPathBuf::from_bytes(os_string.as_ref().as_bytes());
515 515 }
516 516 // TODO Handle other platforms
517 517 // TODO: convert from WTF8 to Windows MBCS (ANSI encoding).
@@ -528,7 +528,7 b' pub fn path_to_hg_path_buf<P: AsRef<Path'
528 528 #[cfg(unix)]
529 529 {
530 530 use std::os::unix::ffi::OsStrExt;
531 buf = HgPathBuf::from_bytes(&os_str.as_bytes());
531 buf = HgPathBuf::from_bytes(os_str.as_bytes());
532 532 }
533 533 // TODO Handle other platforms
534 534 // TODO: convert from WTF8 to Windows MBCS (ANSI encoding).
@@ -53,7 +53,7 b' fn build_random_graph('
53 53 /// Compute the ancestors set of all revisions of a VecGraph
54 54 fn ancestors_sets(vg: &VecGraph) -> Vec<HashSet<Revision>> {
55 55 let mut ancs: Vec<HashSet<Revision>> = Vec::new();
56 for i in 0..vg.len() {
56 (0..vg.len()).for_each(|i| {
57 57 let mut ancs_i = HashSet::new();
58 58 ancs_i.insert(i as Revision);
59 59 for p in vg[i].iter().cloned() {
@@ -62,7 +62,7 b' fn ancestors_sets(vg: &VecGraph) -> Vec<'
62 62 }
63 63 }
64 64 ancs.push(ancs_i);
65 }
65 });
66 66 ancs
67 67 }
68 68