@@ -283,8 +283,16 @@ We define:
 in inclusion order. This definition is recursive, as included files can
 themselves include more files.
 
-This hash is defined as the SHA-1 of the concatenation (in sorted
-order) of the "expanded contents" of each "root" ignore file.
+* "filepath" as the bytes of the ignore file path
+  relative to the root of the repository if inside the repository,
+  or the untouched path as defined in the configuration.
+
+This hash is defined as the SHA-1 of the following line format:
+
+<filepath> <sha1 of the "expanded contents">\n
+
+for each "root" ignore file. (in sorted order)
+
 (Note that computing this does not require actually concatenating
 into a single contiguous byte sequence.
 Instead a SHA-1 hasher object can be created
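
For illustration, here is a minimal sketch of how a hash following the line format above could be computed with the same `sha1` crate calls (`Sha1::new`, `update`, `finalize`) that the patch itself uses. This is not the hg-core implementation; the function name `ignore_patterns_hash` and the pre-collected `(filepath, expanded contents)` pairs are assumptions made for the example.

    // Sketch only: not the hg-core implementation.
    // Assumes the (filepath bytes, "expanded contents") pairs of every
    // "root" ignore file were already collected in sorted order.
    use sha1::{Digest, Sha1};

    fn ignore_patterns_hash(files: &[(Vec<u8>, Vec<u8>)]) -> Vec<u8> {
        let mut hasher = Sha1::new();
        for (filepath, expanded_contents) in files {
            // Sub-hash of this file's "expanded contents".
            let mut subhasher = Sha1::new();
            subhasher.update(expanded_contents);
            let contents_hash = subhasher.finalize();

            // Feed one `<filepath> <sha1 of the "expanded contents">\n`
            // line into the outer hasher.
            hasher.update(filepath);
            hasher.update(b" ");
            hasher.update(contents_hash);
            hasher.update(b"\n");
        }
        hasher.finalize().to_vec()
    }

As the note above points out, no concatenated buffer is ever built; each component is simply fed to the hasher in turn.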
@@ -10,6 +10,7 @@ use crate::dirstate_tree::on_disk::Dirst
 use crate::matchers::get_ignore_function;
 use crate::matchers::Matcher;
 use crate::utils::files::get_bytes_from_os_string;
+use crate::utils::files::get_bytes_from_path;
 use crate::utils::files::get_path_from_bytes;
 use crate::utils::hg_path::HgPath;
 use crate::BadMatch;
@@ -66,7 +67,7 b" pub fn status<'dirstate>(" | |||||
66 | let (ignore_fn, warnings) = get_ignore_function( |
|
67 | let (ignore_fn, warnings) = get_ignore_function( | |
67 | ignore_files, |
|
68 | ignore_files, | |
68 | &root_dir, |
|
69 | &root_dir, | |
69 | &mut |_pattern_bytes| {}, |
|
70 | &mut |_source, _pattern_bytes| {}, | |
70 | )?; |
|
71 | )?; | |
71 | (ignore_fn, warnings, None) |
|
72 | (ignore_fn, warnings, None) | |
72 | } |
|
73 | } | |
@@ -75,7 +76,24 b" pub fn status<'dirstate>(" | |||||
75 | let (ignore_fn, warnings) = get_ignore_function( |
|
76 | let (ignore_fn, warnings) = get_ignore_function( | |
76 | ignore_files, |
|
77 | ignore_files, | |
77 | &root_dir, |
|
78 | &root_dir, | |
78 |
&mut |pattern_bytes| |
|
79 | &mut |source, pattern_bytes| { | |
|
80 | // If inside the repo, use the relative version to | |||
|
81 | // make it deterministic inside tests. | |||
|
82 | // The performance hit should be negligible. | |||
|
83 | let source = source | |||
|
84 | .strip_prefix(&root_dir) | |||
|
85 | .unwrap_or(source); | |||
|
86 | let source = get_bytes_from_path(source); | |||
|
87 | ||||
|
88 | let mut subhasher = Sha1::new(); | |||
|
89 | subhasher.update(pattern_bytes); | |||
|
90 | let patterns_hash = subhasher.finalize(); | |||
|
91 | ||||
|
92 | hasher.update(source); | |||
|
93 | hasher.update(b" "); | |||
|
94 | hasher.update(patterns_hash); | |||
|
95 | hasher.update(b"\n"); | |||
|
96 | }, | |||
79 | )?; |
|
97 | )?; | |
80 | let new_hash = *hasher.finalize().as_ref(); |
|
98 | let new_hash = *hasher.finalize().as_ref(); | |
81 | let changed = new_hash != dmap.ignore_patterns_hash; |
|
99 | let changed = new_hash != dmap.ignore_patterns_hash; |
@@ -412,11 +412,11 @@ pub fn parse_pattern_file_contents(
 pub fn read_pattern_file(
     file_path: &Path,
     warn: bool,
-    inspect_pattern_bytes: &mut impl FnMut(&[u8]),
+    inspect_pattern_bytes: &mut impl FnMut(&Path, &[u8]),
 ) -> Result<(Vec<IgnorePattern>, Vec<PatternFileWarning>), PatternError> {
     match std::fs::read(file_path) {
         Ok(contents) => {
-            inspect_pattern_bytes(&contents);
+            inspect_pattern_bytes(file_path, &contents);
             parse_pattern_file_contents(&contents, file_path, None, warn)
         }
         Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok((
@@ -455,7 +455,7 @@ pub type PatternResult<T> = Result<T, Pa
 pub fn get_patterns_from_file(
     pattern_file: &Path,
     root_dir: &Path,
-    inspect_pattern_bytes: &mut impl FnMut(&[u8]),
+    inspect_pattern_bytes: &mut impl FnMut(&Path, &[u8]),
 ) -> PatternResult<(Vec<IgnorePattern>, Vec<PatternFileWarning>)> {
     let (patterns, mut warnings) =
         read_pattern_file(pattern_file, true, inspect_pattern_bytes)?;
@@ -838,7 +838,7 b" fn build_match<'a, 'b>(" | |||||
838 | pub fn get_ignore_matcher<'a>( |
|
838 | pub fn get_ignore_matcher<'a>( | |
839 | mut all_pattern_files: Vec<PathBuf>, |
|
839 | mut all_pattern_files: Vec<PathBuf>, | |
840 | root_dir: &Path, |
|
840 | root_dir: &Path, | |
841 | inspect_pattern_bytes: &mut impl FnMut(&[u8]), |
|
841 | inspect_pattern_bytes: &mut impl FnMut(&Path, &[u8]), | |
842 | ) -> PatternResult<(IncludeMatcher<'a>, Vec<PatternFileWarning>)> { |
|
842 | ) -> PatternResult<(IncludeMatcher<'a>, Vec<PatternFileWarning>)> { | |
843 | let mut all_patterns = vec![]; |
|
843 | let mut all_patterns = vec![]; | |
844 | let mut all_warnings = vec![]; |
|
844 | let mut all_warnings = vec![]; | |
@@ -871,7 +871,7 b" pub fn get_ignore_matcher<'a>(" | |||||
871 | pub fn get_ignore_function<'a>( |
|
871 | pub fn get_ignore_function<'a>( | |
872 | all_pattern_files: Vec<PathBuf>, |
|
872 | all_pattern_files: Vec<PathBuf>, | |
873 | root_dir: &Path, |
|
873 | root_dir: &Path, | |
874 | inspect_pattern_bytes: &mut impl FnMut(&[u8]), |
|
874 | inspect_pattern_bytes: &mut impl FnMut(&Path, &[u8]), | |
875 | ) -> PatternResult<(IgnoreFnType<'a>, Vec<PatternFileWarning>)> { |
|
875 | ) -> PatternResult<(IgnoreFnType<'a>, Vec<PatternFileWarning>)> { | |
876 | let res = |
|
876 | let res = | |
877 | get_ignore_matcher(all_pattern_files, root_dir, inspect_pattern_bytes); |
|
877 | get_ignore_matcher(all_pattern_files, root_dir, inspect_pattern_bytes); |
@@ -25,7 +25,7 @@ pub fn run(invocation: &crate::CliInvoca
     let (ignore_matcher, warnings) = get_ignore_matcher(
         vec![ignore_file],
        &repo.working_directory_path().to_owned(),
-        &mut |_pattern_bytes| (),
+        &mut |_source, _pattern_bytes| (),
     )
     .map_err(|e| StatusError::from(e))?;
 
@@ -421,18 +421,24 @@ Windows paths are accepted on input
 Check the hash of ignore patterns written in the dirstate
 This is an optimization that is only relevant when using the Rust extensions
 
+  $ cat_filename_and_hash () {
+  >   for i in "$@"; do
+  >     printf "$i "
+  >     cat "$i" | "$TESTDIR"/f --raw-sha1 | sed 's/^raw-sha1=//'
+  >   done
+  > }
   $ hg status > /dev/null
-  $ cat .hg/testhgignore .hg/testhgignorerel .hgignore dir2/.hgignore dir1/.hgignore dir1/.hgignoretwo | $TESTDIR/f --sha1
-  sha1=6e315b60f15fb5dfa02be00f3e2c8f923051f5ff
+  $ cat_filename_and_hash .hg/testhgignore .hg/testhgignorerel .hgignore dir2/.hgignore dir1/.hgignore dir1/.hgignoretwo | $TESTDIR/f --sha1
+  sha1=c0beb296395d48ced8e14f39009c4ea6e409bfe6
   $ hg debugstate --docket | grep ignore
-  ignore pattern hash: 6e315b60f15fb5dfa02be00f3e2c8f923051f5ff
+  ignore pattern hash: c0beb296395d48ced8e14f39009c4ea6e409bfe6
 
   $ echo rel > .hg/testhgignorerel
   $ hg status > /dev/null
-  $ cat .hg/testhgignore .hg/testhgignorerel .hgignore dir2/.hgignore dir1/.hgignore dir1/.hgignoretwo | $TESTDIR/f --sha1
-  sha1=dea19cc7119213f24b6b582a4bae7b0cb063e34e
+  $ cat_filename_and_hash .hg/testhgignore .hg/testhgignorerel .hgignore dir2/.hgignore dir1/.hgignore dir1/.hgignoretwo | $TESTDIR/f --sha1
+  sha1=b8e63d3428ec38abc68baa27631516d5ec46b7fa
   $ hg debugstate --docket | grep ignore
-  ignore pattern hash: dea19cc7119213f24b6b582a4bae7b0cb063e34e
+  ignore pattern hash: b8e63d3428ec38abc68baa27631516d5ec46b7fa
   $ cd ..
 
 Check that the hash depends on the source of the hgignore patterns
@@ -460,6 +466,6 @@ Check that the hash depends on the sourc
   $ hg status
   M dir1/.hgignore
   M dir2/.hgignore
   ? dir1/subdir/ignored1
 
 #endif