rhg-status: add support for narrow clones
Raphaël Gomès -
r50383:7c93e38a default
@@ -0,0 +1,111 @@ rust/hg-core/src/narrow.rs (new file)
1 use std::path::Path;
2
3 use crate::{
4 errors::HgError,
5 exit_codes,
6 filepatterns::parse_pattern_file_contents,
7 matchers::{
8 AlwaysMatcher, DifferenceMatcher, IncludeMatcher, Matcher,
9 NeverMatcher,
10 },
11 repo::Repo,
12 requirements::NARROW_REQUIREMENT,
13 sparse::{self, SparseConfigError, SparseWarning},
14 };
15
16 /// The file in .hg/store/ that indicates which paths exist in the store
17 const FILENAME: &str = "narrowspec";
18 /// The file in .hg/ that indicates which paths exist in the dirstate
19 const DIRSTATE_FILENAME: &str = "narrowspec.dirstate";
20
21 /// Pattern prefixes that are allowed in narrow patterns. This list MUST
22 /// only contain patterns that are fast and safe to evaluate. Keep in mind
23 /// that patterns are supplied by clients and executed on remote servers
24 /// as part of wire protocol commands. That means that changes to this
25 /// data structure influence the wire protocol and should not be taken
26 /// lightly - especially removals.
27 const VALID_PREFIXES: [&str; 2] = ["path:", "rootfilesin:"];
28
29 /// Return the matcher for the current narrow spec, and all configuration
30 /// warnings to display.
31 pub fn matcher(
32 repo: &Repo,
33 ) -> Result<(Box<dyn Matcher + Sync>, Vec<SparseWarning>), SparseConfigError> {
34 let mut warnings = vec![];
35 if !repo.requirements().contains(NARROW_REQUIREMENT) {
36 return Ok((Box::new(AlwaysMatcher), warnings));
37 }
38 // Treat "narrowspec does not exist" the same as "narrowspec file exists
39 // and is empty".
40 let store_spec = repo.store_vfs().try_read(FILENAME)?.unwrap_or(vec![]);
41 let working_copy_spec =
42 repo.hg_vfs().try_read(DIRSTATE_FILENAME)?.unwrap_or(vec![]);
43 if store_spec != working_copy_spec {
44 return Err(HgError::abort(
45 "working copy's narrowspec is stale",
46 exit_codes::STATE_ERROR,
47 Some("run 'hg tracked --update-working-copy'".into()),
48 )
49 .into());
50 }
51
52 let config = sparse::parse_config(
53 &store_spec,
54 sparse::SparseConfigContext::Narrow,
55 )?;
56
57 warnings.extend(config.warnings);
58
59 if !config.profiles.is_empty() {
60 // TODO (from Python impl) maybe do something with profiles?
61 return Err(SparseConfigError::IncludesInNarrow);
62 }
63 validate_patterns(&config.includes)?;
64 validate_patterns(&config.excludes)?;
65
66 if config.includes.is_empty() {
67 return Ok((Box::new(NeverMatcher), warnings));
68 }
69
70 let (patterns, subwarnings) = parse_pattern_file_contents(
71 &config.includes,
72 Path::new(""),
73 None,
74 false,
75 )?;
76 warnings.extend(subwarnings.into_iter().map(From::from));
77
78 let mut m: Box<dyn Matcher + Sync> =
79 Box::new(IncludeMatcher::new(patterns)?);
80
81 let (patterns, subwarnings) = parse_pattern_file_contents(
82 &config.excludes,
83 Path::new(""),
84 None,
85 false,
86 )?;
87 if !patterns.is_empty() {
88 warnings.extend(subwarnings.into_iter().map(From::from));
89 let exclude_matcher = Box::new(IncludeMatcher::new(patterns)?);
90 m = Box::new(DifferenceMatcher::new(m, exclude_matcher));
91 }
92
93 Ok((m, warnings))
94 }
95
96 fn validate_patterns(patterns: &[u8]) -> Result<(), SparseConfigError> {
97 for pattern in patterns.split(|c| *c == b'\n') {
98 if pattern.is_empty() {
99 continue;
100 }
101 if !VALID_PREFIXES
102 .iter()
103 .any(|prefix| pattern.starts_with(prefix.as_bytes()))
104 {
105 return Err(SparseConfigError::InvalidNarrowPrefix(
106 pattern.to_owned(),
107 ));
108 }
109 }
110 Ok(())
111 }
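The prefix rule that `validate_patterns` enforces is easy to exercise in isolation. The following standalone sketch is not part of this changeset; it only mirrors the `VALID_PREFIXES` check above to show which narrowspec lines are accepted and which would produce `InvalidNarrowPrefix`:

// Standalone illustration of the narrow prefix rule: every non-empty
// narrowspec line must start with "path:" or "rootfilesin:".
const VALID_PREFIXES: [&str; 2] = ["path:", "rootfilesin:"];

// Returns the first line that uses an unsupported prefix, if any.
fn first_invalid_pattern(spec: &[u8]) -> Option<Vec<u8>> {
    spec.split(|c| *c == b'\n')
        .filter(|line| !line.is_empty())
        .find(|line| {
            !VALID_PREFIXES
                .iter()
                .any(|prefix| line.starts_with(prefix.as_bytes()))
        })
        .map(|line| line.to_vec())
}

fn main() {
    // Accepted: both allowed prefixes.
    assert_eq!(
        first_invalid_pattern(b"path:src/lib\nrootfilesin:docs\n"),
        None
    );
    // Rejected: globs are not fast/safe to evaluate on the server side, so
    // the real code returns SparseConfigError::InvalidNarrowPrefix here.
    assert_eq!(
        first_invalid_pattern(b"path:src/lib\nglob:**/*.rs\n"),
        Some(b"glob:**/*.rs".to_vec())
    );
}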
@@ -1,22 +1,26 @@ rust/hg-core/src/exit_codes.rs
1 pub type ExitCode = i32;
1 pub type ExitCode = i32;
2
2
3 /// Successful exit
3 /// Successful exit
4 pub const OK: ExitCode = 0;
4 pub const OK: ExitCode = 0;
5
5
6 /// Generic abort
6 /// Generic abort
7 pub const ABORT: ExitCode = 255;
7 pub const ABORT: ExitCode = 255;
8
8
9 // Abort when there is a config related error
9 // Abort when there is a config related error
10 pub const CONFIG_ERROR_ABORT: ExitCode = 30;
10 pub const CONFIG_ERROR_ABORT: ExitCode = 30;
11
11
12 /// Indicates that the operation might work if retried in a different state.
13 /// Examples: Unresolved merge conflicts, unfinished operations
14 pub const STATE_ERROR: ExitCode = 20;
15
12 // Abort when there is an error while parsing config
16 // Abort when there is an error while parsing config
13 pub const CONFIG_PARSE_ERROR_ABORT: ExitCode = 10;
17 pub const CONFIG_PARSE_ERROR_ABORT: ExitCode = 10;
14
18
15 /// Generic something completed but did not succeed
19 /// Generic something completed but did not succeed
16 pub const UNSUCCESSFUL: ExitCode = 1;
20 pub const UNSUCCESSFUL: ExitCode = 1;
17
21
18 /// Command or feature not implemented by rhg
22 /// Command or feature not implemented by rhg
19 pub const UNIMPLEMENTED: ExitCode = 252;
23 pub const UNIMPLEMENTED: ExitCode = 252;
20
24
21 /// The fallback path is not valid
25 /// The fallback path is not valid
22 pub const INVALID_FALLBACK: ExitCode = 253;
26 pub const INVALID_FALLBACK: ExitCode = 253;
@@ -1,704 +1,706 @@ rust/hg-core/src/filepatterns.rs
1 // filepatterns.rs
1 // filepatterns.rs
2 //
2 //
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
3 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
4 //
4 //
5 // This software may be used and distributed according to the terms of the
5 // This software may be used and distributed according to the terms of the
6 // GNU General Public License version 2 or any later version.
6 // GNU General Public License version 2 or any later version.
7
7
8 //! Handling of Mercurial-specific patterns.
8 //! Handling of Mercurial-specific patterns.
9
9
10 use crate::{
10 use crate::{
11 utils::{
11 utils::{
12 files::{canonical_path, get_bytes_from_path, get_path_from_bytes},
12 files::{canonical_path, get_bytes_from_path, get_path_from_bytes},
13 hg_path::{path_to_hg_path_buf, HgPathBuf, HgPathError},
13 hg_path::{path_to_hg_path_buf, HgPathBuf, HgPathError},
14 SliceExt,
14 SliceExt,
15 },
15 },
16 FastHashMap, PatternError,
16 FastHashMap, PatternError,
17 };
17 };
18 use lazy_static::lazy_static;
18 use lazy_static::lazy_static;
19 use regex::bytes::{NoExpand, Regex};
19 use regex::bytes::{NoExpand, Regex};
20 use std::ops::Deref;
20 use std::ops::Deref;
21 use std::path::{Path, PathBuf};
21 use std::path::{Path, PathBuf};
22 use std::vec::Vec;
22 use std::vec::Vec;
23
23
24 lazy_static! {
24 lazy_static! {
25 static ref RE_ESCAPE: Vec<Vec<u8>> = {
25 static ref RE_ESCAPE: Vec<Vec<u8>> = {
26 let mut v: Vec<Vec<u8>> = (0..=255).map(|byte| vec![byte]).collect();
26 let mut v: Vec<Vec<u8>> = (0..=255).map(|byte| vec![byte]).collect();
27 let to_escape = b"()[]{}?*+-|^$\\.&~# \t\n\r\x0b\x0c";
27 let to_escape = b"()[]{}?*+-|^$\\.&~# \t\n\r\x0b\x0c";
28 for byte in to_escape {
28 for byte in to_escape {
29 v[*byte as usize].insert(0, b'\\');
29 v[*byte as usize].insert(0, b'\\');
30 }
30 }
31 v
31 v
32 };
32 };
33 }
33 }
34
34
35 /// These are matched in order
35 /// These are matched in order
36 const GLOB_REPLACEMENTS: &[(&[u8], &[u8])] =
36 const GLOB_REPLACEMENTS: &[(&[u8], &[u8])] =
37 &[(b"*/", b"(?:.*/)?"), (b"*", b".*"), (b"", b"[^/]*")];
37 &[(b"*/", b"(?:.*/)?"), (b"*", b".*"), (b"", b"[^/]*")];
38
38
39 /// Appended to the regexp of globs
39 /// Appended to the regexp of globs
40 const GLOB_SUFFIX: &[u8; 7] = b"(?:/|$)";
40 const GLOB_SUFFIX: &[u8; 7] = b"(?:/|$)";
41
41
42 #[derive(Debug, Clone, PartialEq, Eq)]
42 #[derive(Debug, Clone, PartialEq, Eq)]
43 pub enum PatternSyntax {
43 pub enum PatternSyntax {
44 /// A regular expression
44 /// A regular expression
45 Regexp,
45 Regexp,
46 /// Glob that matches at the front of the path
46 /// Glob that matches at the front of the path
47 RootGlob,
47 RootGlob,
48 /// Glob that matches at any suffix of the path (still anchored at
48 /// Glob that matches at any suffix of the path (still anchored at
49 /// slashes)
49 /// slashes)
50 Glob,
50 Glob,
51 /// a path relative to repository root, which is matched recursively
51 /// a path relative to repository root, which is matched recursively
52 Path,
52 Path,
53 /// A path relative to cwd
53 /// A path relative to cwd
54 RelPath,
54 RelPath,
55 /// an unrooted glob (*.rs matches Rust files in all dirs)
55 /// an unrooted glob (*.rs matches Rust files in all dirs)
56 RelGlob,
56 RelGlob,
57 /// A regexp that needn't match the start of a name
57 /// A regexp that needn't match the start of a name
58 RelRegexp,
58 RelRegexp,
59 /// A path relative to repository root, which is matched non-recursively
59 /// A path relative to repository root, which is matched non-recursively
60 /// (will not match subdirectories)
60 /// (will not match subdirectories)
61 RootFiles,
61 RootFiles,
62 /// A file of patterns to read and include
62 /// A file of patterns to read and include
63 Include,
63 Include,
64 /// A file of patterns to match against files under the same directory
64 /// A file of patterns to match against files under the same directory
65 SubInclude,
65 SubInclude,
66 /// SubInclude with the result of parsing the included file
66 /// SubInclude with the result of parsing the included file
67 ///
67 ///
68 /// Note: there is no ExpandedInclude because that expansion can be done
68 /// Note: there is no ExpandedInclude because that expansion can be done
69 /// in place by replacing the Include pattern by the included patterns.
69 /// in place by replacing the Include pattern by the included patterns.
70 /// SubInclude requires more handling.
70 /// SubInclude requires more handling.
71 ///
71 ///
72 /// Note: `Box` is used to minimize size impact on other enum variants
72 /// Note: `Box` is used to minimize size impact on other enum variants
73 ExpandedSubInclude(Box<SubInclude>),
73 ExpandedSubInclude(Box<SubInclude>),
74 }
74 }
75
75
76 /// Transforms a glob pattern into a regex
76 /// Transforms a glob pattern into a regex
77 fn glob_to_re(pat: &[u8]) -> Vec<u8> {
77 fn glob_to_re(pat: &[u8]) -> Vec<u8> {
78 let mut input = pat;
78 let mut input = pat;
79 let mut res: Vec<u8> = vec![];
79 let mut res: Vec<u8> = vec![];
80 let mut group_depth = 0;
80 let mut group_depth = 0;
81
81
82 while let Some((c, rest)) = input.split_first() {
82 while let Some((c, rest)) = input.split_first() {
83 input = rest;
83 input = rest;
84
84
85 match c {
85 match c {
86 b'*' => {
86 b'*' => {
87 for (source, repl) in GLOB_REPLACEMENTS {
87 for (source, repl) in GLOB_REPLACEMENTS {
88 if let Some(rest) = input.drop_prefix(source) {
88 if let Some(rest) = input.drop_prefix(source) {
89 input = rest;
89 input = rest;
90 res.extend(*repl);
90 res.extend(*repl);
91 break;
91 break;
92 }
92 }
93 }
93 }
94 }
94 }
95 b'?' => res.extend(b"."),
95 b'?' => res.extend(b"."),
96 b'[' => {
96 b'[' => {
97 match input.iter().skip(1).position(|b| *b == b']') {
97 match input.iter().skip(1).position(|b| *b == b']') {
98 None => res.extend(b"\\["),
98 None => res.extend(b"\\["),
99 Some(end) => {
99 Some(end) => {
100 // Account for the one we skipped
100 // Account for the one we skipped
101 let end = end + 1;
101 let end = end + 1;
102
102
103 res.extend(b"[");
103 res.extend(b"[");
104
104
105 for (i, b) in input[..end].iter().enumerate() {
105 for (i, b) in input[..end].iter().enumerate() {
106 if *b == b'!' && i == 0 {
106 if *b == b'!' && i == 0 {
107 res.extend(b"^")
107 res.extend(b"^")
108 } else if *b == b'^' && i == 0 {
108 } else if *b == b'^' && i == 0 {
109 res.extend(b"\\^")
109 res.extend(b"\\^")
110 } else if *b == b'\\' {
110 } else if *b == b'\\' {
111 res.extend(b"\\\\")
111 res.extend(b"\\\\")
112 } else {
112 } else {
113 res.push(*b)
113 res.push(*b)
114 }
114 }
115 }
115 }
116 res.extend(b"]");
116 res.extend(b"]");
117 input = &input[end + 1..];
117 input = &input[end + 1..];
118 }
118 }
119 }
119 }
120 }
120 }
121 b'{' => {
121 b'{' => {
122 group_depth += 1;
122 group_depth += 1;
123 res.extend(b"(?:")
123 res.extend(b"(?:")
124 }
124 }
125 b'}' if group_depth > 0 => {
125 b'}' if group_depth > 0 => {
126 group_depth -= 1;
126 group_depth -= 1;
127 res.extend(b")");
127 res.extend(b")");
128 }
128 }
129 b',' if group_depth > 0 => res.extend(b"|"),
129 b',' if group_depth > 0 => res.extend(b"|"),
130 b'\\' => {
130 b'\\' => {
131 let c = {
131 let c = {
132 if let Some((c, rest)) = input.split_first() {
132 if let Some((c, rest)) = input.split_first() {
133 input = rest;
133 input = rest;
134 c
134 c
135 } else {
135 } else {
136 c
136 c
137 }
137 }
138 };
138 };
139 res.extend(&RE_ESCAPE[*c as usize])
139 res.extend(&RE_ESCAPE[*c as usize])
140 }
140 }
141 _ => res.extend(&RE_ESCAPE[*c as usize]),
141 _ => res.extend(&RE_ESCAPE[*c as usize]),
142 }
142 }
143 }
143 }
144 res
144 res
145 }
145 }
146
146
147 fn escape_pattern(pattern: &[u8]) -> Vec<u8> {
147 fn escape_pattern(pattern: &[u8]) -> Vec<u8> {
148 pattern
148 pattern
149 .iter()
149 .iter()
150 .flat_map(|c| RE_ESCAPE[*c as usize].clone())
150 .flat_map(|c| RE_ESCAPE[*c as usize].clone())
151 .collect()
151 .collect()
152 }
152 }
153
153
154 pub fn parse_pattern_syntax(
154 pub fn parse_pattern_syntax(
155 kind: &[u8],
155 kind: &[u8],
156 ) -> Result<PatternSyntax, PatternError> {
156 ) -> Result<PatternSyntax, PatternError> {
157 match kind {
157 match kind {
158 b"re:" => Ok(PatternSyntax::Regexp),
158 b"re:" => Ok(PatternSyntax::Regexp),
159 b"path:" => Ok(PatternSyntax::Path),
159 b"path:" => Ok(PatternSyntax::Path),
160 b"relpath:" => Ok(PatternSyntax::RelPath),
160 b"relpath:" => Ok(PatternSyntax::RelPath),
161 b"rootfilesin:" => Ok(PatternSyntax::RootFiles),
161 b"rootfilesin:" => Ok(PatternSyntax::RootFiles),
162 b"relglob:" => Ok(PatternSyntax::RelGlob),
162 b"relglob:" => Ok(PatternSyntax::RelGlob),
163 b"relre:" => Ok(PatternSyntax::RelRegexp),
163 b"relre:" => Ok(PatternSyntax::RelRegexp),
164 b"glob:" => Ok(PatternSyntax::Glob),
164 b"glob:" => Ok(PatternSyntax::Glob),
165 b"rootglob:" => Ok(PatternSyntax::RootGlob),
165 b"rootglob:" => Ok(PatternSyntax::RootGlob),
166 b"include:" => Ok(PatternSyntax::Include),
166 b"include:" => Ok(PatternSyntax::Include),
167 b"subinclude:" => Ok(PatternSyntax::SubInclude),
167 b"subinclude:" => Ok(PatternSyntax::SubInclude),
168 _ => Err(PatternError::UnsupportedSyntax(
168 _ => Err(PatternError::UnsupportedSyntax(
169 String::from_utf8_lossy(kind).to_string(),
169 String::from_utf8_lossy(kind).to_string(),
170 )),
170 )),
171 }
171 }
172 }
172 }
173
173
174 /// Builds the regex that corresponds to the given pattern.
174 /// Builds the regex that corresponds to the given pattern.
175 /// If within a `syntax: regexp` context, returns the pattern,
175 /// If within a `syntax: regexp` context, returns the pattern,
176 /// otherwise, returns the corresponding regex.
176 /// otherwise, returns the corresponding regex.
177 fn _build_single_regex(entry: &IgnorePattern) -> Vec<u8> {
177 fn _build_single_regex(entry: &IgnorePattern) -> Vec<u8> {
178 let IgnorePattern {
178 let IgnorePattern {
179 syntax, pattern, ..
179 syntax, pattern, ..
180 } = entry;
180 } = entry;
181 if pattern.is_empty() {
181 if pattern.is_empty() {
182 return vec![];
182 return vec![];
183 }
183 }
184 match syntax {
184 match syntax {
185 PatternSyntax::Regexp => pattern.to_owned(),
185 PatternSyntax::Regexp => pattern.to_owned(),
186 PatternSyntax::RelRegexp => {
186 PatternSyntax::RelRegexp => {
187 // The `regex` crate accepts `**` while `re2` and Python's `re`
187 // The `regex` crate accepts `**` while `re2` and Python's `re`
188 // do not. Checking for `*` correctly triggers the same error in all
188 // do not. Checking for `*` correctly triggers the same error in all
189 // engines.
189 // engines.
190 if pattern[0] == b'^'
190 if pattern[0] == b'^'
191 || pattern[0] == b'*'
191 || pattern[0] == b'*'
192 || pattern.starts_with(b".*")
192 || pattern.starts_with(b".*")
193 {
193 {
194 return pattern.to_owned();
194 return pattern.to_owned();
195 }
195 }
196 [&b".*"[..], pattern].concat()
196 [&b".*"[..], pattern].concat()
197 }
197 }
198 PatternSyntax::Path | PatternSyntax::RelPath => {
198 PatternSyntax::Path | PatternSyntax::RelPath => {
199 if pattern == b"." {
199 if pattern == b"." {
200 return vec![];
200 return vec![];
201 }
201 }
202 [escape_pattern(pattern).as_slice(), b"(?:/|$)"].concat()
202 [escape_pattern(pattern).as_slice(), b"(?:/|$)"].concat()
203 }
203 }
204 PatternSyntax::RootFiles => {
204 PatternSyntax::RootFiles => {
205 let mut res = if pattern == b"." {
205 let mut res = if pattern == b"." {
206 vec![]
206 vec![]
207 } else {
207 } else {
208 // Pattern is a directory name.
208 // Pattern is a directory name.
209 [escape_pattern(pattern).as_slice(), b"/"].concat()
209 [escape_pattern(pattern).as_slice(), b"/"].concat()
210 };
210 };
211
211
212 // Anything after the pattern must be a non-directory.
212 // Anything after the pattern must be a non-directory.
213 res.extend(b"[^/]+$");
213 res.extend(b"[^/]+$");
214 res
214 res
215 }
215 }
216 PatternSyntax::RelGlob => {
216 PatternSyntax::RelGlob => {
217 let glob_re = glob_to_re(pattern);
217 let glob_re = glob_to_re(pattern);
218 if let Some(rest) = glob_re.drop_prefix(b"[^/]*") {
218 if let Some(rest) = glob_re.drop_prefix(b"[^/]*") {
219 [b".*", rest, GLOB_SUFFIX].concat()
219 [b".*", rest, GLOB_SUFFIX].concat()
220 } else {
220 } else {
221 [b"(?:.*/)?", glob_re.as_slice(), GLOB_SUFFIX].concat()
221 [b"(?:.*/)?", glob_re.as_slice(), GLOB_SUFFIX].concat()
222 }
222 }
223 }
223 }
224 PatternSyntax::Glob | PatternSyntax::RootGlob => {
224 PatternSyntax::Glob | PatternSyntax::RootGlob => {
225 [glob_to_re(pattern).as_slice(), GLOB_SUFFIX].concat()
225 [glob_to_re(pattern).as_slice(), GLOB_SUFFIX].concat()
226 }
226 }
227 PatternSyntax::Include
227 PatternSyntax::Include
228 | PatternSyntax::SubInclude
228 | PatternSyntax::SubInclude
229 | PatternSyntax::ExpandedSubInclude(_) => unreachable!(),
229 | PatternSyntax::ExpandedSubInclude(_) => unreachable!(),
230 }
230 }
231 }
231 }
232
232
233 const GLOB_SPECIAL_CHARACTERS: [u8; 7] =
233 const GLOB_SPECIAL_CHARACTERS: [u8; 7] =
234 [b'*', b'?', b'[', b']', b'{', b'}', b'\\'];
234 [b'*', b'?', b'[', b']', b'{', b'}', b'\\'];
235
235
236 /// TODO support other platforms
236 /// TODO support other platforms
237 #[cfg(unix)]
237 #[cfg(unix)]
238 pub fn normalize_path_bytes(bytes: &[u8]) -> Vec<u8> {
238 pub fn normalize_path_bytes(bytes: &[u8]) -> Vec<u8> {
239 if bytes.is_empty() {
239 if bytes.is_empty() {
240 return b".".to_vec();
240 return b".".to_vec();
241 }
241 }
242 let sep = b'/';
242 let sep = b'/';
243
243
244 let mut initial_slashes = bytes.iter().take_while(|b| **b == sep).count();
244 let mut initial_slashes = bytes.iter().take_while(|b| **b == sep).count();
245 if initial_slashes > 2 {
245 if initial_slashes > 2 {
246 // POSIX allows one or two initial slashes, but treats three or more
246 // POSIX allows one or two initial slashes, but treats three or more
247 // as single slash.
247 // as single slash.
248 initial_slashes = 1;
248 initial_slashes = 1;
249 }
249 }
250 let components = bytes
250 let components = bytes
251 .split(|b| *b == sep)
251 .split(|b| *b == sep)
252 .filter(|c| !(c.is_empty() || c == b"."))
252 .filter(|c| !(c.is_empty() || c == b"."))
253 .fold(vec![], |mut acc, component| {
253 .fold(vec![], |mut acc, component| {
254 if component != b".."
254 if component != b".."
255 || (initial_slashes == 0 && acc.is_empty())
255 || (initial_slashes == 0 && acc.is_empty())
256 || (!acc.is_empty() && acc[acc.len() - 1] == b"..")
256 || (!acc.is_empty() && acc[acc.len() - 1] == b"..")
257 {
257 {
258 acc.push(component)
258 acc.push(component)
259 } else if !acc.is_empty() {
259 } else if !acc.is_empty() {
260 acc.pop();
260 acc.pop();
261 }
261 }
262 acc
262 acc
263 });
263 });
264 let mut new_bytes = components.join(&sep);
264 let mut new_bytes = components.join(&sep);
265
265
266 if initial_slashes > 0 {
266 if initial_slashes > 0 {
267 let mut buf: Vec<_> = (0..initial_slashes).map(|_| sep).collect();
267 let mut buf: Vec<_> = (0..initial_slashes).map(|_| sep).collect();
268 buf.extend(new_bytes);
268 buf.extend(new_bytes);
269 new_bytes = buf;
269 new_bytes = buf;
270 }
270 }
271 if new_bytes.is_empty() {
271 if new_bytes.is_empty() {
272 b".".to_vec()
272 b".".to_vec()
273 } else {
273 } else {
274 new_bytes
274 new_bytes
275 }
275 }
276 }
276 }
277
277
278 /// Wrapper function to `_build_single_regex` that short-circuits 'exact' globs
278 /// Wrapper function to `_build_single_regex` that short-circuits 'exact' globs
279 /// that don't need to be transformed into a regex.
279 /// that don't need to be transformed into a regex.
280 pub fn build_single_regex(
280 pub fn build_single_regex(
281 entry: &IgnorePattern,
281 entry: &IgnorePattern,
282 ) -> Result<Option<Vec<u8>>, PatternError> {
282 ) -> Result<Option<Vec<u8>>, PatternError> {
283 let IgnorePattern {
283 let IgnorePattern {
284 pattern, syntax, ..
284 pattern, syntax, ..
285 } = entry;
285 } = entry;
286 let pattern = match syntax {
286 let pattern = match syntax {
287 PatternSyntax::RootGlob
287 PatternSyntax::RootGlob
288 | PatternSyntax::Path
288 | PatternSyntax::Path
289 | PatternSyntax::RelGlob
289 | PatternSyntax::RelGlob
290 | PatternSyntax::RootFiles => normalize_path_bytes(&pattern),
290 | PatternSyntax::RootFiles => normalize_path_bytes(&pattern),
291 PatternSyntax::Include | PatternSyntax::SubInclude => {
291 PatternSyntax::Include | PatternSyntax::SubInclude => {
292 return Err(PatternError::NonRegexPattern(entry.clone()))
292 return Err(PatternError::NonRegexPattern(entry.clone()))
293 }
293 }
294 _ => pattern.to_owned(),
294 _ => pattern.to_owned(),
295 };
295 };
296 if *syntax == PatternSyntax::RootGlob
296 if *syntax == PatternSyntax::RootGlob
297 && !pattern.iter().any(|b| GLOB_SPECIAL_CHARACTERS.contains(b))
297 && !pattern.iter().any(|b| GLOB_SPECIAL_CHARACTERS.contains(b))
298 {
298 {
299 Ok(None)
299 Ok(None)
300 } else {
300 } else {
301 let mut entry = entry.clone();
301 let mut entry = entry.clone();
302 entry.pattern = pattern;
302 entry.pattern = pattern;
303 Ok(Some(_build_single_regex(&entry)))
303 Ok(Some(_build_single_regex(&entry)))
304 }
304 }
305 }
305 }
306
306
307 lazy_static! {
307 lazy_static! {
308 static ref SYNTAXES: FastHashMap<&'static [u8], &'static [u8]> = {
308 static ref SYNTAXES: FastHashMap<&'static [u8], &'static [u8]> = {
309 let mut m = FastHashMap::default();
309 let mut m = FastHashMap::default();
310
310
311 m.insert(b"re".as_ref(), b"relre:".as_ref());
311 m.insert(b"re".as_ref(), b"relre:".as_ref());
312 m.insert(b"regexp".as_ref(), b"relre:".as_ref());
312 m.insert(b"regexp".as_ref(), b"relre:".as_ref());
313 m.insert(b"glob".as_ref(), b"relglob:".as_ref());
313 m.insert(b"glob".as_ref(), b"relglob:".as_ref());
314 m.insert(b"rootglob".as_ref(), b"rootglob:".as_ref());
314 m.insert(b"rootglob".as_ref(), b"rootglob:".as_ref());
315 m.insert(b"include".as_ref(), b"include:".as_ref());
315 m.insert(b"include".as_ref(), b"include:".as_ref());
316 m.insert(b"subinclude".as_ref(), b"subinclude:".as_ref());
316 m.insert(b"subinclude".as_ref(), b"subinclude:".as_ref());
317 m.insert(b"path".as_ref(), b"path:".as_ref());
318 m.insert(b"rootfilesin".as_ref(), b"rootfilesin:".as_ref());
317 m
319 m
318 };
320 };
319 }
321 }
320
322
321 #[derive(Debug)]
323 #[derive(Debug)]
322 pub enum PatternFileWarning {
324 pub enum PatternFileWarning {
323 /// (file path, syntax bytes)
325 /// (file path, syntax bytes)
324 InvalidSyntax(PathBuf, Vec<u8>),
326 InvalidSyntax(PathBuf, Vec<u8>),
325 /// File path
327 /// File path
326 NoSuchFile(PathBuf),
328 NoSuchFile(PathBuf),
327 }
329 }
328
330
329 pub fn parse_pattern_file_contents(
331 pub fn parse_pattern_file_contents(
330 lines: &[u8],
332 lines: &[u8],
331 file_path: &Path,
333 file_path: &Path,
332 default_syntax_override: Option<&[u8]>,
334 default_syntax_override: Option<&[u8]>,
333 warn: bool,
335 warn: bool,
334 ) -> Result<(Vec<IgnorePattern>, Vec<PatternFileWarning>), PatternError> {
336 ) -> Result<(Vec<IgnorePattern>, Vec<PatternFileWarning>), PatternError> {
335 let comment_regex = Regex::new(r"((?:^|[^\\])(?:\\\\)*)#.*").unwrap();
337 let comment_regex = Regex::new(r"((?:^|[^\\])(?:\\\\)*)#.*").unwrap();
336
338
337 #[allow(clippy::trivial_regex)]
339 #[allow(clippy::trivial_regex)]
338 let comment_escape_regex = Regex::new(r"\\#").unwrap();
340 let comment_escape_regex = Regex::new(r"\\#").unwrap();
339 let mut inputs: Vec<IgnorePattern> = vec![];
341 let mut inputs: Vec<IgnorePattern> = vec![];
340 let mut warnings: Vec<PatternFileWarning> = vec![];
342 let mut warnings: Vec<PatternFileWarning> = vec![];
341
343
342 let mut current_syntax =
344 let mut current_syntax =
343 default_syntax_override.unwrap_or(b"relre:".as_ref());
345 default_syntax_override.unwrap_or(b"relre:".as_ref());
344
346
345 for (line_number, mut line) in lines.split(|c| *c == b'\n').enumerate() {
347 for (line_number, mut line) in lines.split(|c| *c == b'\n').enumerate() {
346 let line_number = line_number + 1;
348 let line_number = line_number + 1;
347
349
348 let line_buf;
350 let line_buf;
349 if line.contains(&b'#') {
351 if line.contains(&b'#') {
350 if let Some(cap) = comment_regex.captures(line) {
352 if let Some(cap) = comment_regex.captures(line) {
351 line = &line[..cap.get(1).unwrap().end()]
353 line = &line[..cap.get(1).unwrap().end()]
352 }
354 }
353 line_buf = comment_escape_regex.replace_all(line, NoExpand(b"#"));
355 line_buf = comment_escape_regex.replace_all(line, NoExpand(b"#"));
354 line = &line_buf;
356 line = &line_buf;
355 }
357 }
356
358
357 let mut line = line.trim_end();
359 let mut line = line.trim_end();
358
360
359 if line.is_empty() {
361 if line.is_empty() {
360 continue;
362 continue;
361 }
363 }
362
364
363 if let Some(syntax) = line.drop_prefix(b"syntax:") {
365 if let Some(syntax) = line.drop_prefix(b"syntax:") {
364 let syntax = syntax.trim();
366 let syntax = syntax.trim();
365
367
366 if let Some(rel_syntax) = SYNTAXES.get(syntax) {
368 if let Some(rel_syntax) = SYNTAXES.get(syntax) {
367 current_syntax = rel_syntax;
369 current_syntax = rel_syntax;
368 } else if warn {
370 } else if warn {
369 warnings.push(PatternFileWarning::InvalidSyntax(
371 warnings.push(PatternFileWarning::InvalidSyntax(
370 file_path.to_owned(),
372 file_path.to_owned(),
371 syntax.to_owned(),
373 syntax.to_owned(),
372 ));
374 ));
373 }
375 }
374 continue;
376 continue;
375 }
377 }
376
378
377 let mut line_syntax: &[u8] = &current_syntax;
379 let mut line_syntax: &[u8] = &current_syntax;
378
380
379 for (s, rels) in SYNTAXES.iter() {
381 for (s, rels) in SYNTAXES.iter() {
380 if let Some(rest) = line.drop_prefix(rels) {
382 if let Some(rest) = line.drop_prefix(rels) {
381 line_syntax = rels;
383 line_syntax = rels;
382 line = rest;
384 line = rest;
383 break;
385 break;
384 }
386 }
385 if let Some(rest) = line.drop_prefix(&[s, &b":"[..]].concat()) {
387 if let Some(rest) = line.drop_prefix(&[s, &b":"[..]].concat()) {
386 line_syntax = rels;
388 line_syntax = rels;
387 line = rest;
389 line = rest;
388 break;
390 break;
389 }
391 }
390 }
392 }
391
393
392 inputs.push(IgnorePattern::new(
394 inputs.push(IgnorePattern::new(
393 parse_pattern_syntax(&line_syntax).map_err(|e| match e {
395 parse_pattern_syntax(&line_syntax).map_err(|e| match e {
394 PatternError::UnsupportedSyntax(syntax) => {
396 PatternError::UnsupportedSyntax(syntax) => {
395 PatternError::UnsupportedSyntaxInFile(
397 PatternError::UnsupportedSyntaxInFile(
396 syntax,
398 syntax,
397 file_path.to_string_lossy().into(),
399 file_path.to_string_lossy().into(),
398 line_number,
400 line_number,
399 )
401 )
400 }
402 }
401 _ => e,
403 _ => e,
402 })?,
404 })?,
403 &line,
405 &line,
404 file_path,
406 file_path,
405 ));
407 ));
406 }
408 }
407 Ok((inputs, warnings))
409 Ok((inputs, warnings))
408 }
410 }
409
411
410 pub fn read_pattern_file(
412 pub fn read_pattern_file(
411 file_path: &Path,
413 file_path: &Path,
412 warn: bool,
414 warn: bool,
413 inspect_pattern_bytes: &mut impl FnMut(&[u8]),
415 inspect_pattern_bytes: &mut impl FnMut(&[u8]),
414 ) -> Result<(Vec<IgnorePattern>, Vec<PatternFileWarning>), PatternError> {
416 ) -> Result<(Vec<IgnorePattern>, Vec<PatternFileWarning>), PatternError> {
415 match std::fs::read(file_path) {
417 match std::fs::read(file_path) {
416 Ok(contents) => {
418 Ok(contents) => {
417 inspect_pattern_bytes(&contents);
419 inspect_pattern_bytes(&contents);
418 parse_pattern_file_contents(&contents, file_path, None, warn)
420 parse_pattern_file_contents(&contents, file_path, None, warn)
419 }
421 }
420 Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok((
422 Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok((
421 vec![],
423 vec![],
422 vec![PatternFileWarning::NoSuchFile(file_path.to_owned())],
424 vec![PatternFileWarning::NoSuchFile(file_path.to_owned())],
423 )),
425 )),
424 Err(e) => Err(e.into()),
426 Err(e) => Err(e.into()),
425 }
427 }
426 }
428 }
427
429
428 /// Represents an entry in an "ignore" file.
430 /// Represents an entry in an "ignore" file.
429 #[derive(Debug, Eq, PartialEq, Clone)]
431 #[derive(Debug, Eq, PartialEq, Clone)]
430 pub struct IgnorePattern {
432 pub struct IgnorePattern {
431 pub syntax: PatternSyntax,
433 pub syntax: PatternSyntax,
432 pub pattern: Vec<u8>,
434 pub pattern: Vec<u8>,
433 pub source: PathBuf,
435 pub source: PathBuf,
434 }
436 }
435
437
436 impl IgnorePattern {
438 impl IgnorePattern {
437 pub fn new(syntax: PatternSyntax, pattern: &[u8], source: &Path) -> Self {
439 pub fn new(syntax: PatternSyntax, pattern: &[u8], source: &Path) -> Self {
438 Self {
440 Self {
439 syntax,
441 syntax,
440 pattern: pattern.to_owned(),
442 pattern: pattern.to_owned(),
441 source: source.to_owned(),
443 source: source.to_owned(),
442 }
444 }
443 }
445 }
444 }
446 }
445
447
446 pub type PatternResult<T> = Result<T, PatternError>;
448 pub type PatternResult<T> = Result<T, PatternError>;
447
449
448 /// Wrapper for `read_pattern_file` that also recursively expands `include:`
450 /// Wrapper for `read_pattern_file` that also recursively expands `include:`
449 /// and `subinclude:` patterns.
451 /// and `subinclude:` patterns.
450 ///
452 ///
451 /// The former are expanded in place, while `PatternSyntax::ExpandedSubInclude`
453 /// The former are expanded in place, while `PatternSyntax::ExpandedSubInclude`
452 /// is used for the latter to form a tree of patterns.
454 /// is used for the latter to form a tree of patterns.
453 pub fn get_patterns_from_file(
455 pub fn get_patterns_from_file(
454 pattern_file: &Path,
456 pattern_file: &Path,
455 root_dir: &Path,
457 root_dir: &Path,
456 inspect_pattern_bytes: &mut impl FnMut(&[u8]),
458 inspect_pattern_bytes: &mut impl FnMut(&[u8]),
457 ) -> PatternResult<(Vec<IgnorePattern>, Vec<PatternFileWarning>)> {
459 ) -> PatternResult<(Vec<IgnorePattern>, Vec<PatternFileWarning>)> {
458 let (patterns, mut warnings) =
460 let (patterns, mut warnings) =
459 read_pattern_file(pattern_file, true, inspect_pattern_bytes)?;
461 read_pattern_file(pattern_file, true, inspect_pattern_bytes)?;
460 let patterns = patterns
462 let patterns = patterns
461 .into_iter()
463 .into_iter()
462 .flat_map(|entry| -> PatternResult<_> {
464 .flat_map(|entry| -> PatternResult<_> {
463 Ok(match &entry.syntax {
465 Ok(match &entry.syntax {
464 PatternSyntax::Include => {
466 PatternSyntax::Include => {
465 let inner_include =
467 let inner_include =
466 root_dir.join(get_path_from_bytes(&entry.pattern));
468 root_dir.join(get_path_from_bytes(&entry.pattern));
467 let (inner_pats, inner_warnings) = get_patterns_from_file(
469 let (inner_pats, inner_warnings) = get_patterns_from_file(
468 &inner_include,
470 &inner_include,
469 root_dir,
471 root_dir,
470 inspect_pattern_bytes,
472 inspect_pattern_bytes,
471 )?;
473 )?;
472 warnings.extend(inner_warnings);
474 warnings.extend(inner_warnings);
473 inner_pats
475 inner_pats
474 }
476 }
475 PatternSyntax::SubInclude => {
477 PatternSyntax::SubInclude => {
476 let mut sub_include = SubInclude::new(
478 let mut sub_include = SubInclude::new(
477 &root_dir,
479 &root_dir,
478 &entry.pattern,
480 &entry.pattern,
479 &entry.source,
481 &entry.source,
480 )?;
482 )?;
481 let (inner_patterns, inner_warnings) =
483 let (inner_patterns, inner_warnings) =
482 get_patterns_from_file(
484 get_patterns_from_file(
483 &sub_include.path,
485 &sub_include.path,
484 &sub_include.root,
486 &sub_include.root,
485 inspect_pattern_bytes,
487 inspect_pattern_bytes,
486 )?;
488 )?;
487 sub_include.included_patterns = inner_patterns;
489 sub_include.included_patterns = inner_patterns;
488 warnings.extend(inner_warnings);
490 warnings.extend(inner_warnings);
489 vec![IgnorePattern {
491 vec![IgnorePattern {
490 syntax: PatternSyntax::ExpandedSubInclude(Box::new(
492 syntax: PatternSyntax::ExpandedSubInclude(Box::new(
491 sub_include,
493 sub_include,
492 )),
494 )),
493 ..entry
495 ..entry
494 }]
496 }]
495 }
497 }
496 _ => vec![entry],
498 _ => vec![entry],
497 })
499 })
498 })
500 })
499 .flatten()
501 .flatten()
500 .collect();
502 .collect();
501
503
502 Ok((patterns, warnings))
504 Ok((patterns, warnings))
503 }
505 }
504
506
505 /// Holds all the information needed to handle a `subinclude:` pattern.
507 /// Holds all the information needed to handle a `subinclude:` pattern.
506 #[derive(Debug, PartialEq, Eq, Clone)]
508 #[derive(Debug, PartialEq, Eq, Clone)]
507 pub struct SubInclude {
509 pub struct SubInclude {
508 /// Will be used for repository (hg) paths that start with this prefix.
510 /// Will be used for repository (hg) paths that start with this prefix.
509 /// It is relative to the current working directory, so comparing against
511 /// It is relative to the current working directory, so comparing against
510 /// repository paths is painless.
512 /// repository paths is painless.
511 pub prefix: HgPathBuf,
513 pub prefix: HgPathBuf,
512 /// The file itself, containing the patterns
514 /// The file itself, containing the patterns
513 pub path: PathBuf,
515 pub path: PathBuf,
514 /// Folder in the filesystem where it applies
516 /// Folder in the filesystem where it applies
515 pub root: PathBuf,
517 pub root: PathBuf,
516
518
517 pub included_patterns: Vec<IgnorePattern>,
519 pub included_patterns: Vec<IgnorePattern>,
518 }
520 }
519
521
520 impl SubInclude {
522 impl SubInclude {
521 pub fn new(
523 pub fn new(
522 root_dir: &Path,
524 root_dir: &Path,
523 pattern: &[u8],
525 pattern: &[u8],
524 source: &Path,
526 source: &Path,
525 ) -> Result<SubInclude, HgPathError> {
527 ) -> Result<SubInclude, HgPathError> {
526 let normalized_source =
528 let normalized_source =
527 normalize_path_bytes(&get_bytes_from_path(source));
529 normalize_path_bytes(&get_bytes_from_path(source));
528
530
529 let source_root = get_path_from_bytes(&normalized_source);
531 let source_root = get_path_from_bytes(&normalized_source);
530 let source_root =
532 let source_root =
531 source_root.parent().unwrap_or_else(|| source_root.deref());
533 source_root.parent().unwrap_or_else(|| source_root.deref());
532
534
533 let path = source_root.join(get_path_from_bytes(pattern));
535 let path = source_root.join(get_path_from_bytes(pattern));
534 let new_root = path.parent().unwrap_or_else(|| path.deref());
536 let new_root = path.parent().unwrap_or_else(|| path.deref());
535
537
536 let prefix = canonical_path(root_dir, root_dir, new_root)?;
538 let prefix = canonical_path(root_dir, root_dir, new_root)?;
537
539
538 Ok(Self {
540 Ok(Self {
539 prefix: path_to_hg_path_buf(prefix).and_then(|mut p| {
541 prefix: path_to_hg_path_buf(prefix).and_then(|mut p| {
540 if !p.is_empty() {
542 if !p.is_empty() {
541 p.push_byte(b'/');
543 p.push_byte(b'/');
542 }
544 }
543 Ok(p)
545 Ok(p)
544 })?,
546 })?,
545 path: path.to_owned(),
547 path: path.to_owned(),
546 root: new_root.to_owned(),
548 root: new_root.to_owned(),
547 included_patterns: Vec::new(),
549 included_patterns: Vec::new(),
548 })
550 })
549 }
551 }
550 }
552 }
551
553
552 /// Separate and pre-process subincludes from other patterns for the "ignore"
554 /// Separate and pre-process subincludes from other patterns for the "ignore"
553 /// phase.
555 /// phase.
554 pub fn filter_subincludes(
556 pub fn filter_subincludes(
555 ignore_patterns: Vec<IgnorePattern>,
557 ignore_patterns: Vec<IgnorePattern>,
556 ) -> Result<(Vec<Box<SubInclude>>, Vec<IgnorePattern>), HgPathError> {
558 ) -> Result<(Vec<Box<SubInclude>>, Vec<IgnorePattern>), HgPathError> {
557 let mut subincludes = vec![];
559 let mut subincludes = vec![];
558 let mut others = vec![];
560 let mut others = vec![];
559
561
560 for pattern in ignore_patterns {
562 for pattern in ignore_patterns {
561 if let PatternSyntax::ExpandedSubInclude(sub_include) = pattern.syntax
563 if let PatternSyntax::ExpandedSubInclude(sub_include) = pattern.syntax
562 {
564 {
563 subincludes.push(sub_include);
565 subincludes.push(sub_include);
564 } else {
566 } else {
565 others.push(pattern)
567 others.push(pattern)
566 }
568 }
567 }
569 }
568 Ok((subincludes, others))
570 Ok((subincludes, others))
569 }
571 }
570
572
571 #[cfg(test)]
573 #[cfg(test)]
572 mod tests {
574 mod tests {
573 use super::*;
575 use super::*;
574 use pretty_assertions::assert_eq;
576 use pretty_assertions::assert_eq;
575
577
576 #[test]
578 #[test]
577 fn escape_pattern_test() {
579 fn escape_pattern_test() {
578 let untouched =
580 let untouched =
579 br#"!"%',/0123456789:;<=>@ABCDEFGHIJKLMNOPQRSTUVWXYZ_`abcdefghijklmnopqrstuvwxyz"#;
581 br#"!"%',/0123456789:;<=>@ABCDEFGHIJKLMNOPQRSTUVWXYZ_`abcdefghijklmnopqrstuvwxyz"#;
580 assert_eq!(escape_pattern(untouched), untouched.to_vec());
582 assert_eq!(escape_pattern(untouched), untouched.to_vec());
581 // All escape codes
583 // All escape codes
582 assert_eq!(
584 assert_eq!(
583 escape_pattern(br#"()[]{}?*+-|^$\\.&~# \t\n\r\v\f"#),
585 escape_pattern(br#"()[]{}?*+-|^$\\.&~# \t\n\r\v\f"#),
584 br#"\(\)\[\]\{\}\?\*\+\-\|\^\$\\\\\.\&\~\#\ \\t\\n\\r\\v\\f"#
586 br#"\(\)\[\]\{\}\?\*\+\-\|\^\$\\\\\.\&\~\#\ \\t\\n\\r\\v\\f"#
585 .to_vec()
587 .to_vec()
586 );
588 );
587 }
589 }
588
590
589 #[test]
591 #[test]
590 fn glob_test() {
592 fn glob_test() {
591 assert_eq!(glob_to_re(br#"?"#), br#"."#);
593 assert_eq!(glob_to_re(br#"?"#), br#"."#);
592 assert_eq!(glob_to_re(br#"*"#), br#"[^/]*"#);
594 assert_eq!(glob_to_re(br#"*"#), br#"[^/]*"#);
593 assert_eq!(glob_to_re(br#"**"#), br#".*"#);
595 assert_eq!(glob_to_re(br#"**"#), br#".*"#);
594 assert_eq!(glob_to_re(br#"**/a"#), br#"(?:.*/)?a"#);
596 assert_eq!(glob_to_re(br#"**/a"#), br#"(?:.*/)?a"#);
595 assert_eq!(glob_to_re(br#"a/**/b"#), br#"a/(?:.*/)?b"#);
597 assert_eq!(glob_to_re(br#"a/**/b"#), br#"a/(?:.*/)?b"#);
596 assert_eq!(glob_to_re(br#"[a*?!^][^b][!c]"#), br#"[a*?!^][\^b][^c]"#);
598 assert_eq!(glob_to_re(br#"[a*?!^][^b][!c]"#), br#"[a*?!^][\^b][^c]"#);
597 assert_eq!(glob_to_re(br#"{a,b}"#), br#"(?:a|b)"#);
599 assert_eq!(glob_to_re(br#"{a,b}"#), br#"(?:a|b)"#);
598 assert_eq!(glob_to_re(br#".\*\?"#), br#"\.\*\?"#);
600 assert_eq!(glob_to_re(br#".\*\?"#), br#"\.\*\?"#);
599 }
601 }
600
602
601 #[test]
603 #[test]
602 fn test_parse_pattern_file_contents() {
604 fn test_parse_pattern_file_contents() {
603 let lines = b"syntax: glob\n*.elc";
605 let lines = b"syntax: glob\n*.elc";
604
606
605 assert_eq!(
607 assert_eq!(
606 parse_pattern_file_contents(
608 parse_pattern_file_contents(
607 lines,
609 lines,
608 Path::new("file_path"),
610 Path::new("file_path"),
609 None,
611 None,
610 false
612 false
611 )
613 )
612 .unwrap()
614 .unwrap()
613 .0,
615 .0,
614 vec![IgnorePattern::new(
616 vec![IgnorePattern::new(
615 PatternSyntax::RelGlob,
617 PatternSyntax::RelGlob,
616 b"*.elc",
618 b"*.elc",
617 Path::new("file_path")
619 Path::new("file_path")
618 )],
620 )],
619 );
621 );
620
622
621 let lines = b"syntax: include\nsyntax: glob";
623 let lines = b"syntax: include\nsyntax: glob";
622
624
623 assert_eq!(
625 assert_eq!(
624 parse_pattern_file_contents(
626 parse_pattern_file_contents(
625 lines,
627 lines,
626 Path::new("file_path"),
628 Path::new("file_path"),
627 None,
629 None,
628 false
630 false
629 )
631 )
630 .unwrap()
632 .unwrap()
631 .0,
633 .0,
632 vec![]
634 vec![]
633 );
635 );
634 let lines = b"glob:**.o";
636 let lines = b"glob:**.o";
635 assert_eq!(
637 assert_eq!(
636 parse_pattern_file_contents(
638 parse_pattern_file_contents(
637 lines,
639 lines,
638 Path::new("file_path"),
640 Path::new("file_path"),
639 None,
641 None,
640 false
642 false
641 )
643 )
642 .unwrap()
644 .unwrap()
643 .0,
645 .0,
644 vec![IgnorePattern::new(
646 vec![IgnorePattern::new(
645 PatternSyntax::RelGlob,
647 PatternSyntax::RelGlob,
646 b"**.o",
648 b"**.o",
647 Path::new("file_path")
649 Path::new("file_path")
648 )]
650 )]
649 );
651 );
650 }
652 }
651
653
652 #[test]
654 #[test]
653 fn test_build_single_regex() {
655 fn test_build_single_regex() {
654 assert_eq!(
656 assert_eq!(
655 build_single_regex(&IgnorePattern::new(
657 build_single_regex(&IgnorePattern::new(
656 PatternSyntax::RelGlob,
658 PatternSyntax::RelGlob,
657 b"rust/target/",
659 b"rust/target/",
658 Path::new("")
660 Path::new("")
659 ))
661 ))
660 .unwrap(),
662 .unwrap(),
661 Some(br"(?:.*/)?rust/target(?:/|$)".to_vec()),
663 Some(br"(?:.*/)?rust/target(?:/|$)".to_vec()),
662 );
664 );
663 assert_eq!(
665 assert_eq!(
664 build_single_regex(&IgnorePattern::new(
666 build_single_regex(&IgnorePattern::new(
665 PatternSyntax::Regexp,
667 PatternSyntax::Regexp,
666 br"rust/target/\d+",
668 br"rust/target/\d+",
667 Path::new("")
669 Path::new("")
668 ))
670 ))
669 .unwrap(),
671 .unwrap(),
670 Some(br"rust/target/\d+".to_vec()),
672 Some(br"rust/target/\d+".to_vec()),
671 );
673 );
672 }
674 }
673
675
674 #[test]
676 #[test]
675 fn test_build_single_regex_shortcut() {
677 fn test_build_single_regex_shortcut() {
676 assert_eq!(
678 assert_eq!(
677 build_single_regex(&IgnorePattern::new(
679 build_single_regex(&IgnorePattern::new(
678 PatternSyntax::RootGlob,
680 PatternSyntax::RootGlob,
679 b"",
681 b"",
680 Path::new("")
682 Path::new("")
681 ))
683 ))
682 .unwrap(),
684 .unwrap(),
683 None,
685 None,
684 );
686 );
685 assert_eq!(
687 assert_eq!(
686 build_single_regex(&IgnorePattern::new(
688 build_single_regex(&IgnorePattern::new(
687 PatternSyntax::RootGlob,
689 PatternSyntax::RootGlob,
688 b"whatever",
690 b"whatever",
689 Path::new("")
691 Path::new("")
690 ))
692 ))
691 .unwrap(),
693 .unwrap(),
692 None,
694 None,
693 );
695 );
694 assert_eq!(
696 assert_eq!(
695 build_single_regex(&IgnorePattern::new(
697 build_single_regex(&IgnorePattern::new(
696 PatternSyntax::RootGlob,
698 PatternSyntax::RootGlob,
697 b"*.o",
699 b"*.o",
698 Path::new("")
700 Path::new("")
699 ))
701 ))
700 .unwrap(),
702 .unwrap(),
701 Some(br"[^/]*\.o(?:/|$)".to_vec()),
703 Some(br"[^/]*\.o(?:/|$)".to_vec()),
702 );
704 );
703 }
705 }
704 }
706 }
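With `path:` and `rootfilesin:` now registered in `SYNTAXES`, the regexes that `_build_single_regex` produces for these two narrow-supported syntaxes differ only in what they allow after the directory name. A small standalone sketch of the two behaviours follows; it assumes the `regex` crate and anchors the regexes at the start purely for demonstration (in hg-core the anchoring is handled by the matcher layer):

// Standalone sketch of the regexes built for path: and rootfilesin:
// patterns (anchored here only for illustration).
use regex::bytes::Regex;

fn main() {
    // path:foo/bar -> "foo/bar(?:/|$)": the directory itself and anything
    // below it, but not a sibling that merely shares the prefix.
    let path_re = Regex::new(r"^(?:foo/bar(?:/|$))").unwrap();
    assert!(path_re.is_match(b"foo/bar"));
    assert!(path_re.is_match(b"foo/bar/baz.rs"));
    assert!(!path_re.is_match(b"foo/barbaz"));

    // rootfilesin:foo/bar -> "foo/bar/[^/]+$": files directly inside the
    // directory, but nothing in its subdirectories.
    let rootfiles_re = Regex::new(r"^(?:foo/bar/[^/]+$)").unwrap();
    assert!(rootfiles_re.is_match(b"foo/bar/baz.rs"));
    assert!(!rootfiles_re.is_match(b"foo/bar/sub/baz.rs"));
}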
@@ -1,138 +1,139 @@ rust/hg-core/src/lib.rs
1 // Copyright 2018-2020 Georges Racinet <georges.racinet@octobus.net>
1 // Copyright 2018-2020 Georges Racinet <georges.racinet@octobus.net>
2 // and Mercurial contributors
2 // and Mercurial contributors
3 //
3 //
4 // This software may be used and distributed according to the terms of the
4 // This software may be used and distributed according to the terms of the
5 // GNU General Public License version 2 or any later version.
5 // GNU General Public License version 2 or any later version.
6
6
7 mod ancestors;
7 mod ancestors;
8 pub mod dagops;
8 pub mod dagops;
9 pub mod errors;
9 pub mod errors;
10 pub mod narrow;
10 pub mod sparse;
11 pub mod sparse;
11 pub use ancestors::{AncestorsIterator, MissingAncestors};
12 pub use ancestors::{AncestorsIterator, MissingAncestors};
12 pub mod dirstate;
13 pub mod dirstate;
13 pub mod dirstate_tree;
14 pub mod dirstate_tree;
14 pub mod discovery;
15 pub mod discovery;
15 pub mod exit_codes;
16 pub mod exit_codes;
16 pub mod requirements;
17 pub mod requirements;
17 pub mod testing; // unconditionally built, for use from integration tests
18 pub mod testing; // unconditionally built, for use from integration tests
18 pub use dirstate::{
19 pub use dirstate::{
19 dirs_multiset::{DirsMultiset, DirsMultisetIter},
20 dirs_multiset::{DirsMultiset, DirsMultisetIter},
20 status::{
21 status::{
21 BadMatch, BadType, DirstateStatus, HgPathCow, StatusError,
22 BadMatch, BadType, DirstateStatus, HgPathCow, StatusError,
22 StatusOptions,
23 StatusOptions,
23 },
24 },
24 DirstateEntry, DirstateParents, EntryState,
25 DirstateEntry, DirstateParents, EntryState,
25 };
26 };
26 pub mod copy_tracing;
27 pub mod copy_tracing;
27 mod filepatterns;
28 mod filepatterns;
28 pub mod matchers;
29 pub mod matchers;
29 pub mod repo;
30 pub mod repo;
30 pub mod revlog;
31 pub mod revlog;
31 pub use revlog::*;
32 pub use revlog::*;
32 pub mod config;
33 pub mod config;
33 pub mod lock;
34 pub mod lock;
34 pub mod logging;
35 pub mod logging;
35 pub mod operations;
36 pub mod operations;
36 pub mod revset;
37 pub mod revset;
37 pub mod utils;
38 pub mod utils;
38 pub mod vfs;
39 pub mod vfs;
39
40
40 use crate::utils::hg_path::{HgPathBuf, HgPathError};
41 use crate::utils::hg_path::{HgPathBuf, HgPathError};
41 pub use filepatterns::{
42 pub use filepatterns::{
42 parse_pattern_syntax, read_pattern_file, IgnorePattern,
43 parse_pattern_syntax, read_pattern_file, IgnorePattern,
43 PatternFileWarning, PatternSyntax,
44 PatternFileWarning, PatternSyntax,
44 };
45 };
45 use std::collections::HashMap;
46 use std::collections::HashMap;
46 use std::fmt;
47 use std::fmt;
47 use twox_hash::RandomXxHashBuilder64;
48 use twox_hash::RandomXxHashBuilder64;
48
49
49 /// This is a contract between the `micro-timer` crate and us, to expose
50 /// This is a contract between the `micro-timer` crate and us, to expose
50 /// the `log` crate as `crate::log`.
51 /// the `log` crate as `crate::log`.
51 use log;
52 use log;
52
53
53 pub type LineNumber = usize;
54 pub type LineNumber = usize;
54
55
55 /// Rust's default hasher is too slow because it tries to prevent collision
56 /// Rust's default hasher is too slow because it tries to prevent collision
56 /// attacks. We are not concerned about those: if an ill-minded person has
57 /// attacks. We are not concerned about those: if an ill-minded person has
57 /// write access to your repository, you have other issues.
58 /// write access to your repository, you have other issues.
58 pub type FastHashMap<K, V> = HashMap<K, V, RandomXxHashBuilder64>;
59 pub type FastHashMap<K, V> = HashMap<K, V, RandomXxHashBuilder64>;
59
60
60 // TODO: should this be the default `FastHashMap` for all of hg-core, not just
61 // TODO: should this be the default `FastHashMap` for all of hg-core, not just
61 // dirstate_tree? How does XxHash compare with AHash, hashbrown’s default?
62 // dirstate_tree? How does XxHash compare with AHash, hashbrown’s default?
62 pub type FastHashbrownMap<K, V> =
63 pub type FastHashbrownMap<K, V> =
63 hashbrown::HashMap<K, V, RandomXxHashBuilder64>;
64 hashbrown::HashMap<K, V, RandomXxHashBuilder64>;
64
65
65 #[derive(Debug, PartialEq)]
66 #[derive(Debug, PartialEq)]
66 pub enum DirstateMapError {
67 pub enum DirstateMapError {
67 PathNotFound(HgPathBuf),
68 PathNotFound(HgPathBuf),
68 EmptyPath,
69 EmptyPath,
69 InvalidPath(HgPathError),
70 InvalidPath(HgPathError),
70 }
71 }
71
72
72 impl fmt::Display for DirstateMapError {
73 impl fmt::Display for DirstateMapError {
73 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
74 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
74 match self {
75 match self {
75 DirstateMapError::PathNotFound(_) => {
76 DirstateMapError::PathNotFound(_) => {
76 f.write_str("expected a value, found none")
77 f.write_str("expected a value, found none")
77 }
78 }
78 DirstateMapError::EmptyPath => {
79 DirstateMapError::EmptyPath => {
79 f.write_str("Overflow in dirstate.")
80 f.write_str("Overflow in dirstate.")
80 }
81 }
81 DirstateMapError::InvalidPath(path_error) => path_error.fmt(f),
82 DirstateMapError::InvalidPath(path_error) => path_error.fmt(f),
82 }
83 }
83 }
84 }
84 }
85 }
85
86
86 #[derive(Debug, derive_more::From)]
87 #[derive(Debug, derive_more::From)]
87 pub enum DirstateError {
88 pub enum DirstateError {
88 Map(DirstateMapError),
89 Map(DirstateMapError),
89 Common(errors::HgError),
90 Common(errors::HgError),
90 }
91 }
91
92
92 impl fmt::Display for DirstateError {
93 impl fmt::Display for DirstateError {
93 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
94 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
94 match self {
95 match self {
95 DirstateError::Map(error) => error.fmt(f),
96 DirstateError::Map(error) => error.fmt(f),
96 DirstateError::Common(error) => error.fmt(f),
97 DirstateError::Common(error) => error.fmt(f),
97 }
98 }
98 }
99 }
99 }
100 }
100
101
101 #[derive(Debug, derive_more::From)]
102 #[derive(Debug, derive_more::From)]
102 pub enum PatternError {
103 pub enum PatternError {
103 #[from]
104 #[from]
104 Path(HgPathError),
105 Path(HgPathError),
105 UnsupportedSyntax(String),
106 UnsupportedSyntax(String),
106 UnsupportedSyntaxInFile(String, String, usize),
107 UnsupportedSyntaxInFile(String, String, usize),
107 TooLong(usize),
108 TooLong(usize),
108 #[from]
109 #[from]
109 IO(std::io::Error),
110 IO(std::io::Error),
110 /// Needed a pattern that can be turned into a regex but got one that
111 /// Needed a pattern that can be turned into a regex but got one that
111 /// can't. This should only happen through programmer error.
112 /// can't. This should only happen through programmer error.
112 NonRegexPattern(IgnorePattern),
113 NonRegexPattern(IgnorePattern),
113 }
114 }
114
115
115 impl fmt::Display for PatternError {
116 impl fmt::Display for PatternError {
116 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
117 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
117 match self {
118 match self {
118 PatternError::UnsupportedSyntax(syntax) => {
119 PatternError::UnsupportedSyntax(syntax) => {
119 write!(f, "Unsupported syntax {}", syntax)
120 write!(f, "Unsupported syntax {}", syntax)
120 }
121 }
121 PatternError::UnsupportedSyntaxInFile(syntax, file_path, line) => {
122 PatternError::UnsupportedSyntaxInFile(syntax, file_path, line) => {
122 write!(
123 write!(
123 f,
124 f,
124 "{}:{}: unsupported syntax {}",
125 "{}:{}: unsupported syntax {}",
125 file_path, line, syntax
126 file_path, line, syntax
126 )
127 )
127 }
128 }
128 PatternError::TooLong(size) => {
129 PatternError::TooLong(size) => {
129 write!(f, "matcher pattern is too long ({} bytes)", size)
130 write!(f, "matcher pattern is too long ({} bytes)", size)
130 }
131 }
131 PatternError::IO(error) => error.fmt(f),
132 PatternError::IO(error) => error.fmt(f),
132 PatternError::Path(error) => error.fmt(f),
133 PatternError::Path(error) => error.fmt(f),
133 PatternError::NonRegexPattern(pattern) => {
134 PatternError::NonRegexPattern(pattern) => {
134 write!(f, "'{:?}' cannot be turned into a regex", pattern)
135 write!(f, "'{:?}' cannot be turned into a regex", pattern)
135 }
136 }
136 }
137 }
137 }
138 }
138 }
139 }
@@ -1,333 +1,338 @@ rust/hg-core/src/sparse.rs
1 use std::{collections::HashSet, path::Path};
1 use std::{collections::HashSet, path::Path};
2
2
3 use format_bytes::{write_bytes, DisplayBytes};
3 use format_bytes::{write_bytes, DisplayBytes};
4
4
5 use crate::{
5 use crate::{
6 errors::HgError,
6 errors::HgError,
7 filepatterns::parse_pattern_file_contents,
7 filepatterns::parse_pattern_file_contents,
8 matchers::{
8 matchers::{
9 AlwaysMatcher, DifferenceMatcher, IncludeMatcher, Matcher,
9 AlwaysMatcher, DifferenceMatcher, IncludeMatcher, Matcher,
10 UnionMatcher,
10 UnionMatcher,
11 },
11 },
12 operations::cat,
12 operations::cat,
13 repo::Repo,
13 repo::Repo,
14 requirements::SPARSE_REQUIREMENT,
14 requirements::SPARSE_REQUIREMENT,
15 utils::{hg_path::HgPath, SliceExt},
15 utils::{hg_path::HgPath, SliceExt},
16 IgnorePattern, PatternError, PatternFileWarning, PatternSyntax, Revision,
16 IgnorePattern, PatternError, PatternFileWarning, PatternSyntax, Revision,
17 NULL_REVISION,
17 NULL_REVISION,
18 };
18 };
19
19
20 /// Command which is triggering the config read
20 /// Command which is triggering the config read
21 #[derive(Copy, Clone, Debug)]
21 #[derive(Copy, Clone, Debug)]
22 pub enum SparseConfigContext {
22 pub enum SparseConfigContext {
23 Sparse,
23 Sparse,
24 Narrow,
24 Narrow,
25 }
25 }
26
26
27 impl DisplayBytes for SparseConfigContext {
27 impl DisplayBytes for SparseConfigContext {
28 fn display_bytes(
28 fn display_bytes(
29 &self,
29 &self,
30 output: &mut dyn std::io::Write,
30 output: &mut dyn std::io::Write,
31 ) -> std::io::Result<()> {
31 ) -> std::io::Result<()> {
32 match self {
32 match self {
33 SparseConfigContext::Sparse => write_bytes!(output, b"sparse"),
33 SparseConfigContext::Sparse => write_bytes!(output, b"sparse"),
34 SparseConfigContext::Narrow => write_bytes!(output, b"narrow"),
34 SparseConfigContext::Narrow => write_bytes!(output, b"narrow"),
35 }
35 }
36 }
36 }
37 }
37 }
38
38
39 /// Possible warnings when reading sparse configuration
39 /// Possible warnings when reading sparse configuration
40 #[derive(Debug, derive_more::From)]
40 #[derive(Debug, derive_more::From)]
41 pub enum SparseWarning {
41 pub enum SparseWarning {
42 /// Warns about improper paths that start with "/"
42 /// Warns about improper paths that start with "/"
43 RootWarning {
43 RootWarning {
44 context: SparseConfigContext,
44 context: SparseConfigContext,
45 line: Vec<u8>,
45 line: Vec<u8>,
46 },
46 },
47 /// Warns about a profile missing from the given changelog revision
47 /// Warns about a profile missing from the given changelog revision
48 ProfileNotFound { profile: Vec<u8>, rev: Revision },
48 ProfileNotFound { profile: Vec<u8>, rev: Revision },
49 #[from]
49 #[from]
50 Pattern(PatternFileWarning),
50 Pattern(PatternFileWarning),
51 }
51 }
52
52
53 /// Parsed sparse config
53 /// Parsed sparse config
54 #[derive(Debug, Default)]
54 #[derive(Debug, Default)]
55 pub struct SparseConfig {
55 pub struct SparseConfig {
56 // Line-separated
56 // Line-separated
57 includes: Vec<u8>,
57 pub(crate) includes: Vec<u8>,
58 // Line-separated
58 // Line-separated
59 excludes: Vec<u8>,
59 pub(crate) excludes: Vec<u8>,
60 profiles: HashSet<Vec<u8>>,
60 pub(crate) profiles: HashSet<Vec<u8>>,
61 warnings: Vec<SparseWarning>,
61 pub(crate) warnings: Vec<SparseWarning>,
62 }
62 }
63
63
64 /// All possible errors when reading sparse config
64 /// All possible errors when reading sparse/narrow config
65 #[derive(Debug, derive_more::From)]
65 #[derive(Debug, derive_more::From)]
66 pub enum SparseConfigError {
66 pub enum SparseConfigError {
67 IncludesAfterExcludes {
67 IncludesAfterExcludes {
68 context: SparseConfigContext,
68 context: SparseConfigContext,
69 },
69 },
70 EntryOutsideSection {
70 EntryOutsideSection {
71 context: SparseConfigContext,
71 context: SparseConfigContext,
72 line: Vec<u8>,
72 line: Vec<u8>,
73 },
73 },
74 /// Narrow config does not support '%include' directives
75 IncludesInNarrow,
76 /// An invalid pattern prefix was given to the narrow spec. Includes the
77 /// entire pattern for context.
78 InvalidNarrowPrefix(Vec<u8>),
74 #[from]
79 #[from]
75 HgError(HgError),
80 HgError(HgError),
76 #[from]
81 #[from]
77 PatternError(PatternError),
82 PatternError(PatternError),
78 }
83 }
79
84
80 /// Parse sparse config file content.
85 /// Parse sparse config file content.
81 fn parse_config(
86 pub(crate) fn parse_config(
82 raw: &[u8],
87 raw: &[u8],
83 context: SparseConfigContext,
88 context: SparseConfigContext,
84 ) -> Result<SparseConfig, SparseConfigError> {
89 ) -> Result<SparseConfig, SparseConfigError> {
85 let mut includes = vec![];
90 let mut includes = vec![];
86 let mut excludes = vec![];
91 let mut excludes = vec![];
87 let mut profiles = HashSet::new();
92 let mut profiles = HashSet::new();
88 let mut warnings = vec![];
93 let mut warnings = vec![];
89
94
90 #[derive(PartialEq, Eq)]
95 #[derive(PartialEq, Eq)]
91 enum Current {
96 enum Current {
92 Includes,
97 Includes,
93 Excludes,
98 Excludes,
94 None,
99 None,
95 };
100 };
96
101
97 let mut current = Current::None;
102 let mut current = Current::None;
98 let mut in_section = false;
103 let mut in_section = false;
99
104
100 for line in raw.split(|c| *c == b'\n') {
105 for line in raw.split(|c| *c == b'\n') {
101 let line = line.trim();
106 let line = line.trim();
102 if line.is_empty() || line[0] == b'#' {
107 if line.is_empty() || line[0] == b'#' {
103 // empty or comment line, skip
108 // empty or comment line, skip
104 continue;
109 continue;
105 }
110 }
106 if line.starts_with(b"%include ") {
111 if line.starts_with(b"%include ") {
107 let profile = line[b"%include ".len()..].trim();
112 let profile = line[b"%include ".len()..].trim();
108 if !profile.is_empty() {
113 if !profile.is_empty() {
109 profiles.insert(profile.into());
114 profiles.insert(profile.into());
110 }
115 }
111 } else if line == b"[include]" {
116 } else if line == b"[include]" {
112 if in_section && current == Current::Excludes {
117 if in_section && current == Current::Excludes {
113 return Err(SparseConfigError::IncludesAfterExcludes {
118 return Err(SparseConfigError::IncludesAfterExcludes {
114 context,
119 context,
115 });
120 });
116 }
121 }
117 in_section = true;
122 in_section = true;
118 current = Current::Includes;
123 current = Current::Includes;
119 continue;
124 continue;
120 } else if line == b"[exclude]" {
125 } else if line == b"[exclude]" {
121 in_section = true;
126 in_section = true;
122 current = Current::Excludes;
127 current = Current::Excludes;
123 } else {
128 } else {
124 if current == Current::None {
129 if current == Current::None {
125 return Err(SparseConfigError::EntryOutsideSection {
130 return Err(SparseConfigError::EntryOutsideSection {
126 context,
131 context,
127 line: line.into(),
132 line: line.into(),
128 });
133 });
129 }
134 }
130 if line.trim().starts_with(b"/") {
135 if line.trim().starts_with(b"/") {
131 warnings.push(SparseWarning::RootWarning {
136 warnings.push(SparseWarning::RootWarning {
132 context,
137 context,
133 line: line.into(),
138 line: line.into(),
134 });
139 });
135 continue;
140 continue;
136 }
141 }
137 match current {
142 match current {
138 Current::Includes => {
143 Current::Includes => {
139 includes.push(b'\n');
144 includes.push(b'\n');
140 includes.extend(line.iter());
145 includes.extend(line.iter());
141 }
146 }
142 Current::Excludes => {
147 Current::Excludes => {
143 excludes.push(b'\n');
148 excludes.push(b'\n');
144 excludes.extend(line.iter());
149 excludes.extend(line.iter());
145 }
150 }
146 Current::None => unreachable!(),
151 Current::None => unreachable!(),
147 }
152 }
148 }
153 }
149 }
154 }
150
155
151 Ok(SparseConfig {
156 Ok(SparseConfig {
152 includes,
157 includes,
153 excludes,
158 excludes,
154 profiles,
159 profiles,
155 warnings,
160 warnings,
156 })
161 })
157 }
162 }
158
163
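For illustration only, a minimal sketch of the config format this parser accepts, written as a hypothetical in-crate unit test: `parse_config` and the `SparseConfig` fields are `pub(crate)`, so such a test could only live inside `hg-core`, and the profile name and paths below are invented.

#[cfg(test)]
mod parse_config_sketch {
    use super::*;

    #[test]
    fn parses_profiles_includes_and_excludes() {
        // One `%include` profile, one include pattern, one exclude pattern.
        let raw = b"%include some/profile.sparse\n\
                    [include]\n\
                    dir/subdir\n\
                    [exclude]\n\
                    dir/subdir/excluded\n";
        let config = parse_config(raw, SparseConfigContext::Sparse).unwrap();
        assert!(config.profiles.contains(&b"some/profile.sparse".to_vec()));
        // Accepted patterns are each appended after a b'\n' separator.
        assert_eq!(config.includes, b"\ndir/subdir".to_vec());
        assert_eq!(config.excludes, b"\ndir/subdir/excluded".to_vec());
        assert!(config.warnings.is_empty());
    }
}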
159 fn read_temporary_includes(
164 fn read_temporary_includes(
160 repo: &Repo,
165 repo: &Repo,
161 ) -> Result<Vec<Vec<u8>>, SparseConfigError> {
166 ) -> Result<Vec<Vec<u8>>, SparseConfigError> {
162 let raw = repo.hg_vfs().try_read("tempsparse")?.unwrap_or(vec![]);
167 let raw = repo.hg_vfs().try_read("tempsparse")?.unwrap_or(vec![]);
163 if raw.is_empty() {
168 if raw.is_empty() {
164 return Ok(vec![]);
169 return Ok(vec![]);
165 }
170 }
166 Ok(raw.split(|c| *c == b'\n').map(ToOwned::to_owned).collect())
171 Ok(raw.split(|c| *c == b'\n').map(ToOwned::to_owned).collect())
167 }
172 }
168
173
169 /// Obtain sparse checkout patterns for the given revision
174 /// Obtain sparse checkout patterns for the given revision
170 fn patterns_for_rev(
175 fn patterns_for_rev(
171 repo: &Repo,
176 repo: &Repo,
172 rev: Revision,
177 rev: Revision,
173 ) -> Result<Option<SparseConfig>, SparseConfigError> {
178 ) -> Result<Option<SparseConfig>, SparseConfigError> {
174 if !repo.has_sparse() {
179 if !repo.has_sparse() {
175 return Ok(None);
180 return Ok(None);
176 }
181 }
177 let raw = repo.hg_vfs().try_read("sparse")?.unwrap_or(vec![]);
182 let raw = repo.hg_vfs().try_read("sparse")?.unwrap_or(vec![]);
178
183
179 if raw.is_empty() {
184 if raw.is_empty() {
180 return Ok(None);
185 return Ok(None);
181 }
186 }
182
187
183 let mut config = parse_config(&raw, SparseConfigContext::Sparse)?;
188 let mut config = parse_config(&raw, SparseConfigContext::Sparse)?;
184
189
185 if !config.profiles.is_empty() {
190 if !config.profiles.is_empty() {
186 let mut profiles: Vec<Vec<u8>> = config.profiles.into_iter().collect();
191 let mut profiles: Vec<Vec<u8>> = config.profiles.into_iter().collect();
187 let mut visited = HashSet::new();
192 let mut visited = HashSet::new();
188
193
189 while let Some(profile) = profiles.pop() {
194 while let Some(profile) = profiles.pop() {
190 if visited.contains(&profile) {
195 if visited.contains(&profile) {
191 continue;
196 continue;
192 }
197 }
193 visited.insert(profile.to_owned());
198 visited.insert(profile.to_owned());
194
199
195 let output =
200 let output =
196 cat(repo, &rev.to_string(), vec![HgPath::new(&profile)])
201 cat(repo, &rev.to_string(), vec![HgPath::new(&profile)])
197 .map_err(|_| {
202 .map_err(|_| {
198 HgError::corrupted(format!(
203 HgError::corrupted(format!(
199 "dirstate points to non-existent parent node"
204 "dirstate points to non-existent parent node"
200 ))
205 ))
201 })?;
206 })?;
202 if output.results.is_empty() {
207 if output.results.is_empty() {
203 config.warnings.push(SparseWarning::ProfileNotFound {
208 config.warnings.push(SparseWarning::ProfileNotFound {
204 profile: profile.to_owned(),
209 profile: profile.to_owned(),
205 rev,
210 rev,
206 })
211 })
207 }
212 }
208
213
209 let subconfig = parse_config(
214 let subconfig = parse_config(
210 &output.results[0].1,
215 &output.results[0].1,
211 SparseConfigContext::Sparse,
216 SparseConfigContext::Sparse,
212 )?;
217 )?;
213 if !subconfig.includes.is_empty() {
218 if !subconfig.includes.is_empty() {
214 config.includes.push(b'\n');
219 config.includes.push(b'\n');
215 config.includes.extend(&subconfig.includes);
220 config.includes.extend(&subconfig.includes);
216 }
221 }
217 if !subconfig.excludes.is_empty() {
222 if !subconfig.excludes.is_empty() {
218 config.excludes.push(b'\n');
223 config.excludes.push(b'\n');
219 config.excludes.extend(&subconfig.excludes);
224 config.excludes.extend(&subconfig.excludes);
220 }
225 }
221 config.warnings.extend(subconfig.warnings.into_iter());
226 config.warnings.extend(subconfig.warnings.into_iter());
222 profiles.extend(subconfig.profiles.into_iter());
227 profiles.extend(subconfig.profiles.into_iter());
223 }
228 }
224
229
225 config.profiles = visited;
230 config.profiles = visited;
226 }
231 }
227
232
228 if !config.includes.is_empty() {
233 if !config.includes.is_empty() {
229 config.includes.extend(b"\n.hg*");
234 config.includes.extend(b"\n.hg*");
230 }
235 }
231
236
232 Ok(Some(config))
237 Ok(Some(config))
233 }
238 }
234
239
235 /// Obtain a matcher for sparse working directories.
240 /// Obtain a matcher for sparse working directories.
236 pub fn matcher(
241 pub fn matcher(
237 repo: &Repo,
242 repo: &Repo,
238 ) -> Result<(Box<dyn Matcher + Sync>, Vec<SparseWarning>), SparseConfigError> {
243 ) -> Result<(Box<dyn Matcher + Sync>, Vec<SparseWarning>), SparseConfigError> {
239 let mut warnings = vec![];
244 let mut warnings = vec![];
240 if !repo.requirements().contains(SPARSE_REQUIREMENT) {
245 if !repo.requirements().contains(SPARSE_REQUIREMENT) {
241 return Ok((Box::new(AlwaysMatcher), warnings));
246 return Ok((Box::new(AlwaysMatcher), warnings));
242 }
247 }
243
248
244 let parents = repo.dirstate_parents()?;
249 let parents = repo.dirstate_parents()?;
245 let mut revs = vec![];
250 let mut revs = vec![];
246 let p1_rev =
251 let p1_rev =
247 repo.changelog()?
252 repo.changelog()?
248 .rev_from_node(parents.p1.into())
253 .rev_from_node(parents.p1.into())
249 .map_err(|_| {
254 .map_err(|_| {
250 HgError::corrupted(format!(
255 HgError::corrupted(format!(
251 "dirstate points to non-existent parent node"
256 "dirstate points to non-existent parent node"
252 ))
257 ))
253 })?;
258 })?;
254 if p1_rev != NULL_REVISION {
259 if p1_rev != NULL_REVISION {
255 revs.push(p1_rev)
260 revs.push(p1_rev)
256 }
261 }
257 let p2_rev =
262 let p2_rev =
258 repo.changelog()?
263 repo.changelog()?
259 .rev_from_node(parents.p2.into())
264 .rev_from_node(parents.p2.into())
260 .map_err(|_| {
265 .map_err(|_| {
261 HgError::corrupted(format!(
266 HgError::corrupted(format!(
262 "dirstate points to non-existent parent node"
267 "dirstate points to non-existent parent node"
263 ))
268 ))
264 })?;
269 })?;
265 if p2_rev != NULL_REVISION {
270 if p2_rev != NULL_REVISION {
266 revs.push(p2_rev)
271 revs.push(p2_rev)
267 }
272 }
268 let mut matchers = vec![];
273 let mut matchers = vec![];
269
274
270 for rev in revs.iter() {
275 for rev in revs.iter() {
271 let config = patterns_for_rev(repo, *rev);
276 let config = patterns_for_rev(repo, *rev);
272 if let Ok(Some(config)) = config {
277 if let Ok(Some(config)) = config {
273 warnings.extend(config.warnings);
278 warnings.extend(config.warnings);
274 let mut m: Box<dyn Matcher + Sync> = Box::new(AlwaysMatcher);
279 let mut m: Box<dyn Matcher + Sync> = Box::new(AlwaysMatcher);
275 if !config.includes.is_empty() {
280 if !config.includes.is_empty() {
276 let (patterns, subwarnings) = parse_pattern_file_contents(
281 let (patterns, subwarnings) = parse_pattern_file_contents(
277 &config.includes,
282 &config.includes,
278 Path::new(""),
283 Path::new(""),
279 Some(b"relglob:".as_ref()),
284 Some(b"relglob:".as_ref()),
280 false,
285 false,
281 )?;
286 )?;
282 warnings.extend(subwarnings.into_iter().map(From::from));
287 warnings.extend(subwarnings.into_iter().map(From::from));
283 m = Box::new(IncludeMatcher::new(patterns)?);
288 m = Box::new(IncludeMatcher::new(patterns)?);
284 }
289 }
285 if !config.excludes.is_empty() {
290 if !config.excludes.is_empty() {
286 let (patterns, subwarnings) = parse_pattern_file_contents(
291 let (patterns, subwarnings) = parse_pattern_file_contents(
287 &config.excludes,
292 &config.excludes,
288 Path::new(""),
293 Path::new(""),
289 Some(b"relglob:".as_ref()),
294 Some(b"relglob:".as_ref()),
290 false,
295 false,
291 )?;
296 )?;
292 warnings.extend(subwarnings.into_iter().map(From::from));
297 warnings.extend(subwarnings.into_iter().map(From::from));
293 m = Box::new(DifferenceMatcher::new(
298 m = Box::new(DifferenceMatcher::new(
294 m,
299 m,
295 Box::new(IncludeMatcher::new(patterns)?),
300 Box::new(IncludeMatcher::new(patterns)?),
296 ));
301 ));
297 }
302 }
298 matchers.push(m);
303 matchers.push(m);
299 }
304 }
300 }
305 }
301 let result: Box<dyn Matcher + Sync> = match matchers.len() {
306 let result: Box<dyn Matcher + Sync> = match matchers.len() {
302 0 => Box::new(AlwaysMatcher),
307 0 => Box::new(AlwaysMatcher),
303 1 => matchers.pop().expect("1 is equal to 0"),
308 1 => matchers.pop().expect("1 is equal to 0"),
304 _ => Box::new(UnionMatcher::new(matchers)),
309 _ => Box::new(UnionMatcher::new(matchers)),
305 };
310 };
306
311
307 let matcher =
312 let matcher =
308 force_include_matcher(result, &read_temporary_includes(repo)?)?;
313 force_include_matcher(result, &read_temporary_includes(repo)?)?;
309 Ok((matcher, warnings))
314 Ok((matcher, warnings))
310 }
315 }
311
316
312 /// Returns a matcher that returns true for any of the forced includes before
317 /// Returns a matcher that returns true for any of the forced includes before
313 /// testing against the actual matcher
318 /// testing against the actual matcher
314 fn force_include_matcher(
319 fn force_include_matcher(
315 result: Box<dyn Matcher + Sync>,
320 result: Box<dyn Matcher + Sync>,
316 temp_includes: &[Vec<u8>],
321 temp_includes: &[Vec<u8>],
317 ) -> Result<Box<dyn Matcher + Sync>, PatternError> {
322 ) -> Result<Box<dyn Matcher + Sync>, PatternError> {
318 if temp_includes.is_empty() {
323 if temp_includes.is_empty() {
319 return Ok(result);
324 return Ok(result);
320 }
325 }
321 let forced_include_matcher = IncludeMatcher::new(
326 let forced_include_matcher = IncludeMatcher::new(
322 temp_includes
327 temp_includes
323 .into_iter()
328 .into_iter()
324 .map(|include| {
329 .map(|include| {
325 IgnorePattern::new(PatternSyntax::Path, include, Path::new(""))
330 IgnorePattern::new(PatternSyntax::Path, include, Path::new(""))
326 })
331 })
327 .collect(),
332 .collect(),
328 )?;
333 )?;
329 Ok(Box::new(UnionMatcher::new(vec![
334 Ok(Box::new(UnionMatcher::new(vec![
330 Box::new(forced_include_matcher),
335 Box::new(forced_include_matcher),
331 result,
336 result,
332 ])))
337 ])))
333 }
338 }
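Another illustration-only sketch: what the temporary-include layering above amounts to. `force_include_matcher` is private, so this would also have to live inside this module; the path literal is made up, and it assumes a `path:` include matches the named file as usual.

#[cfg(test)]
mod force_include_sketch {
    use super::*;
    use crate::matchers::NeverMatcher;
    use crate::utils::hg_path::HgPath;

    #[test]
    fn forced_include_overrides_base_matcher() {
        // Base matcher rejects everything; one temporary include is layered on top.
        let base: Box<dyn Matcher + Sync> = Box::new(NeverMatcher);
        let temp_includes = vec![b"tmp/forced-file".to_vec()];
        let combined = force_include_matcher(base, &temp_includes).unwrap();
        // The union with the forced-include matcher should now accept the path.
        assert!(combined.matches(HgPath::new(b"tmp/forced-file")));
    }
}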
@@ -1,617 +1,620 b''
1 // status.rs
1 // status.rs
2 //
2 //
3 // Copyright 2020, Georges Racinet <georges.racinets@octobus.net>
3 // Copyright 2020, Georges Racinet <georges.racinets@octobus.net>
4 //
4 //
5 // This software may be used and distributed according to the terms of the
5 // This software may be used and distributed according to the terms of the
6 // GNU General Public License version 2 or any later version.
6 // GNU General Public License version 2 or any later version.
7
7
8 use crate::error::CommandError;
8 use crate::error::CommandError;
9 use crate::ui::Ui;
9 use crate::ui::Ui;
10 use crate::utils::path_utils::RelativizePaths;
10 use crate::utils::path_utils::RelativizePaths;
11 use clap::{Arg, SubCommand};
11 use clap::{Arg, SubCommand};
12 use format_bytes::format_bytes;
12 use format_bytes::format_bytes;
13 use hg;
14 use hg::config::Config;
13 use hg::config::Config;
15 use hg::dirstate::has_exec_bit;
14 use hg::dirstate::has_exec_bit;
16 use hg::dirstate::status::StatusPath;
15 use hg::dirstate::status::StatusPath;
17 use hg::dirstate::TruncatedTimestamp;
16 use hg::dirstate::TruncatedTimestamp;
18 use hg::errors::{HgError, IoResultExt};
17 use hg::errors::{HgError, IoResultExt};
19 use hg::lock::LockError;
18 use hg::lock::LockError;
20 use hg::manifest::Manifest;
19 use hg::manifest::Manifest;
20 use hg::matchers::{AlwaysMatcher, IntersectionMatcher};
21 use hg::repo::Repo;
21 use hg::repo::Repo;
22 use hg::sparse::{matcher, SparseWarning};
23 use hg::utils::files::get_bytes_from_os_string;
22 use hg::utils::files::get_bytes_from_os_string;
24 use hg::utils::files::get_bytes_from_path;
23 use hg::utils::files::get_bytes_from_path;
25 use hg::utils::files::get_path_from_bytes;
24 use hg::utils::files::get_path_from_bytes;
26 use hg::utils::hg_path::{hg_path_to_path_buf, HgPath};
25 use hg::utils::hg_path::{hg_path_to_path_buf, HgPath};
27 use hg::DirstateStatus;
26 use hg::DirstateStatus;
28 use hg::PatternFileWarning;
27 use hg::PatternFileWarning;
29 use hg::StatusError;
28 use hg::StatusError;
30 use hg::StatusOptions;
29 use hg::StatusOptions;
30 use hg::{self, narrow, sparse};
31 use log::info;
31 use log::info;
32 use std::io;
32 use std::io;
33 use std::path::PathBuf;
33 use std::path::PathBuf;
34
34
35 pub const HELP_TEXT: &str = "
35 pub const HELP_TEXT: &str = "
36 Show changed files in the working directory
36 Show changed files in the working directory
37
37
38 This is a pure Rust version of `hg status`.
38 This is a pure Rust version of `hg status`.
39
39
40 Some options might be missing, check the list below.
40 Some options might be missing, check the list below.
41 ";
41 ";
42
42
43 pub fn args() -> clap::App<'static, 'static> {
43 pub fn args() -> clap::App<'static, 'static> {
44 SubCommand::with_name("status")
44 SubCommand::with_name("status")
45 .alias("st")
45 .alias("st")
46 .about(HELP_TEXT)
46 .about(HELP_TEXT)
47 .arg(
47 .arg(
48 Arg::with_name("all")
48 Arg::with_name("all")
49 .help("show status of all files")
49 .help("show status of all files")
50 .short("-A")
50 .short("-A")
51 .long("--all"),
51 .long("--all"),
52 )
52 )
53 .arg(
53 .arg(
54 Arg::with_name("modified")
54 Arg::with_name("modified")
55 .help("show only modified files")
55 .help("show only modified files")
56 .short("-m")
56 .short("-m")
57 .long("--modified"),
57 .long("--modified"),
58 )
58 )
59 .arg(
59 .arg(
60 Arg::with_name("added")
60 Arg::with_name("added")
61 .help("show only added files")
61 .help("show only added files")
62 .short("-a")
62 .short("-a")
63 .long("--added"),
63 .long("--added"),
64 )
64 )
65 .arg(
65 .arg(
66 Arg::with_name("removed")
66 Arg::with_name("removed")
67 .help("show only removed files")
67 .help("show only removed files")
68 .short("-r")
68 .short("-r")
69 .long("--removed"),
69 .long("--removed"),
70 )
70 )
71 .arg(
71 .arg(
72 Arg::with_name("clean")
72 Arg::with_name("clean")
73 .help("show only clean files")
73 .help("show only clean files")
74 .short("-c")
74 .short("-c")
75 .long("--clean"),
75 .long("--clean"),
76 )
76 )
77 .arg(
77 .arg(
78 Arg::with_name("deleted")
78 Arg::with_name("deleted")
79 .help("show only deleted files")
79 .help("show only deleted files")
80 .short("-d")
80 .short("-d")
81 .long("--deleted"),
81 .long("--deleted"),
82 )
82 )
83 .arg(
83 .arg(
84 Arg::with_name("unknown")
84 Arg::with_name("unknown")
85 .help("show only unknown (not tracked) files")
85 .help("show only unknown (not tracked) files")
86 .short("-u")
86 .short("-u")
87 .long("--unknown"),
87 .long("--unknown"),
88 )
88 )
89 .arg(
89 .arg(
90 Arg::with_name("ignored")
90 Arg::with_name("ignored")
91 .help("show only ignored files")
91 .help("show only ignored files")
92 .short("-i")
92 .short("-i")
93 .long("--ignored"),
93 .long("--ignored"),
94 )
94 )
95 .arg(
95 .arg(
96 Arg::with_name("copies")
96 Arg::with_name("copies")
97 .help("show source of copied files (DEFAULT: ui.statuscopies)")
97 .help("show source of copied files (DEFAULT: ui.statuscopies)")
98 .short("-C")
98 .short("-C")
99 .long("--copies"),
99 .long("--copies"),
100 )
100 )
101 .arg(
101 .arg(
102 Arg::with_name("no-status")
102 Arg::with_name("no-status")
103 .help("hide status prefix")
103 .help("hide status prefix")
104 .short("-n")
104 .short("-n")
105 .long("--no-status"),
105 .long("--no-status"),
106 )
106 )
107 .arg(
107 .arg(
108 Arg::with_name("verbose")
108 Arg::with_name("verbose")
109 .help("enable additional output")
109 .help("enable additional output")
110 .short("-v")
110 .short("-v")
111 .long("--verbose"),
111 .long("--verbose"),
112 )
112 )
113 }
113 }
114
114
115 /// Pure data type allowing the caller to specify file states to display
115 /// Pure data type allowing the caller to specify file states to display
116 #[derive(Copy, Clone, Debug)]
116 #[derive(Copy, Clone, Debug)]
117 pub struct DisplayStates {
117 pub struct DisplayStates {
118 pub modified: bool,
118 pub modified: bool,
119 pub added: bool,
119 pub added: bool,
120 pub removed: bool,
120 pub removed: bool,
121 pub clean: bool,
121 pub clean: bool,
122 pub deleted: bool,
122 pub deleted: bool,
123 pub unknown: bool,
123 pub unknown: bool,
124 pub ignored: bool,
124 pub ignored: bool,
125 }
125 }
126
126
127 pub const DEFAULT_DISPLAY_STATES: DisplayStates = DisplayStates {
127 pub const DEFAULT_DISPLAY_STATES: DisplayStates = DisplayStates {
128 modified: true,
128 modified: true,
129 added: true,
129 added: true,
130 removed: true,
130 removed: true,
131 clean: false,
131 clean: false,
132 deleted: true,
132 deleted: true,
133 unknown: true,
133 unknown: true,
134 ignored: false,
134 ignored: false,
135 };
135 };
136
136
137 pub const ALL_DISPLAY_STATES: DisplayStates = DisplayStates {
137 pub const ALL_DISPLAY_STATES: DisplayStates = DisplayStates {
138 modified: true,
138 modified: true,
139 added: true,
139 added: true,
140 removed: true,
140 removed: true,
141 clean: true,
141 clean: true,
142 deleted: true,
142 deleted: true,
143 unknown: true,
143 unknown: true,
144 ignored: true,
144 ignored: true,
145 };
145 };
146
146
147 impl DisplayStates {
147 impl DisplayStates {
148 pub fn is_empty(&self) -> bool {
148 pub fn is_empty(&self) -> bool {
149 !(self.modified
149 !(self.modified
150 || self.added
150 || self.added
151 || self.removed
151 || self.removed
152 || self.clean
152 || self.clean
153 || self.deleted
153 || self.deleted
154 || self.unknown
154 || self.unknown
155 || self.ignored)
155 || self.ignored)
156 }
156 }
157 }
157 }
158
158
159 fn has_unfinished_merge(repo: &Repo) -> Result<bool, CommandError> {
159 fn has_unfinished_merge(repo: &Repo) -> Result<bool, CommandError> {
160 return Ok(repo.dirstate_parents()?.is_merge());
160 return Ok(repo.dirstate_parents()?.is_merge());
161 }
161 }
162
162
163 fn has_unfinished_state(repo: &Repo) -> Result<bool, CommandError> {
163 fn has_unfinished_state(repo: &Repo) -> Result<bool, CommandError> {
164 // These are all the known values for the [fname] argument of
164 // These are all the known values for the [fname] argument of
165 // [addunfinished] function in [state.py]
165 // [addunfinished] function in [state.py]
166 let known_state_files: &[&str] = &[
166 let known_state_files: &[&str] = &[
167 "bisect.state",
167 "bisect.state",
168 "graftstate",
168 "graftstate",
169 "histedit-state",
169 "histedit-state",
170 "rebasestate",
170 "rebasestate",
171 "shelvedstate",
171 "shelvedstate",
172 "transplant/journal",
172 "transplant/journal",
173 "updatestate",
173 "updatestate",
174 ];
174 ];
175 if has_unfinished_merge(repo)? {
175 if has_unfinished_merge(repo)? {
176 return Ok(true);
176 return Ok(true);
177 };
177 };
178 for f in known_state_files {
178 for f in known_state_files {
179 if repo.hg_vfs().join(f).exists() {
179 if repo.hg_vfs().join(f).exists() {
180 return Ok(true);
180 return Ok(true);
181 }
181 }
182 }
182 }
183 return Ok(false);
183 return Ok(false);
184 }
184 }
185
185
186 pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
186 pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
187 // TODO: lift these limitations
187 // TODO: lift these limitations
188 if invocation.config.get_bool(b"ui", b"tweakdefaults")? {
188 if invocation.config.get_bool(b"ui", b"tweakdefaults")? {
189 return Err(CommandError::unsupported(
189 return Err(CommandError::unsupported(
190 "ui.tweakdefaults is not yet supported with rhg status",
190 "ui.tweakdefaults is not yet supported with rhg status",
191 ));
191 ));
192 }
192 }
193 if invocation.config.get_bool(b"ui", b"statuscopies")? {
193 if invocation.config.get_bool(b"ui", b"statuscopies")? {
194 return Err(CommandError::unsupported(
194 return Err(CommandError::unsupported(
195 "ui.statuscopies is not yet supported with rhg status",
195 "ui.statuscopies is not yet supported with rhg status",
196 ));
196 ));
197 }
197 }
198 if invocation
198 if invocation
199 .config
199 .config
200 .get(b"commands", b"status.terse")
200 .get(b"commands", b"status.terse")
201 .is_some()
201 .is_some()
202 {
202 {
203 return Err(CommandError::unsupported(
203 return Err(CommandError::unsupported(
204 "status.terse is not yet supported with rhg status",
204 "status.terse is not yet supported with rhg status",
205 ));
205 ));
206 }
206 }
207
207
208 let ui = invocation.ui;
208 let ui = invocation.ui;
209 let config = invocation.config;
209 let config = invocation.config;
210 let args = invocation.subcommand_args;
210 let args = invocation.subcommand_args;
211
211
212 let verbose = !ui.plain(None)
212 let verbose = !ui.plain(None)
213 && !args.is_present("print0")
213 && !args.is_present("print0")
214 && (args.is_present("verbose")
214 && (args.is_present("verbose")
215 || config.get_bool(b"ui", b"verbose")?
215 || config.get_bool(b"ui", b"verbose")?
216 || config.get_bool(b"commands", b"status.verbose")?);
216 || config.get_bool(b"commands", b"status.verbose")?);
217
217
218 let all = args.is_present("all");
218 let all = args.is_present("all");
219 let display_states = if all {
219 let display_states = if all {
220 // TODO when implementing `--quiet`: it excludes clean files
220 // TODO when implementing `--quiet`: it excludes clean files
221 // from `--all`
221 // from `--all`
222 ALL_DISPLAY_STATES
222 ALL_DISPLAY_STATES
223 } else {
223 } else {
224 let requested = DisplayStates {
224 let requested = DisplayStates {
225 modified: args.is_present("modified"),
225 modified: args.is_present("modified"),
226 added: args.is_present("added"),
226 added: args.is_present("added"),
227 removed: args.is_present("removed"),
227 removed: args.is_present("removed"),
228 clean: args.is_present("clean"),
228 clean: args.is_present("clean"),
229 deleted: args.is_present("deleted"),
229 deleted: args.is_present("deleted"),
230 unknown: args.is_present("unknown"),
230 unknown: args.is_present("unknown"),
231 ignored: args.is_present("ignored"),
231 ignored: args.is_present("ignored"),
232 };
232 };
233 if requested.is_empty() {
233 if requested.is_empty() {
234 DEFAULT_DISPLAY_STATES
234 DEFAULT_DISPLAY_STATES
235 } else {
235 } else {
236 requested
236 requested
237 }
237 }
238 };
238 };
239 let no_status = args.is_present("no-status");
239 let no_status = args.is_present("no-status");
240 let list_copies = all
240 let list_copies = all
241 || args.is_present("copies")
241 || args.is_present("copies")
242 || config.get_bool(b"ui", b"statuscopies")?;
242 || config.get_bool(b"ui", b"statuscopies")?;
243
243
244 let repo = invocation.repo?;
244 let repo = invocation.repo?;
245
245
246 if verbose {
246 if verbose {
247 if has_unfinished_state(repo)? {
247 if has_unfinished_state(repo)? {
248 return Err(CommandError::unsupported(
248 return Err(CommandError::unsupported(
249 "verbose status output is not supported by rhg (and is needed because we're in an unfinished operation)",
249 "verbose status output is not supported by rhg (and is needed because we're in an unfinished operation)",
250 ));
250 ));
251 };
251 };
252 }
252 }
253
253
254 if repo.has_narrow() {
255 return Err(CommandError::unsupported(
256 "rhg status is not supported for narrow clones yet",
257 ));
258 }
259
260 let mut dmap = repo.dirstate_map_mut()?;
254 let mut dmap = repo.dirstate_map_mut()?;
261
255
262 let options = StatusOptions {
256 let options = StatusOptions {
263 // we're currently supporting file systems with exec flags only
257 // we're currently supporting file systems with exec flags only
264 // anyway
258 // anyway
265 check_exec: true,
259 check_exec: true,
266 list_clean: display_states.clean,
260 list_clean: display_states.clean,
267 list_unknown: display_states.unknown,
261 list_unknown: display_states.unknown,
268 list_ignored: display_states.ignored,
262 list_ignored: display_states.ignored,
269 list_copies,
263 list_copies,
270 collect_traversed_dirs: false,
264 collect_traversed_dirs: false,
271 };
265 };
272
266
273 type StatusResult<'a> =
267 type StatusResult<'a> =
274 Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>;
268 Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>;
275
269
276 let after_status = |res: StatusResult| -> Result<_, CommandError> {
270 let after_status = |res: StatusResult| -> Result<_, CommandError> {
277 let (mut ds_status, pattern_warnings) = res?;
271 let (mut ds_status, pattern_warnings) = res?;
278 for warning in pattern_warnings {
272 for warning in pattern_warnings {
279 ui.write_stderr(&print_pattern_file_warning(&warning, &repo))?;
273 ui.write_stderr(&print_pattern_file_warning(&warning, &repo))?;
280 }
274 }
281
275
282 for (path, error) in ds_status.bad {
276 for (path, error) in ds_status.bad {
283 let error = match error {
277 let error = match error {
284 hg::BadMatch::OsError(code) => {
278 hg::BadMatch::OsError(code) => {
285 std::io::Error::from_raw_os_error(code).to_string()
279 std::io::Error::from_raw_os_error(code).to_string()
286 }
280 }
287 hg::BadMatch::BadType(ty) => {
281 hg::BadMatch::BadType(ty) => {
288 format!("unsupported file type (type is {})", ty)
282 format!("unsupported file type (type is {})", ty)
289 }
283 }
290 };
284 };
291 ui.write_stderr(&format_bytes!(
285 ui.write_stderr(&format_bytes!(
292 b"{}: {}\n",
286 b"{}: {}\n",
293 path.as_bytes(),
287 path.as_bytes(),
294 error.as_bytes()
288 error.as_bytes()
295 ))?
289 ))?
296 }
290 }
297 if !ds_status.unsure.is_empty() {
291 if !ds_status.unsure.is_empty() {
298 info!(
292 info!(
299 "Files to be rechecked by retrieval from filelog: {:?}",
293 "Files to be rechecked by retrieval from filelog: {:?}",
300 ds_status.unsure.iter().map(|s| &s.path).collect::<Vec<_>>()
294 ds_status.unsure.iter().map(|s| &s.path).collect::<Vec<_>>()
301 );
295 );
302 }
296 }
303 let mut fixup = Vec::new();
297 let mut fixup = Vec::new();
304 if !ds_status.unsure.is_empty()
298 if !ds_status.unsure.is_empty()
305 && (display_states.modified || display_states.clean)
299 && (display_states.modified || display_states.clean)
306 {
300 {
307 let p1 = repo.dirstate_parents()?.p1;
301 let p1 = repo.dirstate_parents()?.p1;
308 let manifest = repo.manifest_for_node(p1).map_err(|e| {
302 let manifest = repo.manifest_for_node(p1).map_err(|e| {
309 CommandError::from((e, &*format!("{:x}", p1.short())))
303 CommandError::from((e, &*format!("{:x}", p1.short())))
310 })?;
304 })?;
311 for to_check in ds_status.unsure {
305 for to_check in ds_status.unsure {
312 if unsure_is_modified(repo, &manifest, &to_check.path)? {
306 if unsure_is_modified(repo, &manifest, &to_check.path)? {
313 if display_states.modified {
307 if display_states.modified {
314 ds_status.modified.push(to_check);
308 ds_status.modified.push(to_check);
315 }
309 }
316 } else {
310 } else {
317 if display_states.clean {
311 if display_states.clean {
318 ds_status.clean.push(to_check.clone());
312 ds_status.clean.push(to_check.clone());
319 }
313 }
320 fixup.push(to_check.path.into_owned())
314 fixup.push(to_check.path.into_owned())
321 }
315 }
322 }
316 }
323 }
317 }
324 let relative_paths = (!ui.plain(None))
318 let relative_paths = (!ui.plain(None))
325 && config
319 && config
326 .get_option(b"commands", b"status.relative")?
320 .get_option(b"commands", b"status.relative")?
327 .unwrap_or(config.get_bool(b"ui", b"relative-paths")?);
321 .unwrap_or(config.get_bool(b"ui", b"relative-paths")?);
328 let output = DisplayStatusPaths {
322 let output = DisplayStatusPaths {
329 ui,
323 ui,
330 no_status,
324 no_status,
331 relativize: if relative_paths {
325 relativize: if relative_paths {
332 Some(RelativizePaths::new(repo)?)
326 Some(RelativizePaths::new(repo)?)
333 } else {
327 } else {
334 None
328 None
335 },
329 },
336 };
330 };
337 if display_states.modified {
331 if display_states.modified {
338 output.display(b"M ", "status.modified", ds_status.modified)?;
332 output.display(b"M ", "status.modified", ds_status.modified)?;
339 }
333 }
340 if display_states.added {
334 if display_states.added {
341 output.display(b"A ", "status.added", ds_status.added)?;
335 output.display(b"A ", "status.added", ds_status.added)?;
342 }
336 }
343 if display_states.removed {
337 if display_states.removed {
344 output.display(b"R ", "status.removed", ds_status.removed)?;
338 output.display(b"R ", "status.removed", ds_status.removed)?;
345 }
339 }
346 if display_states.deleted {
340 if display_states.deleted {
347 output.display(b"! ", "status.deleted", ds_status.deleted)?;
341 output.display(b"! ", "status.deleted", ds_status.deleted)?;
348 }
342 }
349 if display_states.unknown {
343 if display_states.unknown {
350 output.display(b"? ", "status.unknown", ds_status.unknown)?;
344 output.display(b"? ", "status.unknown", ds_status.unknown)?;
351 }
345 }
352 if display_states.ignored {
346 if display_states.ignored {
353 output.display(b"I ", "status.ignored", ds_status.ignored)?;
347 output.display(b"I ", "status.ignored", ds_status.ignored)?;
354 }
348 }
355 if display_states.clean {
349 if display_states.clean {
356 output.display(b"C ", "status.clean", ds_status.clean)?;
350 output.display(b"C ", "status.clean", ds_status.clean)?;
357 }
351 }
358
352
359 let dirstate_write_needed = ds_status.dirty;
353 let dirstate_write_needed = ds_status.dirty;
360 let filesystem_time_at_status_start =
354 let filesystem_time_at_status_start =
361 ds_status.filesystem_time_at_status_start;
355 ds_status.filesystem_time_at_status_start;
362
356
363 Ok((
357 Ok((
364 fixup,
358 fixup,
365 dirstate_write_needed,
359 dirstate_write_needed,
366 filesystem_time_at_status_start,
360 filesystem_time_at_status_start,
367 ))
361 ))
368 };
362 };
369 let (matcher, sparse_warnings) = matcher(repo)?;
363 let (narrow_matcher, narrow_warnings) = narrow::matcher(repo)?;
364 let (sparse_matcher, sparse_warnings) = sparse::matcher(repo)?;
365 let matcher = match (repo.has_narrow(), repo.has_sparse()) {
366 (true, true) => {
367 Box::new(IntersectionMatcher::new(narrow_matcher, sparse_matcher))
368 }
369 (true, false) => narrow_matcher,
370 (false, true) => sparse_matcher,
371 (false, false) => Box::new(AlwaysMatcher),
372 };
370
373
371 for warning in sparse_warnings {
374 for warning in narrow_warnings.into_iter().chain(sparse_warnings) {
372 match &warning {
375 match &warning {
373 SparseWarning::RootWarning { context, line } => {
376 sparse::SparseWarning::RootWarning { context, line } => {
374 let msg = format_bytes!(
377 let msg = format_bytes!(
375 b"warning: {} profile cannot use paths \"
378 b"warning: {} profile cannot use paths \"
376 starting with /, ignoring {}\n",
379 starting with /, ignoring {}\n",
377 context,
380 context,
378 line
381 line
379 );
382 );
380 ui.write_stderr(&msg)?;
383 ui.write_stderr(&msg)?;
381 }
384 }
382 SparseWarning::ProfileNotFound { profile, rev } => {
385 sparse::SparseWarning::ProfileNotFound { profile, rev } => {
383 let msg = format_bytes!(
386 let msg = format_bytes!(
384 b"warning: sparse profile '{}' not found \"
387 b"warning: sparse profile '{}' not found \"
385 in rev {} - ignoring it\n",
388 in rev {} - ignoring it\n",
386 profile,
389 profile,
387 rev
390 rev
388 );
391 );
389 ui.write_stderr(&msg)?;
392 ui.write_stderr(&msg)?;
390 }
393 }
391 SparseWarning::Pattern(e) => {
394 sparse::SparseWarning::Pattern(e) => {
392 ui.write_stderr(&print_pattern_file_warning(e, &repo))?;
395 ui.write_stderr(&print_pattern_file_warning(e, &repo))?;
393 }
396 }
394 }
397 }
395 }
398 }
396 let (fixup, mut dirstate_write_needed, filesystem_time_at_status_start) =
399 let (fixup, mut dirstate_write_needed, filesystem_time_at_status_start) =
397 dmap.with_status(
400 dmap.with_status(
398 matcher.as_ref(),
401 matcher.as_ref(),
399 repo.working_directory_path().to_owned(),
402 repo.working_directory_path().to_owned(),
400 ignore_files(repo, config),
403 ignore_files(repo, config),
401 options,
404 options,
402 after_status,
405 after_status,
403 )?;
406 )?;
404
407
405 if (fixup.is_empty() || filesystem_time_at_status_start.is_none())
408 if (fixup.is_empty() || filesystem_time_at_status_start.is_none())
406 && !dirstate_write_needed
409 && !dirstate_write_needed
407 {
410 {
408 // Nothing to update
411 // Nothing to update
409 return Ok(());
412 return Ok(());
410 }
413 }
411
414
412 // Update the dirstate on disk if we can
415 // Update the dirstate on disk if we can
413 let with_lock_result =
416 let with_lock_result =
414 repo.try_with_wlock_no_wait(|| -> Result<(), CommandError> {
417 repo.try_with_wlock_no_wait(|| -> Result<(), CommandError> {
415 if let Some(mtime_boundary) = filesystem_time_at_status_start {
418 if let Some(mtime_boundary) = filesystem_time_at_status_start {
416 for hg_path in fixup {
419 for hg_path in fixup {
417 use std::os::unix::fs::MetadataExt;
420 use std::os::unix::fs::MetadataExt;
418 let fs_path = hg_path_to_path_buf(&hg_path)
421 let fs_path = hg_path_to_path_buf(&hg_path)
419 .expect("HgPath conversion");
422 .expect("HgPath conversion");
420 // Specifically do not reuse `fs_metadata` from
423 // Specifically do not reuse `fs_metadata` from
421 // `unsure_is_clean` which was needed before reading
424 // `unsure_is_clean` which was needed before reading
422 // contents. Here we access metadata again after reading
425 // contents. Here we access metadata again after reading
423 // content, in case it changed in the meantime.
426 // content, in case it changed in the meantime.
424 let fs_metadata = repo
427 let fs_metadata = repo
425 .working_directory_vfs()
428 .working_directory_vfs()
426 .symlink_metadata(&fs_path)?;
429 .symlink_metadata(&fs_path)?;
427 if let Some(mtime) =
430 if let Some(mtime) =
428 TruncatedTimestamp::for_reliable_mtime_of(
431 TruncatedTimestamp::for_reliable_mtime_of(
429 &fs_metadata,
432 &fs_metadata,
430 &mtime_boundary,
433 &mtime_boundary,
431 )
434 )
432 .when_reading_file(&fs_path)?
435 .when_reading_file(&fs_path)?
433 {
436 {
434 let mode = fs_metadata.mode();
437 let mode = fs_metadata.mode();
435 let size = fs_metadata.len();
438 let size = fs_metadata.len();
436 dmap.set_clean(&hg_path, mode, size as u32, mtime)?;
439 dmap.set_clean(&hg_path, mode, size as u32, mtime)?;
437 dirstate_write_needed = true
440 dirstate_write_needed = true
438 }
441 }
439 }
442 }
440 }
443 }
441 drop(dmap); // Avoid "already mutably borrowed" RefCell panics
444 drop(dmap); // Avoid "already mutably borrowed" RefCell panics
442 if dirstate_write_needed {
445 if dirstate_write_needed {
443 repo.write_dirstate()?
446 repo.write_dirstate()?
444 }
447 }
445 Ok(())
448 Ok(())
446 });
449 });
447 match with_lock_result {
450 match with_lock_result {
448 Ok(closure_result) => closure_result?,
451 Ok(closure_result) => closure_result?,
449 Err(LockError::AlreadyHeld) => {
452 Err(LockError::AlreadyHeld) => {
450 // Not updating the dirstate is not ideal but not critical:
453 // Not updating the dirstate is not ideal but not critical:
451 // don’t keep our caller waiting until some other Mercurial
454 // don’t keep our caller waiting until some other Mercurial
452 // process releases the lock.
455 // process releases the lock.
453 }
456 }
454 Err(LockError::Other(HgError::IoError { error, .. }))
457 Err(LockError::Other(HgError::IoError { error, .. }))
455 if error.kind() == io::ErrorKind::PermissionDenied =>
458 if error.kind() == io::ErrorKind::PermissionDenied =>
456 {
459 {
457 // `hg status` on a read-only repository is fine
460 // `hg status` on a read-only repository is fine
458 }
461 }
459 Err(LockError::Other(error)) => {
462 Err(LockError::Other(error)) => {
460 // Report other I/O errors
463 // Report other I/O errors
461 Err(error)?
464 Err(error)?
462 }
465 }
463 }
466 }
464 Ok(())
467 Ok(())
465 }
468 }
466
469
467 fn ignore_files(repo: &Repo, config: &Config) -> Vec<PathBuf> {
470 fn ignore_files(repo: &Repo, config: &Config) -> Vec<PathBuf> {
468 let mut ignore_files = Vec::new();
471 let mut ignore_files = Vec::new();
469 let repo_ignore = repo.working_directory_vfs().join(".hgignore");
472 let repo_ignore = repo.working_directory_vfs().join(".hgignore");
470 if repo_ignore.exists() {
473 if repo_ignore.exists() {
471 ignore_files.push(repo_ignore)
474 ignore_files.push(repo_ignore)
472 }
475 }
473 for (key, value) in config.iter_section(b"ui") {
476 for (key, value) in config.iter_section(b"ui") {
474 if key == b"ignore" || key.starts_with(b"ignore.") {
477 if key == b"ignore" || key.starts_with(b"ignore.") {
475 let path = get_path_from_bytes(value);
478 let path = get_path_from_bytes(value);
476 // TODO: expand "~/" and environment variables here, like Python
479 // TODO: expand "~/" and environment variables here, like Python
477 // does with `os.path.expanduser` and `os.path.expandvars`
480 // does with `os.path.expanduser` and `os.path.expandvars`
478
481
479 let joined = repo.working_directory_path().join(path);
482 let joined = repo.working_directory_path().join(path);
480 ignore_files.push(joined);
483 ignore_files.push(joined);
481 }
484 }
482 }
485 }
483 ignore_files
486 ignore_files
484 }
487 }
485
488
486 struct DisplayStatusPaths<'a> {
489 struct DisplayStatusPaths<'a> {
487 ui: &'a Ui,
490 ui: &'a Ui,
488 no_status: bool,
491 no_status: bool,
489 relativize: Option<RelativizePaths>,
492 relativize: Option<RelativizePaths>,
490 }
493 }
491
494
492 impl DisplayStatusPaths<'_> {
495 impl DisplayStatusPaths<'_> {
493 // Probably more elegant to use a Deref or Borrow trait rather than
496 // Probably more elegant to use a Deref or Borrow trait rather than
494 // hardcode HgPathBuf, but probably not really useful at this point
497 // hardcode HgPathBuf, but probably not really useful at this point
495 fn display(
498 fn display(
496 &self,
499 &self,
497 status_prefix: &[u8],
500 status_prefix: &[u8],
498 label: &'static str,
501 label: &'static str,
499 mut paths: Vec<StatusPath<'_>>,
502 mut paths: Vec<StatusPath<'_>>,
500 ) -> Result<(), CommandError> {
503 ) -> Result<(), CommandError> {
501 paths.sort_unstable();
504 paths.sort_unstable();
502 // TODO: get the stdout lock once for the whole loop
505 // TODO: get the stdout lock once for the whole loop
503 // instead of in each write
506 // instead of in each write
504 for StatusPath { path, copy_source } in paths {
507 for StatusPath { path, copy_source } in paths {
505 let relative;
508 let relative;
506 let path = if let Some(relativize) = &self.relativize {
509 let path = if let Some(relativize) = &self.relativize {
507 relative = relativize.relativize(&path);
510 relative = relativize.relativize(&path);
508 &*relative
511 &*relative
509 } else {
512 } else {
510 path.as_bytes()
513 path.as_bytes()
511 };
514 };
512 // TODO: Add a way to use `write_bytes!` instead of `format_bytes!`
515 // TODO: Add a way to use `write_bytes!` instead of `format_bytes!`
513 // in order to stream to stdout instead of allocating an
516 // in order to stream to stdout instead of allocating an
514 // intermediate `Vec<u8>`.
517 // intermediate `Vec<u8>`.
515 if !self.no_status {
518 if !self.no_status {
516 self.ui.write_stdout_labelled(status_prefix, label)?
519 self.ui.write_stdout_labelled(status_prefix, label)?
517 }
520 }
518 self.ui
521 self.ui
519 .write_stdout_labelled(&format_bytes!(b"{}\n", path), label)?;
522 .write_stdout_labelled(&format_bytes!(b"{}\n", path), label)?;
520 if let Some(source) = copy_source {
523 if let Some(source) = copy_source {
521 let label = "status.copied";
524 let label = "status.copied";
522 self.ui.write_stdout_labelled(
525 self.ui.write_stdout_labelled(
523 &format_bytes!(b" {}\n", source.as_bytes()),
526 &format_bytes!(b" {}\n", source.as_bytes()),
524 label,
527 label,
525 )?
528 )?
526 }
529 }
527 }
530 }
528 Ok(())
531 Ok(())
529 }
532 }
530 }
533 }
531
534
532 /// Check if a file is modified by comparing actual repo store and file system.
535 /// Check if a file is modified by comparing actual repo store and file system.
533 ///
536 ///
534 /// This is meant to be used for files that the dirstate cannot resolve, due
537 /// This is meant to be used for files that the dirstate cannot resolve, due
535 /// to time resolution limits.
538 /// to time resolution limits.
536 fn unsure_is_modified(
539 fn unsure_is_modified(
537 repo: &Repo,
540 repo: &Repo,
538 manifest: &Manifest,
541 manifest: &Manifest,
539 hg_path: &HgPath,
542 hg_path: &HgPath,
540 ) -> Result<bool, HgError> {
543 ) -> Result<bool, HgError> {
541 let vfs = repo.working_directory_vfs();
544 let vfs = repo.working_directory_vfs();
542 let fs_path = hg_path_to_path_buf(hg_path).expect("HgPath conversion");
545 let fs_path = hg_path_to_path_buf(hg_path).expect("HgPath conversion");
543 let fs_metadata = vfs.symlink_metadata(&fs_path)?;
546 let fs_metadata = vfs.symlink_metadata(&fs_path)?;
544 let is_symlink = fs_metadata.file_type().is_symlink();
547 let is_symlink = fs_metadata.file_type().is_symlink();
545 // TODO: Also account for `FALLBACK_SYMLINK` and `FALLBACK_EXEC` from the
548 // TODO: Also account for `FALLBACK_SYMLINK` and `FALLBACK_EXEC` from the
546 // dirstate
549 // dirstate
547 let fs_flags = if is_symlink {
550 let fs_flags = if is_symlink {
548 Some(b'l')
551 Some(b'l')
549 } else if has_exec_bit(&fs_metadata) {
552 } else if has_exec_bit(&fs_metadata) {
550 Some(b'x')
553 Some(b'x')
551 } else {
554 } else {
552 None
555 None
553 };
556 };
554
557
555 let entry = manifest
558 let entry = manifest
556 .find_by_path(hg_path)?
559 .find_by_path(hg_path)?
557 .expect("ambgious file not in p1");
560 .expect("ambgious file not in p1");
558 if entry.flags != fs_flags {
561 if entry.flags != fs_flags {
559 return Ok(true);
562 return Ok(true);
560 }
563 }
561 let filelog = repo.filelog(hg_path)?;
564 let filelog = repo.filelog(hg_path)?;
562 let fs_len = fs_metadata.len();
565 let fs_len = fs_metadata.len();
563 let file_node = entry.node_id()?;
566 let file_node = entry.node_id()?;
564 let filelog_entry = filelog.entry_for_node(file_node).map_err(|_| {
567 let filelog_entry = filelog.entry_for_node(file_node).map_err(|_| {
565 HgError::corrupted(format!(
568 HgError::corrupted(format!(
566 "filelog missing node {:?} from manifest",
569 "filelog missing node {:?} from manifest",
567 file_node
570 file_node
568 ))
571 ))
569 })?;
572 })?;
570 if filelog_entry.file_data_len_not_equal_to(fs_len) {
573 if filelog_entry.file_data_len_not_equal_to(fs_len) {
571 // No need to read file contents:
574 // No need to read file contents:
572 // it cannot be equal if it has a different length.
575 // it cannot be equal if it has a different length.
573 return Ok(true);
576 return Ok(true);
574 }
577 }
575
578
576 let p1_filelog_data = filelog_entry.data()?;
579 let p1_filelog_data = filelog_entry.data()?;
577 let p1_contents = p1_filelog_data.file_data()?;
580 let p1_contents = p1_filelog_data.file_data()?;
578 if p1_contents.len() as u64 != fs_len {
581 if p1_contents.len() as u64 != fs_len {
579 // No need to read file contents:
582 // No need to read file contents:
580 // it cannot be equal if it has a different length.
583 // it cannot be equal if it has a different length.
581 return Ok(true);
584 return Ok(true);
582 }
585 }
583
586
584 let fs_contents = if is_symlink {
587 let fs_contents = if is_symlink {
585 get_bytes_from_os_string(vfs.read_link(fs_path)?.into_os_string())
588 get_bytes_from_os_string(vfs.read_link(fs_path)?.into_os_string())
586 } else {
589 } else {
587 vfs.read(fs_path)?
590 vfs.read(fs_path)?
588 };
591 };
589 Ok(p1_contents != &*fs_contents)
592 Ok(p1_contents != &*fs_contents)
590 }
593 }
591
594
592 fn print_pattern_file_warning(
595 fn print_pattern_file_warning(
593 warning: &PatternFileWarning,
596 warning: &PatternFileWarning,
594 repo: &Repo,
597 repo: &Repo,
595 ) -> Vec<u8> {
598 ) -> Vec<u8> {
596 match warning {
599 match warning {
597 PatternFileWarning::InvalidSyntax(path, syntax) => format_bytes!(
600 PatternFileWarning::InvalidSyntax(path, syntax) => format_bytes!(
598 b"{}: ignoring invalid syntax '{}'\n",
601 b"{}: ignoring invalid syntax '{}'\n",
599 get_bytes_from_path(path),
602 get_bytes_from_path(path),
600 &*syntax
603 &*syntax
601 ),
604 ),
602 PatternFileWarning::NoSuchFile(path) => {
605 PatternFileWarning::NoSuchFile(path) => {
603 let path = if let Ok(relative) =
606 let path = if let Ok(relative) =
604 path.strip_prefix(repo.working_directory_path())
607 path.strip_prefix(repo.working_directory_path())
605 {
608 {
606 relative
609 relative
607 } else {
610 } else {
608 &*path
611 &*path
609 };
612 };
610 format_bytes!(
613 format_bytes!(
611 b"skipping unreadable pattern file '{}': \
614 b"skipping unreadable pattern file '{}': \
612 No such file or directory\n",
615 No such file or directory\n",
613 get_bytes_from_path(path),
616 get_bytes_from_path(path),
614 )
617 )
615 }
618 }
616 }
619 }
617 }
620 }
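An illustration-only helper (not part of the diff) spelling out the composition rule used above: when a repository is both narrow and sparse, a path is visible to `rhg status` only if both matchers accept it. It relies solely on `hg` APIs this file already imports.

use hg::matchers::{IntersectionMatcher, Matcher};
use hg::utils::hg_path::HgPath;

/// Sketch: a path survives status filtering only when the narrow spec and the
/// sparse profile both include it.
fn visible_under_both(
    narrow: Box<dyn Matcher + Sync>,
    sparse: Box<dyn Matcher + Sync>,
    path: &HgPath,
) -> bool {
    IntersectionMatcher::new(narrow, sparse).matches(path)
}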
@@ -1,277 +1,290 b''
1 use crate::ui::utf8_to_local;
1 use crate::ui::utf8_to_local;
2 use crate::ui::UiError;
2 use crate::ui::UiError;
3 use crate::NoRepoInCwdError;
3 use crate::NoRepoInCwdError;
4 use format_bytes::format_bytes;
4 use format_bytes::format_bytes;
5 use hg::config::{ConfigError, ConfigParseError, ConfigValueParseError};
5 use hg::config::{ConfigError, ConfigParseError, ConfigValueParseError};
6 use hg::dirstate_tree::on_disk::DirstateV2ParseError;
6 use hg::dirstate_tree::on_disk::DirstateV2ParseError;
7 use hg::errors::HgError;
7 use hg::errors::HgError;
8 use hg::exit_codes;
8 use hg::exit_codes;
9 use hg::repo::RepoError;
9 use hg::repo::RepoError;
10 use hg::revlog::revlog::RevlogError;
10 use hg::revlog::revlog::RevlogError;
11 use hg::sparse::SparseConfigError;
11 use hg::sparse::SparseConfigError;
12 use hg::utils::files::get_bytes_from_path;
12 use hg::utils::files::get_bytes_from_path;
13 use hg::{DirstateError, DirstateMapError, StatusError};
13 use hg::{DirstateError, DirstateMapError, StatusError};
14 use std::convert::From;
14 use std::convert::From;
15
15
16 /// The kind of command error
16 /// The kind of command error
17 #[derive(Debug)]
17 #[derive(Debug)]
18 pub enum CommandError {
18 pub enum CommandError {
19 /// Exit with an error message and "standard" failure exit code.
19 /// Exit with an error message and "standard" failure exit code.
20 Abort {
20 Abort {
21 message: Vec<u8>,
21 message: Vec<u8>,
22 detailed_exit_code: exit_codes::ExitCode,
22 detailed_exit_code: exit_codes::ExitCode,
23 hint: Option<Vec<u8>>,
23 hint: Option<Vec<u8>>,
24 },
24 },
25
25
26 /// Exit with a failure exit code but no message.
26 /// Exit with a failure exit code but no message.
27 Unsuccessful,
27 Unsuccessful,
28
28
29 /// Encountered something (such as a CLI argument, repository layout, …)
29 /// Encountered something (such as a CLI argument, repository layout, …)
30 /// not supported by this version of `rhg`. Depending on configuration
30 /// not supported by this version of `rhg`. Depending on configuration
31 /// `rhg` may attempt to silently fall back to Python-based `hg`, which
31 /// `rhg` may attempt to silently fall back to Python-based `hg`, which
32 /// may or may not support this feature.
32 /// may or may not support this feature.
33 UnsupportedFeature { message: Vec<u8> },
33 UnsupportedFeature { message: Vec<u8> },
34 /// The fallback executable does not exist (or has some other problem if
34 /// The fallback executable does not exist (or has some other problem if
35 /// we end up being more precise about broken fallbacks).
35 /// we end up being more precise about broken fallbacks).
36 InvalidFallback { path: Vec<u8>, err: String },
36 InvalidFallback { path: Vec<u8>, err: String },
37 }
37 }
38
38
39 impl CommandError {
39 impl CommandError {
40 pub fn abort(message: impl AsRef<str>) -> Self {
40 pub fn abort(message: impl AsRef<str>) -> Self {
41 CommandError::abort_with_exit_code(message, exit_codes::ABORT)
41 CommandError::abort_with_exit_code(message, exit_codes::ABORT)
42 }
42 }
43
43
44 pub fn abort_with_exit_code(
44 pub fn abort_with_exit_code(
45 message: impl AsRef<str>,
45 message: impl AsRef<str>,
46 detailed_exit_code: exit_codes::ExitCode,
46 detailed_exit_code: exit_codes::ExitCode,
47 ) -> Self {
47 ) -> Self {
48 CommandError::Abort {
48 CommandError::Abort {
49 // TODO: bytes-based (instead of Unicode-based) formatting
49 // TODO: bytes-based (instead of Unicode-based) formatting
50 // of error messages to handle non-UTF-8 filenames etc:
50 // of error messages to handle non-UTF-8 filenames etc:
51 // https://www.mercurial-scm.org/wiki/EncodingStrategy#Mixing_output
51 // https://www.mercurial-scm.org/wiki/EncodingStrategy#Mixing_output
52 message: utf8_to_local(message.as_ref()).into(),
52 message: utf8_to_local(message.as_ref()).into(),
53 detailed_exit_code: detailed_exit_code,
53 detailed_exit_code: detailed_exit_code,
54 hint: None,
54 hint: None,
55 }
55 }
56 }
56 }
57
57
58 pub fn abort_with_exit_code_and_hint(
58 pub fn abort_with_exit_code_and_hint(
59 message: impl AsRef<str>,
59 message: impl AsRef<str>,
60 detailed_exit_code: exit_codes::ExitCode,
60 detailed_exit_code: exit_codes::ExitCode,
61 hint: Option<impl AsRef<str>>,
61 hint: Option<impl AsRef<str>>,
62 ) -> Self {
62 ) -> Self {
63 CommandError::Abort {
63 CommandError::Abort {
64 message: utf8_to_local(message.as_ref()).into(),
64 message: utf8_to_local(message.as_ref()).into(),
65 detailed_exit_code,
65 detailed_exit_code,
66 hint: hint.map(|h| utf8_to_local(h.as_ref()).into()),
66 hint: hint.map(|h| utf8_to_local(h.as_ref()).into()),
67 }
67 }
68 }
68 }
69
69
70 pub fn abort_with_exit_code_bytes(
70 pub fn abort_with_exit_code_bytes(
71 message: impl AsRef<[u8]>,
71 message: impl AsRef<[u8]>,
72 detailed_exit_code: exit_codes::ExitCode,
72 detailed_exit_code: exit_codes::ExitCode,
73 ) -> Self {
73 ) -> Self {
74 // TODO: use this everywhere it makes sense instead of the string
74 // TODO: use this everywhere it makes sense instead of the string
75 // version.
75 // version.
76 CommandError::Abort {
76 CommandError::Abort {
77 message: message.as_ref().into(),
77 message: message.as_ref().into(),
78 detailed_exit_code,
78 detailed_exit_code,
79 hint: None,
79 hint: None,
80 }
80 }
81 }
81 }
82
82
83 pub fn unsupported(message: impl AsRef<str>) -> Self {
83 pub fn unsupported(message: impl AsRef<str>) -> Self {
84 CommandError::UnsupportedFeature {
84 CommandError::UnsupportedFeature {
85 message: utf8_to_local(message.as_ref()).into(),
85 message: utf8_to_local(message.as_ref()).into(),
86 }
86 }
87 }
87 }
88 }
88 }
89
89
90 /// For now we don’t differentiate between invalid CLI args and args that are
90 /// For now we don’t differentiate between invalid CLI args and args that are
91 /// valid for `hg` but not supported yet by `rhg`.
91 /// valid for `hg` but not supported yet by `rhg`.
92 impl From<clap::Error> for CommandError {
92 impl From<clap::Error> for CommandError {
93 fn from(error: clap::Error) -> Self {
93 fn from(error: clap::Error) -> Self {
94 CommandError::unsupported(error.to_string())
94 CommandError::unsupported(error.to_string())
95 }
95 }
96 }
96 }
97
97
98 impl From<HgError> for CommandError {
98 impl From<HgError> for CommandError {
99 fn from(error: HgError) -> Self {
99 fn from(error: HgError) -> Self {
100 match error {
100 match error {
101 HgError::UnsupportedFeature(message) => {
101 HgError::UnsupportedFeature(message) => {
102 CommandError::unsupported(message)
102 CommandError::unsupported(message)
103 }
103 }
104 HgError::CensoredNodeError => {
104 HgError::CensoredNodeError => {
105 CommandError::unsupported("Encountered a censored node")
105 CommandError::unsupported("Encountered a censored node")
106 }
106 }
107 HgError::Abort {
108 message,
109 detailed_exit_code,
110 hint,
111 } => CommandError::abort_with_exit_code_and_hint(
112 message,
113 detailed_exit_code,
114 hint,
115 ),
116 _ => CommandError::abort(error.to_string()),
117 }
118 }
119 }
120
121 impl From<ConfigValueParseError> for CommandError {
122 fn from(error: ConfigValueParseError) -> Self {
123 CommandError::abort_with_exit_code(
124 error.to_string(),
125 exit_codes::CONFIG_ERROR_ABORT,
126 )
127 }
128 }
129
130 impl From<UiError> for CommandError {
131 fn from(_error: UiError) -> Self {
132 // If we already failed writing to stdout or stderr,
133 // writing an error message to stderr about it would be likely to fail
134 // too.
135 CommandError::abort("")
136 }
137 }
138
139 impl From<RepoError> for CommandError {
140 fn from(error: RepoError) -> Self {
141 match error {
142 RepoError::NotFound { at } => {
143 CommandError::abort_with_exit_code_bytes(
144 format_bytes!(
145 b"abort: repository {} not found",
146 get_bytes_from_path(at)
147 ),
148 exit_codes::ABORT,
149 )
150 }
151 RepoError::ConfigParseError(error) => error.into(),
152 RepoError::Other(error) => error.into(),
153 }
154 }
155 }
156
157 impl<'a> From<&'a NoRepoInCwdError> for CommandError {
158 fn from(error: &'a NoRepoInCwdError) -> Self {
159 let NoRepoInCwdError { cwd } = error;
160 CommandError::abort_with_exit_code_bytes(
161 format_bytes!(
162 b"abort: no repository found in '{}' (.hg not found)!",
163 get_bytes_from_path(cwd)
164 ),
165 exit_codes::ABORT,
166 )
167 }
168 }
169
170 impl From<ConfigError> for CommandError {
171 fn from(error: ConfigError) -> Self {
172 match error {
173 ConfigError::Parse(error) => error.into(),
174 ConfigError::Other(error) => error.into(),
175 }
176 }
177 }
178
179 impl From<ConfigParseError> for CommandError {
180 fn from(error: ConfigParseError) -> Self {
181 let ConfigParseError {
182 origin,
183 line,
184 message,
185 } = error;
186 let line_message = if let Some(line_number) = line {
187 format_bytes!(b":{}", line_number.to_string().into_bytes())
188 } else {
189 Vec::new()
190 };
191 CommandError::abort_with_exit_code_bytes(
192 format_bytes!(
193 b"config error at {}{}: {}",
194 origin,
195 line_message,
196 message
197 ),
198 exit_codes::CONFIG_ERROR_ABORT,
199 )
200 }
201 }
202
203 impl From<(RevlogError, &str)> for CommandError {
204 fn from((err, rev): (RevlogError, &str)) -> CommandError {
205 match err {
206 RevlogError::WDirUnsupported => CommandError::abort(
207 "abort: working directory revision cannot be specified",
208 ),
209 RevlogError::InvalidRevision => CommandError::abort(format!(
210 "abort: invalid revision identifier: {}",
211 rev
212 )),
213 RevlogError::AmbiguousPrefix => CommandError::abort(format!(
214 "abort: ambiguous revision identifier: {}",
215 rev
216 )),
217 RevlogError::Other(error) => error.into(),
218 }
219 }
220 }
221
222 impl From<StatusError> for CommandError {
223 fn from(error: StatusError) -> Self {
224 CommandError::abort(format!("{}", error))
225 }
226 }
227
228 impl From<DirstateMapError> for CommandError {
229 fn from(error: DirstateMapError) -> Self {
230 CommandError::abort(format!("{}", error))
231 }
232 }
233
234 impl From<DirstateError> for CommandError {
235 fn from(error: DirstateError) -> Self {
236 match error {
237 DirstateError::Common(error) => error.into(),
238 DirstateError::Map(error) => error.into(),
239 }
240 }
241 }
242
243 impl From<DirstateV2ParseError> for CommandError {
244 fn from(error: DirstateV2ParseError) -> Self {
245 HgError::from(error).into()
246 }
247 }
248
249 impl From<SparseConfigError> for CommandError {
250 fn from(e: SparseConfigError) -> Self {
251 match e {
252 SparseConfigError::IncludesAfterExcludes { context } => {
253 Self::abort_with_exit_code_bytes(
254 format_bytes!(
255 b"{} config cannot have includes after excludes",
256 context
257 ),
258 exit_codes::CONFIG_PARSE_ERROR_ABORT,
259 )
260 }
261 SparseConfigError::EntryOutsideSection { context, line } => {
262 Self::abort_with_exit_code_bytes(
263 format_bytes!(
264 b"{} config entry outside of section: {}",
265 context,
266 &line,
267 ),
268 exit_codes::CONFIG_PARSE_ERROR_ABORT,
269 )
270 }
271 SparseConfigError::InvalidNarrowPrefix(prefix) => {
272 Self::abort_with_exit_code_bytes(
273 format_bytes!(
274 b"invalid prefix on narrow pattern: {}",
275 &prefix
276 ),
277 exit_codes::ABORT,
278 )
279 }
280 SparseConfigError::IncludesInNarrow => Self::abort(
281 "including other spec files using '%include' \
282 is not supported in narrowspec",
283 ),
284 SparseConfigError::HgError(e) => Self::from(e),
285 SparseConfigError::PatternError(e) => {
286 Self::unsupported(format!("{}", e))
287 }
288 }
289 }
290 }
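
For readers less familiar with this error-handling style: the From impls above (including the new InvalidNarrowPrefix and IncludesInNarrow arms) exist so command code can bubble lower-level errors up with ? and still end with a user-facing CommandError. Below is a small, self-contained sketch of that pattern using toy stand-in types; it only illustrates the mechanism and is not the actual rhg/hg-core definitions.

    // Toy stand-ins for the real error types; the mechanism is the same:
    // a `From` impl lets `?` convert the low-level error automatically.
    #[derive(Debug)]
    struct StatusError(String);

    #[derive(Debug)]
    enum CommandError {
        Abort { message: String },
    }

    impl From<StatusError> for CommandError {
        fn from(error: StatusError) -> Self {
            CommandError::Abort {
                message: format!("{}", error.0),
            }
        }
    }

    // Hypothetical low-level operation that can fail with the low-level error.
    fn compute_status() -> Result<Vec<String>, StatusError> {
        Err(StatusError("dirstate is corrupted".into()))
    }

    // Thanks to the `From` impl, `?` converts StatusError into CommandError
    // without an explicit `map_err` at the call site.
    fn run_status() -> Result<(), CommandError> {
        let _changes = compute_status()?;
        Ok(())
    }

    fn main() {
        match run_status() {
            Ok(()) => println!("clean"),
            Err(CommandError::Abort { message }) => eprintln!("abort: {}", message),
        }
    }

The real CommandError in rhg additionally carries exit codes and byte-string messages (see abort_with_exit_code_bytes above), but the conversion flow is the same.
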
@@ -1,120 +1,117 b''
1 #require rhg
2
3 $ NO_FALLBACK="env RHG_ON_UNSUPPORTED=abort"
4
5 Rhg works well when sparse working copy is enabled.
6
7 $ cd "$TESTTMP"
8 $ hg init repo-sparse
9 $ cd repo-sparse
10 $ cat > .hg/hgrc <<EOF
11 > [extensions]
12 > sparse=
13 > EOF
14
15 $ echo a > show
16 $ echo x > hide
17 $ mkdir dir1 dir2
18 $ echo x > dir1/x
19 $ echo y > dir1/y
20 $ echo z > dir2/z
21
22 $ hg ci -Aqm 'initial'
23 $ hg debugsparse --include 'show'
24 $ ls -A
25 .hg
26 show
27
28 $ tip=$(hg log -r . --template '{node}')
29 $ $NO_FALLBACK rhg files -r "$tip"
30 dir1/x
31 dir1/y
32 dir2/z
33 hide
34 show
35 $ $NO_FALLBACK rhg files
36 show
37
38 $ $NO_FALLBACK rhg cat -r "$tip" hide
39 x
40
41 $ cd ..
42
43 We support most things when narrow is enabled, too, with a couple of caveats.
44
45 $ . "$TESTDIR/narrow-library.sh"
46 $ real_hg=$RHG_FALLBACK_EXECUTABLE
47
48 $ cat >> $HGRCPATH <<EOF
49 > [extensions]
50 > narrow=
51 > EOF
52
53 $ hg clone --narrow ./repo-sparse repo-narrow --include dir1
54 requesting all changes
55 adding changesets
56 adding manifests
57 adding file changes
58 added 1 changesets with 2 changes to 2 files
59 new changesets 6d714a4a2998
60 updating to branch default
61 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
62
63 $ cd repo-narrow
64
65 $ $NO_FALLBACK rhg cat -r "$tip" dir1/x
66 x
67 $ "$real_hg" cat -r "$tip" dir1/x
68 x
69
70 TODO: bad error message
71
72 $ $NO_FALLBACK rhg cat -r "$tip" hide
73 abort: invalid revision identifier: 6d714a4a2998cbfd0620db44da58b749f6565d63
74 [255]
75 $ "$real_hg" cat -r "$tip" hide
76 [1]
77
78 A naive implementation of [rhg files] leaks the paths that are supposed to be
79 hidden by narrow, so we just fall back to hg.
80
81 $ $NO_FALLBACK rhg files -r "$tip"
82 unsupported feature: rhg files -r <rev> is not supported in narrow clones
83 [252]
84 $ "$real_hg" files -r "$tip"
85 dir1/x
86 dir1/y
87
88 Hg status needs to do some filtering based on narrow spec, so we don't
88 Hg status needs to do some filtering based on narrow spec
89 support it in rhg for narrow clones yet.
89
90 $ mkdir dir2
91 $ touch dir2/q
92 $ "$real_hg" status
93 $ $NO_FALLBACK rhg --config rhg.status=true status
95 unsupported feature: rhg status is not supported for narrow clones yet
96 [252]
94
95 Adding "orphaned" index files:
96
97 $ (cd ..; cp repo-sparse/.hg/store/data/hide.i repo-narrow/.hg/store/data/hide.i)
98 $ (cd ..; mkdir repo-narrow/.hg/store/data/dir2; cp repo-sparse/.hg/store/data/dir2/z.i repo-narrow/.hg/store/data/dir2/z.i)
99 $ "$real_hg" verify
100 checking changesets
101 checking manifests
102 crosschecking files in changesets and manifests
103 checking files
104 checked 1 changesets with 2 changes to 2 files
105
106 $ "$real_hg" files -r "$tip"
107 dir1/x
108 dir1/y
109
110 # TODO: even though [hg files] hides the orphaned dir2/z, [hg cat] still shows it.
111 # rhg has the same issue, but at least it's not specific to rhg.
112 # This is despite [hg verify] succeeding above.
113
114 $ $NO_FALLBACK rhg cat -r "$tip" dir2/z
115 z
116 $ "$real_hg" cat -r "$tip" dir2/z
117 z
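
The status-related part of the test above is the behavioural core of this change: rhg status output has to be intersected with the narrow spec so that paths outside it (such as dir2/q here) never appear, and the same concern is why rhg files -r <rev> still falls back. A rough, stand-alone sketch of that filtering idea follows, using toy types rather than the actual hg-core Matcher machinery:

    // A minimal matcher abstraction; the real code uses hg-core's matchers
    // (for example an include matcher built from the narrowspec).
    trait Matcher {
        fn matches(&self, path: &str) -> bool;
    }

    // Matches anything at or under a set of directory prefixes, which is
    // roughly what a narrow "path:dir1" include means.
    struct PrefixMatcher {
        prefixes: Vec<String>,
    }

    impl Matcher for PrefixMatcher {
        fn matches(&self, path: &str) -> bool {
            self.prefixes
                .iter()
                .any(|p| path == p.as_str() || path.starts_with(&format!("{}/", p)))
        }
    }

    // Keep only the status entries that fall inside the narrow spec.
    fn filter_status<'a>(paths: &[&'a str], narrow: &dyn Matcher) -> Vec<&'a str> {
        paths.iter().copied().filter(|p| narrow.matches(p)).collect()
    }

    fn main() {
        let narrow = PrefixMatcher { prefixes: vec!["dir1".to_owned()] };
        let raw = ["dir1/x", "dir1/y", "dir2/q"];
        // dir2/q lives outside the narrow clone and must be dropped.
        assert_eq!(filter_status(&raw, &narrow), ["dir1/x", "dir1/y"]);
    }
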