Show More
@@ -0,0 +1,111 b'' | |||
|
1 | use std::path::Path; | |
|
2 | ||
|
3 | use crate::{ | |
|
4 | errors::HgError, | |
|
5 | exit_codes, | |
|
6 | filepatterns::parse_pattern_file_contents, | |
|
7 | matchers::{ | |
|
8 | AlwaysMatcher, DifferenceMatcher, IncludeMatcher, Matcher, | |
|
9 | NeverMatcher, | |
|
10 | }, | |
|
11 | repo::Repo, | |
|
12 | requirements::NARROW_REQUIREMENT, | |
|
13 | sparse::{self, SparseConfigError, SparseWarning}, | |
|
14 | }; | |
|
15 | ||
|
16 | /// The file in .hg/store/ that indicates which paths exist in the store | |
|
17 | const FILENAME: &str = "narrowspec"; | |
|
18 | /// The file in .hg/ that indicates which paths exist in the dirstate | |
|
19 | const DIRSTATE_FILENAME: &str = "narrowspec.dirstate"; | |
|
20 | ||
|
21 | /// Pattern prefixes that are allowed in narrow patterns. This list MUST | |
|
22 | /// only contain patterns that are fast and safe to evaluate. Keep in mind | |
|
23 | /// that patterns are supplied by clients and executed on remote servers | |
|
24 | /// as part of wire protocol commands. That means that changes to this | |
|
25 | /// data structure influence the wire protocol and should not be taken | |
|
26 | /// lightly - especially removals. | |
|
27 | const VALID_PREFIXES: [&str; 2] = ["path:", "rootfilesin:"]; | |
|
28 | ||
|
29 | /// Return the matcher for the current narrow spec, and all configuration | |
|
30 | /// warnings to display. | |
|
31 | pub fn matcher( | |
|
32 | repo: &Repo, | |
|
33 | ) -> Result<(Box<dyn Matcher + Sync>, Vec<SparseWarning>), SparseConfigError> { | |
|
34 | let mut warnings = vec![]; | |
|
35 | if !repo.requirements().contains(NARROW_REQUIREMENT) { | |
|
36 | return Ok((Box::new(AlwaysMatcher), warnings)); | |
|
37 | } | |
|
38 | // Treat "narrowspec does not exist" the same as "narrowspec file exists | |
|
39 | // and is empty". | |
|
40 | let store_spec = repo.store_vfs().try_read(FILENAME)?.unwrap_or(vec![]); | |
|
41 | let working_copy_spec = | |
|
42 | repo.hg_vfs().try_read(DIRSTATE_FILENAME)?.unwrap_or(vec![]); | |
|
43 | if store_spec != working_copy_spec { | |
|
44 | return Err(HgError::abort( | |
|
45 | "working copy's narrowspec is stale", | |
|
46 | exit_codes::STATE_ERROR, | |
|
47 | Some("run 'hg tracked --update-working-copy'".into()), | |
|
48 | ) | |
|
49 | .into()); | |
|
50 | } | |
|
51 | ||
|
52 | let config = sparse::parse_config( | |
|
53 | &store_spec, | |
|
54 | sparse::SparseConfigContext::Narrow, | |
|
55 | )?; | |
|
56 | ||
|
57 | warnings.extend(config.warnings); | |
|
58 | ||
|
59 | if !config.profiles.is_empty() { | |
|
60 | // TODO (from Python impl) maybe do something with profiles? | |
|
61 | return Err(SparseConfigError::IncludesInNarrow); | |
|
62 | } | |
|
63 | validate_patterns(&config.includes)?; | |
|
64 | validate_patterns(&config.excludes)?; | |
|
65 | ||
|
66 | if config.includes.is_empty() { | |
|
67 | return Ok((Box::new(NeverMatcher), warnings)); | |
|
68 | } | |
|
69 | ||
|
70 | let (patterns, subwarnings) = parse_pattern_file_contents( | |
|
71 | &config.includes, | |
|
72 | Path::new(""), | |
|
73 | None, | |
|
74 | false, | |
|
75 | )?; | |
|
76 | warnings.extend(subwarnings.into_iter().map(From::from)); | |
|
77 | ||
|
78 | let mut m: Box<dyn Matcher + Sync> = | |
|
79 | Box::new(IncludeMatcher::new(patterns)?); | |
|
80 | ||
|
81 | let (patterns, subwarnings) = parse_pattern_file_contents( | |
|
82 | &config.excludes, | |
|
83 | Path::new(""), | |
|
84 | None, | |
|
85 | false, | |
|
86 | )?; | |
|
87 | if !patterns.is_empty() { | |
|
88 | warnings.extend(subwarnings.into_iter().map(From::from)); | |
|
89 | let exclude_matcher = Box::new(IncludeMatcher::new(patterns)?); | |
|
90 | m = Box::new(DifferenceMatcher::new(m, exclude_matcher)); | |
|
91 | } | |
|
92 | ||
|
93 | Ok((m, warnings)) | |
|
94 | } | |
|
95 | ||
|
96 | fn validate_patterns(patterns: &[u8]) -> Result<(), SparseConfigError> { | |
|
97 | for pattern in patterns.split(|c| *c == b'\n') { | |
|
98 | if pattern.is_empty() { | |
|
99 | continue; | |
|
100 | } | |
|
101 | for prefix in VALID_PREFIXES.iter() { | |
|
102 | if pattern.starts_with(prefix.as_bytes()) { | |
|
103 | break; | |
|
104 | } | |
|
105 | return Err(SparseConfigError::InvalidNarrowPrefix( | |
|
106 | pattern.to_owned(), | |
|
107 | )); | |
|
108 | } | |
|
109 | } | |
|
110 | Ok(()) | |
|
111 | } |
@@ -1,22 +1,26 b'' | |||
|
1 | 1 | pub type ExitCode = i32; |
|
2 | 2 | |
|
3 | 3 | /// Successful exit |
|
4 | 4 | pub const OK: ExitCode = 0; |
|
5 | 5 | |
|
6 | 6 | /// Generic abort |
|
7 | 7 | pub const ABORT: ExitCode = 255; |
|
8 | 8 | |
|
9 | 9 | // Abort when there is a config related error |
|
10 | 10 | pub const CONFIG_ERROR_ABORT: ExitCode = 30; |
|
11 | 11 | |
|
12 | /// Indicates that the operation might work if retried in a different state. | |
|
13 | /// Examples: Unresolved merge conflicts, unfinished operations | |
|
14 | pub const STATE_ERROR: ExitCode = 20; | |
|
15 | ||
|
12 | 16 | // Abort when there is an error while parsing config |
|
13 | 17 | pub const CONFIG_PARSE_ERROR_ABORT: ExitCode = 10; |
|
14 | 18 | |
|
15 | 19 | /// Generic something completed but did not succeed |
|
16 | 20 | pub const UNSUCCESSFUL: ExitCode = 1; |
|
17 | 21 | |
|
18 | 22 | /// Command or feature not implemented by rhg |
|
19 | 23 | pub const UNIMPLEMENTED: ExitCode = 252; |
|
20 | 24 | |
|
21 | 25 | /// The fallback path is not valid |
|
22 | 26 | pub const INVALID_FALLBACK: ExitCode = 253; |
@@ -1,704 +1,706 b'' | |||
|
1 | 1 | // filepatterns.rs |
|
2 | 2 | // |
|
3 | 3 | // Copyright 2019 Raphaël Gomès <rgomes@octobus.net> |
|
4 | 4 | // |
|
5 | 5 | // This software may be used and distributed according to the terms of the |
|
6 | 6 | // GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | //! Handling of Mercurial-specific patterns. |
|
9 | 9 | |
|
10 | 10 | use crate::{ |
|
11 | 11 | utils::{ |
|
12 | 12 | files::{canonical_path, get_bytes_from_path, get_path_from_bytes}, |
|
13 | 13 | hg_path::{path_to_hg_path_buf, HgPathBuf, HgPathError}, |
|
14 | 14 | SliceExt, |
|
15 | 15 | }, |
|
16 | 16 | FastHashMap, PatternError, |
|
17 | 17 | }; |
|
18 | 18 | use lazy_static::lazy_static; |
|
19 | 19 | use regex::bytes::{NoExpand, Regex}; |
|
20 | 20 | use std::ops::Deref; |
|
21 | 21 | use std::path::{Path, PathBuf}; |
|
22 | 22 | use std::vec::Vec; |
|
23 | 23 | |
|
24 | 24 | lazy_static! { |
|
25 | 25 | static ref RE_ESCAPE: Vec<Vec<u8>> = { |
|
26 | 26 | let mut v: Vec<Vec<u8>> = (0..=255).map(|byte| vec![byte]).collect(); |
|
27 | 27 | let to_escape = b"()[]{}?*+-|^$\\.&~# \t\n\r\x0b\x0c"; |
|
28 | 28 | for byte in to_escape { |
|
29 | 29 | v[*byte as usize].insert(0, b'\\'); |
|
30 | 30 | } |
|
31 | 31 | v |
|
32 | 32 | }; |
|
33 | 33 | } |
|
34 | 34 | |
|
35 | 35 | /// These are matched in order |
|
36 | 36 | const GLOB_REPLACEMENTS: &[(&[u8], &[u8])] = |
|
37 | 37 | &[(b"*/", b"(?:.*/)?"), (b"*", b".*"), (b"", b"[^/]*")]; |
|
38 | 38 | |
|
39 | 39 | /// Appended to the regexp of globs |
|
40 | 40 | const GLOB_SUFFIX: &[u8; 7] = b"(?:/|$)"; |
|
41 | 41 | |
|
42 | 42 | #[derive(Debug, Clone, PartialEq, Eq)] |
|
43 | 43 | pub enum PatternSyntax { |
|
44 | 44 | /// A regular expression |
|
45 | 45 | Regexp, |
|
46 | 46 | /// Glob that matches at the front of the path |
|
47 | 47 | RootGlob, |
|
48 | 48 | /// Glob that matches at any suffix of the path (still anchored at |
|
49 | 49 | /// slashes) |
|
50 | 50 | Glob, |
|
51 | 51 | /// a path relative to repository root, which is matched recursively |
|
52 | 52 | Path, |
|
53 | 53 | /// A path relative to cwd |
|
54 | 54 | RelPath, |
|
55 | 55 | /// an unrooted glob (*.rs matches Rust files in all dirs) |
|
56 | 56 | RelGlob, |
|
57 | 57 | /// A regexp that needn't match the start of a name |
|
58 | 58 | RelRegexp, |
|
59 | 59 | /// A path relative to repository root, which is matched non-recursively |
|
60 | 60 | /// (will not match subdirectories) |
|
61 | 61 | RootFiles, |
|
62 | 62 | /// A file of patterns to read and include |
|
63 | 63 | Include, |
|
64 | 64 | /// A file of patterns to match against files under the same directory |
|
65 | 65 | SubInclude, |
|
66 | 66 | /// SubInclude with the result of parsing the included file |
|
67 | 67 | /// |
|
68 | 68 | /// Note: there is no ExpandedInclude because that expansion can be done |
|
69 | 69 | /// in place by replacing the Include pattern by the included patterns. |
|
70 | 70 | /// SubInclude requires more handling. |
|
71 | 71 | /// |
|
72 | 72 | /// Note: `Box` is used to minimize size impact on other enum variants |
|
73 | 73 | ExpandedSubInclude(Box<SubInclude>), |
|
74 | 74 | } |
|
75 | 75 | |
|
76 | 76 | /// Transforms a glob pattern into a regex |
|
77 | 77 | fn glob_to_re(pat: &[u8]) -> Vec<u8> { |
|
78 | 78 | let mut input = pat; |
|
79 | 79 | let mut res: Vec<u8> = vec![]; |
|
80 | 80 | let mut group_depth = 0; |
|
81 | 81 | |
|
82 | 82 | while let Some((c, rest)) = input.split_first() { |
|
83 | 83 | input = rest; |
|
84 | 84 | |
|
85 | 85 | match c { |
|
86 | 86 | b'*' => { |
|
87 | 87 | for (source, repl) in GLOB_REPLACEMENTS { |
|
88 | 88 | if let Some(rest) = input.drop_prefix(source) { |
|
89 | 89 | input = rest; |
|
90 | 90 | res.extend(*repl); |
|
91 | 91 | break; |
|
92 | 92 | } |
|
93 | 93 | } |
|
94 | 94 | } |
|
95 | 95 | b'?' => res.extend(b"."), |
|
96 | 96 | b'[' => { |
|
97 | 97 | match input.iter().skip(1).position(|b| *b == b']') { |
|
98 | 98 | None => res.extend(b"\\["), |
|
99 | 99 | Some(end) => { |
|
100 | 100 | // Account for the one we skipped |
|
101 | 101 | let end = end + 1; |
|
102 | 102 | |
|
103 | 103 | res.extend(b"["); |
|
104 | 104 | |
|
105 | 105 | for (i, b) in input[..end].iter().enumerate() { |
|
106 | 106 | if *b == b'!' && i == 0 { |
|
107 | 107 | res.extend(b"^") |
|
108 | 108 | } else if *b == b'^' && i == 0 { |
|
109 | 109 | res.extend(b"\\^") |
|
110 | 110 | } else if *b == b'\\' { |
|
111 | 111 | res.extend(b"\\\\") |
|
112 | 112 | } else { |
|
113 | 113 | res.push(*b) |
|
114 | 114 | } |
|
115 | 115 | } |
|
116 | 116 | res.extend(b"]"); |
|
117 | 117 | input = &input[end + 1..]; |
|
118 | 118 | } |
|
119 | 119 | } |
|
120 | 120 | } |
|
121 | 121 | b'{' => { |
|
122 | 122 | group_depth += 1; |
|
123 | 123 | res.extend(b"(?:") |
|
124 | 124 | } |
|
125 | 125 | b'}' if group_depth > 0 => { |
|
126 | 126 | group_depth -= 1; |
|
127 | 127 | res.extend(b")"); |
|
128 | 128 | } |
|
129 | 129 | b',' if group_depth > 0 => res.extend(b"|"), |
|
130 | 130 | b'\\' => { |
|
131 | 131 | let c = { |
|
132 | 132 | if let Some((c, rest)) = input.split_first() { |
|
133 | 133 | input = rest; |
|
134 | 134 | c |
|
135 | 135 | } else { |
|
136 | 136 | c |
|
137 | 137 | } |
|
138 | 138 | }; |
|
139 | 139 | res.extend(&RE_ESCAPE[*c as usize]) |
|
140 | 140 | } |
|
141 | 141 | _ => res.extend(&RE_ESCAPE[*c as usize]), |
|
142 | 142 | } |
|
143 | 143 | } |
|
144 | 144 | res |
|
145 | 145 | } |
|
146 | 146 | |
|
147 | 147 | fn escape_pattern(pattern: &[u8]) -> Vec<u8> { |
|
148 | 148 | pattern |
|
149 | 149 | .iter() |
|
150 | 150 | .flat_map(|c| RE_ESCAPE[*c as usize].clone()) |
|
151 | 151 | .collect() |
|
152 | 152 | } |
|
153 | 153 | |
|
154 | 154 | pub fn parse_pattern_syntax( |
|
155 | 155 | kind: &[u8], |
|
156 | 156 | ) -> Result<PatternSyntax, PatternError> { |
|
157 | 157 | match kind { |
|
158 | 158 | b"re:" => Ok(PatternSyntax::Regexp), |
|
159 | 159 | b"path:" => Ok(PatternSyntax::Path), |
|
160 | 160 | b"relpath:" => Ok(PatternSyntax::RelPath), |
|
161 | 161 | b"rootfilesin:" => Ok(PatternSyntax::RootFiles), |
|
162 | 162 | b"relglob:" => Ok(PatternSyntax::RelGlob), |
|
163 | 163 | b"relre:" => Ok(PatternSyntax::RelRegexp), |
|
164 | 164 | b"glob:" => Ok(PatternSyntax::Glob), |
|
165 | 165 | b"rootglob:" => Ok(PatternSyntax::RootGlob), |
|
166 | 166 | b"include:" => Ok(PatternSyntax::Include), |
|
167 | 167 | b"subinclude:" => Ok(PatternSyntax::SubInclude), |
|
168 | 168 | _ => Err(PatternError::UnsupportedSyntax( |
|
169 | 169 | String::from_utf8_lossy(kind).to_string(), |
|
170 | 170 | )), |
|
171 | 171 | } |
|
172 | 172 | } |
|
173 | 173 | |
|
174 | 174 | /// Builds the regex that corresponds to the given pattern. |
|
175 | 175 | /// If within a `syntax: regexp` context, returns the pattern, |
|
176 | 176 | /// otherwise, returns the corresponding regex. |
|
177 | 177 | fn _build_single_regex(entry: &IgnorePattern) -> Vec<u8> { |
|
178 | 178 | let IgnorePattern { |
|
179 | 179 | syntax, pattern, .. |
|
180 | 180 | } = entry; |
|
181 | 181 | if pattern.is_empty() { |
|
182 | 182 | return vec![]; |
|
183 | 183 | } |
|
184 | 184 | match syntax { |
|
185 | 185 | PatternSyntax::Regexp => pattern.to_owned(), |
|
186 | 186 | PatternSyntax::RelRegexp => { |
|
187 | 187 | // The `regex` crate accepts `**` while `re2` and Python's `re` |
|
188 | 188 | // do not. Checking for `*` correctly triggers the same error all |
|
189 | 189 | // engines. |
|
190 | 190 | if pattern[0] == b'^' |
|
191 | 191 | || pattern[0] == b'*' |
|
192 | 192 | || pattern.starts_with(b".*") |
|
193 | 193 | { |
|
194 | 194 | return pattern.to_owned(); |
|
195 | 195 | } |
|
196 | 196 | [&b".*"[..], pattern].concat() |
|
197 | 197 | } |
|
198 | 198 | PatternSyntax::Path | PatternSyntax::RelPath => { |
|
199 | 199 | if pattern == b"." { |
|
200 | 200 | return vec![]; |
|
201 | 201 | } |
|
202 | 202 | [escape_pattern(pattern).as_slice(), b"(?:/|$)"].concat() |
|
203 | 203 | } |
|
204 | 204 | PatternSyntax::RootFiles => { |
|
205 | 205 | let mut res = if pattern == b"." { |
|
206 | 206 | vec![] |
|
207 | 207 | } else { |
|
208 | 208 | // Pattern is a directory name. |
|
209 | 209 | [escape_pattern(pattern).as_slice(), b"/"].concat() |
|
210 | 210 | }; |
|
211 | 211 | |
|
212 | 212 | // Anything after the pattern must be a non-directory. |
|
213 | 213 | res.extend(b"[^/]+$"); |
|
214 | 214 | res |
|
215 | 215 | } |
|
216 | 216 | PatternSyntax::RelGlob => { |
|
217 | 217 | let glob_re = glob_to_re(pattern); |
|
218 | 218 | if let Some(rest) = glob_re.drop_prefix(b"[^/]*") { |
|
219 | 219 | [b".*", rest, GLOB_SUFFIX].concat() |
|
220 | 220 | } else { |
|
221 | 221 | [b"(?:.*/)?", glob_re.as_slice(), GLOB_SUFFIX].concat() |
|
222 | 222 | } |
|
223 | 223 | } |
|
224 | 224 | PatternSyntax::Glob | PatternSyntax::RootGlob => { |
|
225 | 225 | [glob_to_re(pattern).as_slice(), GLOB_SUFFIX].concat() |
|
226 | 226 | } |
|
227 | 227 | PatternSyntax::Include |
|
228 | 228 | | PatternSyntax::SubInclude |
|
229 | 229 | | PatternSyntax::ExpandedSubInclude(_) => unreachable!(), |
|
230 | 230 | } |
|
231 | 231 | } |
|
232 | 232 | |
|
233 | 233 | const GLOB_SPECIAL_CHARACTERS: [u8; 7] = |
|
234 | 234 | [b'*', b'?', b'[', b']', b'{', b'}', b'\\']; |
|
235 | 235 | |
|
236 | 236 | /// TODO support other platforms |
|
237 | 237 | #[cfg(unix)] |
|
238 | 238 | pub fn normalize_path_bytes(bytes: &[u8]) -> Vec<u8> { |
|
239 | 239 | if bytes.is_empty() { |
|
240 | 240 | return b".".to_vec(); |
|
241 | 241 | } |
|
242 | 242 | let sep = b'/'; |
|
243 | 243 | |
|
244 | 244 | let mut initial_slashes = bytes.iter().take_while(|b| **b == sep).count(); |
|
245 | 245 | if initial_slashes > 2 { |
|
246 | 246 | // POSIX allows one or two initial slashes, but treats three or more |
|
247 | 247 | // as single slash. |
|
248 | 248 | initial_slashes = 1; |
|
249 | 249 | } |
|
250 | 250 | let components = bytes |
|
251 | 251 | .split(|b| *b == sep) |
|
252 | 252 | .filter(|c| !(c.is_empty() || c == b".")) |
|
253 | 253 | .fold(vec![], |mut acc, component| { |
|
254 | 254 | if component != b".." |
|
255 | 255 | || (initial_slashes == 0 && acc.is_empty()) |
|
256 | 256 | || (!acc.is_empty() && acc[acc.len() - 1] == b"..") |
|
257 | 257 | { |
|
258 | 258 | acc.push(component) |
|
259 | 259 | } else if !acc.is_empty() { |
|
260 | 260 | acc.pop(); |
|
261 | 261 | } |
|
262 | 262 | acc |
|
263 | 263 | }); |
|
264 | 264 | let mut new_bytes = components.join(&sep); |
|
265 | 265 | |
|
266 | 266 | if initial_slashes > 0 { |
|
267 | 267 | let mut buf: Vec<_> = (0..initial_slashes).map(|_| sep).collect(); |
|
268 | 268 | buf.extend(new_bytes); |
|
269 | 269 | new_bytes = buf; |
|
270 | 270 | } |
|
271 | 271 | if new_bytes.is_empty() { |
|
272 | 272 | b".".to_vec() |
|
273 | 273 | } else { |
|
274 | 274 | new_bytes |
|
275 | 275 | } |
|
276 | 276 | } |
|
277 | 277 | |
|
278 | 278 | /// Wrapper function to `_build_single_regex` that short-circuits 'exact' globs |
|
279 | 279 | /// that don't need to be transformed into a regex. |
|
280 | 280 | pub fn build_single_regex( |
|
281 | 281 | entry: &IgnorePattern, |
|
282 | 282 | ) -> Result<Option<Vec<u8>>, PatternError> { |
|
283 | 283 | let IgnorePattern { |
|
284 | 284 | pattern, syntax, .. |
|
285 | 285 | } = entry; |
|
286 | 286 | let pattern = match syntax { |
|
287 | 287 | PatternSyntax::RootGlob |
|
288 | 288 | | PatternSyntax::Path |
|
289 | 289 | | PatternSyntax::RelGlob |
|
290 | 290 | | PatternSyntax::RootFiles => normalize_path_bytes(&pattern), |
|
291 | 291 | PatternSyntax::Include | PatternSyntax::SubInclude => { |
|
292 | 292 | return Err(PatternError::NonRegexPattern(entry.clone())) |
|
293 | 293 | } |
|
294 | 294 | _ => pattern.to_owned(), |
|
295 | 295 | }; |
|
296 | 296 | if *syntax == PatternSyntax::RootGlob |
|
297 | 297 | && !pattern.iter().any(|b| GLOB_SPECIAL_CHARACTERS.contains(b)) |
|
298 | 298 | { |
|
299 | 299 | Ok(None) |
|
300 | 300 | } else { |
|
301 | 301 | let mut entry = entry.clone(); |
|
302 | 302 | entry.pattern = pattern; |
|
303 | 303 | Ok(Some(_build_single_regex(&entry))) |
|
304 | 304 | } |
|
305 | 305 | } |
|
306 | 306 | |
|
307 | 307 | lazy_static! { |
|
308 | 308 | static ref SYNTAXES: FastHashMap<&'static [u8], &'static [u8]> = { |
|
309 | 309 | let mut m = FastHashMap::default(); |
|
310 | 310 | |
|
311 | 311 | m.insert(b"re".as_ref(), b"relre:".as_ref()); |
|
312 | 312 | m.insert(b"regexp".as_ref(), b"relre:".as_ref()); |
|
313 | 313 | m.insert(b"glob".as_ref(), b"relglob:".as_ref()); |
|
314 | 314 | m.insert(b"rootglob".as_ref(), b"rootglob:".as_ref()); |
|
315 | 315 | m.insert(b"include".as_ref(), b"include:".as_ref()); |
|
316 | 316 | m.insert(b"subinclude".as_ref(), b"subinclude:".as_ref()); |
|
317 | m.insert(b"path".as_ref(), b"path:".as_ref()); | |
|
318 | m.insert(b"rootfilesin".as_ref(), b"rootfilesin:".as_ref()); | |
|
317 | 319 | m |
|
318 | 320 | }; |
|
319 | 321 | } |
|
320 | 322 | |
|
321 | 323 | #[derive(Debug)] |
|
322 | 324 | pub enum PatternFileWarning { |
|
323 | 325 | /// (file path, syntax bytes) |
|
324 | 326 | InvalidSyntax(PathBuf, Vec<u8>), |
|
325 | 327 | /// File path |
|
326 | 328 | NoSuchFile(PathBuf), |
|
327 | 329 | } |
|
328 | 330 | |
|
329 | 331 | pub fn parse_pattern_file_contents( |
|
330 | 332 | lines: &[u8], |
|
331 | 333 | file_path: &Path, |
|
332 | 334 | default_syntax_override: Option<&[u8]>, |
|
333 | 335 | warn: bool, |
|
334 | 336 | ) -> Result<(Vec<IgnorePattern>, Vec<PatternFileWarning>), PatternError> { |
|
335 | 337 | let comment_regex = Regex::new(r"((?:^|[^\\])(?:\\\\)*)#.*").unwrap(); |
|
336 | 338 | |
|
337 | 339 | #[allow(clippy::trivial_regex)] |
|
338 | 340 | let comment_escape_regex = Regex::new(r"\\#").unwrap(); |
|
339 | 341 | let mut inputs: Vec<IgnorePattern> = vec![]; |
|
340 | 342 | let mut warnings: Vec<PatternFileWarning> = vec![]; |
|
341 | 343 | |
|
342 | 344 | let mut current_syntax = |
|
343 | 345 | default_syntax_override.unwrap_or(b"relre:".as_ref()); |
|
344 | 346 | |
|
345 | 347 | for (line_number, mut line) in lines.split(|c| *c == b'\n').enumerate() { |
|
346 | 348 | let line_number = line_number + 1; |
|
347 | 349 | |
|
348 | 350 | let line_buf; |
|
349 | 351 | if line.contains(&b'#') { |
|
350 | 352 | if let Some(cap) = comment_regex.captures(line) { |
|
351 | 353 | line = &line[..cap.get(1).unwrap().end()] |
|
352 | 354 | } |
|
353 | 355 | line_buf = comment_escape_regex.replace_all(line, NoExpand(b"#")); |
|
354 | 356 | line = &line_buf; |
|
355 | 357 | } |
|
356 | 358 | |
|
357 | 359 | let mut line = line.trim_end(); |
|
358 | 360 | |
|
359 | 361 | if line.is_empty() { |
|
360 | 362 | continue; |
|
361 | 363 | } |
|
362 | 364 | |
|
363 | 365 | if let Some(syntax) = line.drop_prefix(b"syntax:") { |
|
364 | 366 | let syntax = syntax.trim(); |
|
365 | 367 | |
|
366 | 368 | if let Some(rel_syntax) = SYNTAXES.get(syntax) { |
|
367 | 369 | current_syntax = rel_syntax; |
|
368 | 370 | } else if warn { |
|
369 | 371 | warnings.push(PatternFileWarning::InvalidSyntax( |
|
370 | 372 | file_path.to_owned(), |
|
371 | 373 | syntax.to_owned(), |
|
372 | 374 | )); |
|
373 | 375 | } |
|
374 | 376 | continue; |
|
375 | 377 | } |
|
376 | 378 | |
|
377 | 379 | let mut line_syntax: &[u8] = ¤t_syntax; |
|
378 | 380 | |
|
379 | 381 | for (s, rels) in SYNTAXES.iter() { |
|
380 | 382 | if let Some(rest) = line.drop_prefix(rels) { |
|
381 | 383 | line_syntax = rels; |
|
382 | 384 | line = rest; |
|
383 | 385 | break; |
|
384 | 386 | } |
|
385 | 387 | if let Some(rest) = line.drop_prefix(&[s, &b":"[..]].concat()) { |
|
386 | 388 | line_syntax = rels; |
|
387 | 389 | line = rest; |
|
388 | 390 | break; |
|
389 | 391 | } |
|
390 | 392 | } |
|
391 | 393 | |
|
392 | 394 | inputs.push(IgnorePattern::new( |
|
393 | 395 | parse_pattern_syntax(&line_syntax).map_err(|e| match e { |
|
394 | 396 | PatternError::UnsupportedSyntax(syntax) => { |
|
395 | 397 | PatternError::UnsupportedSyntaxInFile( |
|
396 | 398 | syntax, |
|
397 | 399 | file_path.to_string_lossy().into(), |
|
398 | 400 | line_number, |
|
399 | 401 | ) |
|
400 | 402 | } |
|
401 | 403 | _ => e, |
|
402 | 404 | })?, |
|
403 | 405 | &line, |
|
404 | 406 | file_path, |
|
405 | 407 | )); |
|
406 | 408 | } |
|
407 | 409 | Ok((inputs, warnings)) |
|
408 | 410 | } |
|
409 | 411 | |
|
410 | 412 | pub fn read_pattern_file( |
|
411 | 413 | file_path: &Path, |
|
412 | 414 | warn: bool, |
|
413 | 415 | inspect_pattern_bytes: &mut impl FnMut(&[u8]), |
|
414 | 416 | ) -> Result<(Vec<IgnorePattern>, Vec<PatternFileWarning>), PatternError> { |
|
415 | 417 | match std::fs::read(file_path) { |
|
416 | 418 | Ok(contents) => { |
|
417 | 419 | inspect_pattern_bytes(&contents); |
|
418 | 420 | parse_pattern_file_contents(&contents, file_path, None, warn) |
|
419 | 421 | } |
|
420 | 422 | Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(( |
|
421 | 423 | vec![], |
|
422 | 424 | vec![PatternFileWarning::NoSuchFile(file_path.to_owned())], |
|
423 | 425 | )), |
|
424 | 426 | Err(e) => Err(e.into()), |
|
425 | 427 | } |
|
426 | 428 | } |
|
427 | 429 | |
|
428 | 430 | /// Represents an entry in an "ignore" file. |
|
429 | 431 | #[derive(Debug, Eq, PartialEq, Clone)] |
|
430 | 432 | pub struct IgnorePattern { |
|
431 | 433 | pub syntax: PatternSyntax, |
|
432 | 434 | pub pattern: Vec<u8>, |
|
433 | 435 | pub source: PathBuf, |
|
434 | 436 | } |
|
435 | 437 | |
|
436 | 438 | impl IgnorePattern { |
|
437 | 439 | pub fn new(syntax: PatternSyntax, pattern: &[u8], source: &Path) -> Self { |
|
438 | 440 | Self { |
|
439 | 441 | syntax, |
|
440 | 442 | pattern: pattern.to_owned(), |
|
441 | 443 | source: source.to_owned(), |
|
442 | 444 | } |
|
443 | 445 | } |
|
444 | 446 | } |
|
445 | 447 | |
|
446 | 448 | pub type PatternResult<T> = Result<T, PatternError>; |
|
447 | 449 | |
|
448 | 450 | /// Wrapper for `read_pattern_file` that also recursively expands `include:` |
|
449 | 451 | /// and `subinclude:` patterns. |
|
450 | 452 | /// |
|
451 | 453 | /// The former are expanded in place, while `PatternSyntax::ExpandedSubInclude` |
|
452 | 454 | /// is used for the latter to form a tree of patterns. |
|
453 | 455 | pub fn get_patterns_from_file( |
|
454 | 456 | pattern_file: &Path, |
|
455 | 457 | root_dir: &Path, |
|
456 | 458 | inspect_pattern_bytes: &mut impl FnMut(&[u8]), |
|
457 | 459 | ) -> PatternResult<(Vec<IgnorePattern>, Vec<PatternFileWarning>)> { |
|
458 | 460 | let (patterns, mut warnings) = |
|
459 | 461 | read_pattern_file(pattern_file, true, inspect_pattern_bytes)?; |
|
460 | 462 | let patterns = patterns |
|
461 | 463 | .into_iter() |
|
462 | 464 | .flat_map(|entry| -> PatternResult<_> { |
|
463 | 465 | Ok(match &entry.syntax { |
|
464 | 466 | PatternSyntax::Include => { |
|
465 | 467 | let inner_include = |
|
466 | 468 | root_dir.join(get_path_from_bytes(&entry.pattern)); |
|
467 | 469 | let (inner_pats, inner_warnings) = get_patterns_from_file( |
|
468 | 470 | &inner_include, |
|
469 | 471 | root_dir, |
|
470 | 472 | inspect_pattern_bytes, |
|
471 | 473 | )?; |
|
472 | 474 | warnings.extend(inner_warnings); |
|
473 | 475 | inner_pats |
|
474 | 476 | } |
|
475 | 477 | PatternSyntax::SubInclude => { |
|
476 | 478 | let mut sub_include = SubInclude::new( |
|
477 | 479 | &root_dir, |
|
478 | 480 | &entry.pattern, |
|
479 | 481 | &entry.source, |
|
480 | 482 | )?; |
|
481 | 483 | let (inner_patterns, inner_warnings) = |
|
482 | 484 | get_patterns_from_file( |
|
483 | 485 | &sub_include.path, |
|
484 | 486 | &sub_include.root, |
|
485 | 487 | inspect_pattern_bytes, |
|
486 | 488 | )?; |
|
487 | 489 | sub_include.included_patterns = inner_patterns; |
|
488 | 490 | warnings.extend(inner_warnings); |
|
489 | 491 | vec![IgnorePattern { |
|
490 | 492 | syntax: PatternSyntax::ExpandedSubInclude(Box::new( |
|
491 | 493 | sub_include, |
|
492 | 494 | )), |
|
493 | 495 | ..entry |
|
494 | 496 | }] |
|
495 | 497 | } |
|
496 | 498 | _ => vec![entry], |
|
497 | 499 | }) |
|
498 | 500 | }) |
|
499 | 501 | .flatten() |
|
500 | 502 | .collect(); |
|
501 | 503 | |
|
502 | 504 | Ok((patterns, warnings)) |
|
503 | 505 | } |
|
504 | 506 | |
|
505 | 507 | /// Holds all the information needed to handle a `subinclude:` pattern. |
|
506 | 508 | #[derive(Debug, PartialEq, Eq, Clone)] |
|
507 | 509 | pub struct SubInclude { |
|
508 | 510 | /// Will be used for repository (hg) paths that start with this prefix. |
|
509 | 511 | /// It is relative to the current working directory, so comparing against |
|
510 | 512 | /// repository paths is painless. |
|
511 | 513 | pub prefix: HgPathBuf, |
|
512 | 514 | /// The file itself, containing the patterns |
|
513 | 515 | pub path: PathBuf, |
|
514 | 516 | /// Folder in the filesystem where it applies |
|
515 | 517 | pub root: PathBuf, |
|
516 | 518 | |
|
517 | 519 | pub included_patterns: Vec<IgnorePattern>, |
|
518 | 520 | } |
|
519 | 521 | |
|
520 | 522 | impl SubInclude { |
|
521 | 523 | pub fn new( |
|
522 | 524 | root_dir: &Path, |
|
523 | 525 | pattern: &[u8], |
|
524 | 526 | source: &Path, |
|
525 | 527 | ) -> Result<SubInclude, HgPathError> { |
|
526 | 528 | let normalized_source = |
|
527 | 529 | normalize_path_bytes(&get_bytes_from_path(source)); |
|
528 | 530 | |
|
529 | 531 | let source_root = get_path_from_bytes(&normalized_source); |
|
530 | 532 | let source_root = |
|
531 | 533 | source_root.parent().unwrap_or_else(|| source_root.deref()); |
|
532 | 534 | |
|
533 | 535 | let path = source_root.join(get_path_from_bytes(pattern)); |
|
534 | 536 | let new_root = path.parent().unwrap_or_else(|| path.deref()); |
|
535 | 537 | |
|
536 | 538 | let prefix = canonical_path(root_dir, root_dir, new_root)?; |
|
537 | 539 | |
|
538 | 540 | Ok(Self { |
|
539 | 541 | prefix: path_to_hg_path_buf(prefix).and_then(|mut p| { |
|
540 | 542 | if !p.is_empty() { |
|
541 | 543 | p.push_byte(b'/'); |
|
542 | 544 | } |
|
543 | 545 | Ok(p) |
|
544 | 546 | })?, |
|
545 | 547 | path: path.to_owned(), |
|
546 | 548 | root: new_root.to_owned(), |
|
547 | 549 | included_patterns: Vec::new(), |
|
548 | 550 | }) |
|
549 | 551 | } |
|
550 | 552 | } |
|
551 | 553 | |
|
552 | 554 | /// Separate and pre-process subincludes from other patterns for the "ignore" |
|
553 | 555 | /// phase. |
|
554 | 556 | pub fn filter_subincludes( |
|
555 | 557 | ignore_patterns: Vec<IgnorePattern>, |
|
556 | 558 | ) -> Result<(Vec<Box<SubInclude>>, Vec<IgnorePattern>), HgPathError> { |
|
557 | 559 | let mut subincludes = vec![]; |
|
558 | 560 | let mut others = vec![]; |
|
559 | 561 | |
|
560 | 562 | for pattern in ignore_patterns { |
|
561 | 563 | if let PatternSyntax::ExpandedSubInclude(sub_include) = pattern.syntax |
|
562 | 564 | { |
|
563 | 565 | subincludes.push(sub_include); |
|
564 | 566 | } else { |
|
565 | 567 | others.push(pattern) |
|
566 | 568 | } |
|
567 | 569 | } |
|
568 | 570 | Ok((subincludes, others)) |
|
569 | 571 | } |
|
570 | 572 | |
|
571 | 573 | #[cfg(test)] |
|
572 | 574 | mod tests { |
|
573 | 575 | use super::*; |
|
574 | 576 | use pretty_assertions::assert_eq; |
|
575 | 577 | |
|
576 | 578 | #[test] |
|
577 | 579 | fn escape_pattern_test() { |
|
578 | 580 | let untouched = |
|
579 | 581 | br#"!"%',/0123456789:;<=>@ABCDEFGHIJKLMNOPQRSTUVWXYZ_`abcdefghijklmnopqrstuvwxyz"#; |
|
580 | 582 | assert_eq!(escape_pattern(untouched), untouched.to_vec()); |
|
581 | 583 | // All escape codes |
|
582 | 584 | assert_eq!( |
|
583 | 585 | escape_pattern(br#"()[]{}?*+-|^$\\.&~# \t\n\r\v\f"#), |
|
584 | 586 | br#"\(\)\[\]\{\}\?\*\+\-\|\^\$\\\\\.\&\~\#\ \\t\\n\\r\\v\\f"# |
|
585 | 587 | .to_vec() |
|
586 | 588 | ); |
|
587 | 589 | } |
|
588 | 590 | |
|
589 | 591 | #[test] |
|
590 | 592 | fn glob_test() { |
|
591 | 593 | assert_eq!(glob_to_re(br#"?"#), br#"."#); |
|
592 | 594 | assert_eq!(glob_to_re(br#"*"#), br#"[^/]*"#); |
|
593 | 595 | assert_eq!(glob_to_re(br#"**"#), br#".*"#); |
|
594 | 596 | assert_eq!(glob_to_re(br#"**/a"#), br#"(?:.*/)?a"#); |
|
595 | 597 | assert_eq!(glob_to_re(br#"a/**/b"#), br#"a/(?:.*/)?b"#); |
|
596 | 598 | assert_eq!(glob_to_re(br#"[a*?!^][^b][!c]"#), br#"[a*?!^][\^b][^c]"#); |
|
597 | 599 | assert_eq!(glob_to_re(br#"{a,b}"#), br#"(?:a|b)"#); |
|
598 | 600 | assert_eq!(glob_to_re(br#".\*\?"#), br#"\.\*\?"#); |
|
599 | 601 | } |
|
600 | 602 | |
|
601 | 603 | #[test] |
|
602 | 604 | fn test_parse_pattern_file_contents() { |
|
603 | 605 | let lines = b"syntax: glob\n*.elc"; |
|
604 | 606 | |
|
605 | 607 | assert_eq!( |
|
606 | 608 | parse_pattern_file_contents( |
|
607 | 609 | lines, |
|
608 | 610 | Path::new("file_path"), |
|
609 | 611 | None, |
|
610 | 612 | false |
|
611 | 613 | ) |
|
612 | 614 | .unwrap() |
|
613 | 615 | .0, |
|
614 | 616 | vec![IgnorePattern::new( |
|
615 | 617 | PatternSyntax::RelGlob, |
|
616 | 618 | b"*.elc", |
|
617 | 619 | Path::new("file_path") |
|
618 | 620 | )], |
|
619 | 621 | ); |
|
620 | 622 | |
|
621 | 623 | let lines = b"syntax: include\nsyntax: glob"; |
|
622 | 624 | |
|
623 | 625 | assert_eq!( |
|
624 | 626 | parse_pattern_file_contents( |
|
625 | 627 | lines, |
|
626 | 628 | Path::new("file_path"), |
|
627 | 629 | None, |
|
628 | 630 | false |
|
629 | 631 | ) |
|
630 | 632 | .unwrap() |
|
631 | 633 | .0, |
|
632 | 634 | vec![] |
|
633 | 635 | ); |
|
634 | 636 | let lines = b"glob:**.o"; |
|
635 | 637 | assert_eq!( |
|
636 | 638 | parse_pattern_file_contents( |
|
637 | 639 | lines, |
|
638 | 640 | Path::new("file_path"), |
|
639 | 641 | None, |
|
640 | 642 | false |
|
641 | 643 | ) |
|
642 | 644 | .unwrap() |
|
643 | 645 | .0, |
|
644 | 646 | vec![IgnorePattern::new( |
|
645 | 647 | PatternSyntax::RelGlob, |
|
646 | 648 | b"**.o", |
|
647 | 649 | Path::new("file_path") |
|
648 | 650 | )] |
|
649 | 651 | ); |
|
650 | 652 | } |
|
651 | 653 | |
|
652 | 654 | #[test] |
|
653 | 655 | fn test_build_single_regex() { |
|
654 | 656 | assert_eq!( |
|
655 | 657 | build_single_regex(&IgnorePattern::new( |
|
656 | 658 | PatternSyntax::RelGlob, |
|
657 | 659 | b"rust/target/", |
|
658 | 660 | Path::new("") |
|
659 | 661 | )) |
|
660 | 662 | .unwrap(), |
|
661 | 663 | Some(br"(?:.*/)?rust/target(?:/|$)".to_vec()), |
|
662 | 664 | ); |
|
663 | 665 | assert_eq!( |
|
664 | 666 | build_single_regex(&IgnorePattern::new( |
|
665 | 667 | PatternSyntax::Regexp, |
|
666 | 668 | br"rust/target/\d+", |
|
667 | 669 | Path::new("") |
|
668 | 670 | )) |
|
669 | 671 | .unwrap(), |
|
670 | 672 | Some(br"rust/target/\d+".to_vec()), |
|
671 | 673 | ); |
|
672 | 674 | } |
|
673 | 675 | |
|
674 | 676 | #[test] |
|
675 | 677 | fn test_build_single_regex_shortcut() { |
|
676 | 678 | assert_eq!( |
|
677 | 679 | build_single_regex(&IgnorePattern::new( |
|
678 | 680 | PatternSyntax::RootGlob, |
|
679 | 681 | b"", |
|
680 | 682 | Path::new("") |
|
681 | 683 | )) |
|
682 | 684 | .unwrap(), |
|
683 | 685 | None, |
|
684 | 686 | ); |
|
685 | 687 | assert_eq!( |
|
686 | 688 | build_single_regex(&IgnorePattern::new( |
|
687 | 689 | PatternSyntax::RootGlob, |
|
688 | 690 | b"whatever", |
|
689 | 691 | Path::new("") |
|
690 | 692 | )) |
|
691 | 693 | .unwrap(), |
|
692 | 694 | None, |
|
693 | 695 | ); |
|
694 | 696 | assert_eq!( |
|
695 | 697 | build_single_regex(&IgnorePattern::new( |
|
696 | 698 | PatternSyntax::RootGlob, |
|
697 | 699 | b"*.o", |
|
698 | 700 | Path::new("") |
|
699 | 701 | )) |
|
700 | 702 | .unwrap(), |
|
701 | 703 | Some(br"[^/]*\.o(?:/|$)".to_vec()), |
|
702 | 704 | ); |
|
703 | 705 | } |
|
704 | 706 | } |
@@ -1,138 +1,139 b'' | |||
|
1 | 1 | // Copyright 2018-2020 Georges Racinet <georges.racinet@octobus.net> |
|
2 | 2 | // and Mercurial contributors |
|
3 | 3 | // |
|
4 | 4 | // This software may be used and distributed according to the terms of the |
|
5 | 5 | // GNU General Public License version 2 or any later version. |
|
6 | 6 | |
|
7 | 7 | mod ancestors; |
|
8 | 8 | pub mod dagops; |
|
9 | 9 | pub mod errors; |
|
10 | pub mod narrow; | |
|
10 | 11 | pub mod sparse; |
|
11 | 12 | pub use ancestors::{AncestorsIterator, MissingAncestors}; |
|
12 | 13 | pub mod dirstate; |
|
13 | 14 | pub mod dirstate_tree; |
|
14 | 15 | pub mod discovery; |
|
15 | 16 | pub mod exit_codes; |
|
16 | 17 | pub mod requirements; |
|
17 | 18 | pub mod testing; // unconditionally built, for use from integration tests |
|
18 | 19 | pub use dirstate::{ |
|
19 | 20 | dirs_multiset::{DirsMultiset, DirsMultisetIter}, |
|
20 | 21 | status::{ |
|
21 | 22 | BadMatch, BadType, DirstateStatus, HgPathCow, StatusError, |
|
22 | 23 | StatusOptions, |
|
23 | 24 | }, |
|
24 | 25 | DirstateEntry, DirstateParents, EntryState, |
|
25 | 26 | }; |
|
26 | 27 | pub mod copy_tracing; |
|
27 | 28 | mod filepatterns; |
|
28 | 29 | pub mod matchers; |
|
29 | 30 | pub mod repo; |
|
30 | 31 | pub mod revlog; |
|
31 | 32 | pub use revlog::*; |
|
32 | 33 | pub mod config; |
|
33 | 34 | pub mod lock; |
|
34 | 35 | pub mod logging; |
|
35 | 36 | pub mod operations; |
|
36 | 37 | pub mod revset; |
|
37 | 38 | pub mod utils; |
|
38 | 39 | pub mod vfs; |
|
39 | 40 | |
|
40 | 41 | use crate::utils::hg_path::{HgPathBuf, HgPathError}; |
|
41 | 42 | pub use filepatterns::{ |
|
42 | 43 | parse_pattern_syntax, read_pattern_file, IgnorePattern, |
|
43 | 44 | PatternFileWarning, PatternSyntax, |
|
44 | 45 | }; |
|
45 | 46 | use std::collections::HashMap; |
|
46 | 47 | use std::fmt; |
|
47 | 48 | use twox_hash::RandomXxHashBuilder64; |
|
48 | 49 | |
|
49 | 50 | /// This is a contract between the `micro-timer` crate and us, to expose |
|
50 | 51 | /// the `log` crate as `crate::log`. |
|
51 | 52 | use log; |
|
52 | 53 | |
|
53 | 54 | pub type LineNumber = usize; |
|
54 | 55 | |
|
55 | 56 | /// Rust's default hasher is too slow because it tries to prevent collision |
|
56 | 57 | /// attacks. We are not concerned about those: if an ill-minded person has |
|
57 | 58 | /// write access to your repository, you have other issues. |
|
58 | 59 | pub type FastHashMap<K, V> = HashMap<K, V, RandomXxHashBuilder64>; |
|
59 | 60 | |
|
60 | 61 | // TODO: should this be the default `FastHashMap` for all of hg-core, not just |
|
61 | 62 | // dirstate_tree? How does XxHash compare with AHash, hashbrown’s default? |
|
62 | 63 | pub type FastHashbrownMap<K, V> = |
|
63 | 64 | hashbrown::HashMap<K, V, RandomXxHashBuilder64>; |
|
64 | 65 | |
|
65 | 66 | #[derive(Debug, PartialEq)] |
|
66 | 67 | pub enum DirstateMapError { |
|
67 | 68 | PathNotFound(HgPathBuf), |
|
68 | 69 | EmptyPath, |
|
69 | 70 | InvalidPath(HgPathError), |
|
70 | 71 | } |
|
71 | 72 | |
|
72 | 73 | impl fmt::Display for DirstateMapError { |
|
73 | 74 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
|
74 | 75 | match self { |
|
75 | 76 | DirstateMapError::PathNotFound(_) => { |
|
76 | 77 | f.write_str("expected a value, found none") |
|
77 | 78 | } |
|
78 | 79 | DirstateMapError::EmptyPath => { |
|
79 | 80 | f.write_str("Overflow in dirstate.") |
|
80 | 81 | } |
|
81 | 82 | DirstateMapError::InvalidPath(path_error) => path_error.fmt(f), |
|
82 | 83 | } |
|
83 | 84 | } |
|
84 | 85 | } |
|
85 | 86 | |
|
86 | 87 | #[derive(Debug, derive_more::From)] |
|
87 | 88 | pub enum DirstateError { |
|
88 | 89 | Map(DirstateMapError), |
|
89 | 90 | Common(errors::HgError), |
|
90 | 91 | } |
|
91 | 92 | |
|
92 | 93 | impl fmt::Display for DirstateError { |
|
93 | 94 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
|
94 | 95 | match self { |
|
95 | 96 | DirstateError::Map(error) => error.fmt(f), |
|
96 | 97 | DirstateError::Common(error) => error.fmt(f), |
|
97 | 98 | } |
|
98 | 99 | } |
|
99 | 100 | } |
|
100 | 101 | |
|
101 | 102 | #[derive(Debug, derive_more::From)] |
|
102 | 103 | pub enum PatternError { |
|
103 | 104 | #[from] |
|
104 | 105 | Path(HgPathError), |
|
105 | 106 | UnsupportedSyntax(String), |
|
106 | 107 | UnsupportedSyntaxInFile(String, String, usize), |
|
107 | 108 | TooLong(usize), |
|
108 | 109 | #[from] |
|
109 | 110 | IO(std::io::Error), |
|
110 | 111 | /// Needed a pattern that can be turned into a regex but got one that |
|
111 | 112 | /// can't. This should only happen through programmer error. |
|
112 | 113 | NonRegexPattern(IgnorePattern), |
|
113 | 114 | } |
|
114 | 115 | |
|
115 | 116 | impl fmt::Display for PatternError { |
|
116 | 117 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
|
117 | 118 | match self { |
|
118 | 119 | PatternError::UnsupportedSyntax(syntax) => { |
|
119 | 120 | write!(f, "Unsupported syntax {}", syntax) |
|
120 | 121 | } |
|
121 | 122 | PatternError::UnsupportedSyntaxInFile(syntax, file_path, line) => { |
|
122 | 123 | write!( |
|
123 | 124 | f, |
|
124 | 125 | "{}:{}: unsupported syntax {}", |
|
125 | 126 | file_path, line, syntax |
|
126 | 127 | ) |
|
127 | 128 | } |
|
128 | 129 | PatternError::TooLong(size) => { |
|
129 | 130 | write!(f, "matcher pattern is too long ({} bytes)", size) |
|
130 | 131 | } |
|
131 | 132 | PatternError::IO(error) => error.fmt(f), |
|
132 | 133 | PatternError::Path(error) => error.fmt(f), |
|
133 | 134 | PatternError::NonRegexPattern(pattern) => { |
|
134 | 135 | write!(f, "'{:?}' cannot be turned into a regex", pattern) |
|
135 | 136 | } |
|
136 | 137 | } |
|
137 | 138 | } |
|
138 | 139 | } |
@@ -1,333 +1,338 b'' | |||
|
1 | 1 | use std::{collections::HashSet, path::Path}; |
|
2 | 2 | |
|
3 | 3 | use format_bytes::{write_bytes, DisplayBytes}; |
|
4 | 4 | |
|
5 | 5 | use crate::{ |
|
6 | 6 | errors::HgError, |
|
7 | 7 | filepatterns::parse_pattern_file_contents, |
|
8 | 8 | matchers::{ |
|
9 | 9 | AlwaysMatcher, DifferenceMatcher, IncludeMatcher, Matcher, |
|
10 | 10 | UnionMatcher, |
|
11 | 11 | }, |
|
12 | 12 | operations::cat, |
|
13 | 13 | repo::Repo, |
|
14 | 14 | requirements::SPARSE_REQUIREMENT, |
|
15 | 15 | utils::{hg_path::HgPath, SliceExt}, |
|
16 | 16 | IgnorePattern, PatternError, PatternFileWarning, PatternSyntax, Revision, |
|
17 | 17 | NULL_REVISION, |
|
18 | 18 | }; |
|
19 | 19 | |
|
20 | 20 | /// Command which is triggering the config read |
|
21 | 21 | #[derive(Copy, Clone, Debug)] |
|
22 | 22 | pub enum SparseConfigContext { |
|
23 | 23 | Sparse, |
|
24 | 24 | Narrow, |
|
25 | 25 | } |
|
26 | 26 | |
|
27 | 27 | impl DisplayBytes for SparseConfigContext { |
|
28 | 28 | fn display_bytes( |
|
29 | 29 | &self, |
|
30 | 30 | output: &mut dyn std::io::Write, |
|
31 | 31 | ) -> std::io::Result<()> { |
|
32 | 32 | match self { |
|
33 | 33 | SparseConfigContext::Sparse => write_bytes!(output, b"sparse"), |
|
34 | 34 | SparseConfigContext::Narrow => write_bytes!(output, b"narrow"), |
|
35 | 35 | } |
|
36 | 36 | } |
|
37 | 37 | } |
|
38 | 38 | |
|
39 | 39 | /// Possible warnings when reading sparse configuration |
|
40 | 40 | #[derive(Debug, derive_more::From)] |
|
41 | 41 | pub enum SparseWarning { |
|
42 | 42 | /// Warns about improper paths that start with "/" |
|
43 | 43 | RootWarning { |
|
44 | 44 | context: SparseConfigContext, |
|
45 | 45 | line: Vec<u8>, |
|
46 | 46 | }, |
|
47 | 47 | /// Warns about a profile missing from the given changelog revision |
|
48 | 48 | ProfileNotFound { profile: Vec<u8>, rev: Revision }, |
|
49 | 49 | #[from] |
|
50 | 50 | Pattern(PatternFileWarning), |
|
51 | 51 | } |
|
52 | 52 | |
|
53 | 53 | /// Parsed sparse config |
|
54 | 54 | #[derive(Debug, Default)] |
|
55 | 55 | pub struct SparseConfig { |
|
56 | 56 | // Line-separated |
|
57 | includes: Vec<u8>, | |
|
57 | pub(crate) includes: Vec<u8>, | |
|
58 | 58 | // Line-separated |
|
59 | excludes: Vec<u8>, | |
|
60 | profiles: HashSet<Vec<u8>>, | |
|
61 | warnings: Vec<SparseWarning>, | |
|
59 | pub(crate) excludes: Vec<u8>, | |
|
60 | pub(crate) profiles: HashSet<Vec<u8>>, | |
|
61 | pub(crate) warnings: Vec<SparseWarning>, | |
|
62 | 62 | } |
|
63 | 63 | |
|
64 | /// All possible errors when reading sparse config | |
|
64 | /// All possible errors when reading sparse/narrow config | |
|
65 | 65 | #[derive(Debug, derive_more::From)] |
|
66 | 66 | pub enum SparseConfigError { |
|
67 | 67 | IncludesAfterExcludes { |
|
68 | 68 | context: SparseConfigContext, |
|
69 | 69 | }, |
|
70 | 70 | EntryOutsideSection { |
|
71 | 71 | context: SparseConfigContext, |
|
72 | 72 | line: Vec<u8>, |
|
73 | 73 | }, |
|
74 | /// Narrow config does not support '%include' directives | |
|
75 | IncludesInNarrow, | |
|
76 | /// An invalid pattern prefix was given to the narrow spec. Includes the | |
|
77 | /// entire pattern for context. | |
|
78 | InvalidNarrowPrefix(Vec<u8>), | |
|
74 | 79 | #[from] |
|
75 | 80 | HgError(HgError), |
|
76 | 81 | #[from] |
|
77 | 82 | PatternError(PatternError), |
|
78 | 83 | } |
|
79 | 84 | |
|
80 | 85 | /// Parse sparse config file content. |
|
81 | fn parse_config( | |
|
86 | pub(crate) fn parse_config( | |
|
82 | 87 | raw: &[u8], |
|
83 | 88 | context: SparseConfigContext, |
|
84 | 89 | ) -> Result<SparseConfig, SparseConfigError> { |
|
85 | 90 | let mut includes = vec![]; |
|
86 | 91 | let mut excludes = vec![]; |
|
87 | 92 | let mut profiles = HashSet::new(); |
|
88 | 93 | let mut warnings = vec![]; |
|
89 | 94 | |
|
90 | 95 | #[derive(PartialEq, Eq)] |
|
91 | 96 | enum Current { |
|
92 | 97 | Includes, |
|
93 | 98 | Excludes, |
|
94 | 99 | None, |
|
95 | 100 | }; |
|
96 | 101 | |
|
97 | 102 | let mut current = Current::None; |
|
98 | 103 | let mut in_section = false; |
|
99 | 104 | |
|
100 | 105 | for line in raw.split(|c| *c == b'\n') { |
|
101 | 106 | let line = line.trim(); |
|
102 | 107 | if line.is_empty() || line[0] == b'#' { |
|
103 | 108 | // empty or comment line, skip |
|
104 | 109 | continue; |
|
105 | 110 | } |
|
106 | 111 | if line.starts_with(b"%include ") { |
|
107 | 112 | let profile = line[b"%include ".len()..].trim(); |
|
108 | 113 | if !profile.is_empty() { |
|
109 | 114 | profiles.insert(profile.into()); |
|
110 | 115 | } |
|
111 | 116 | } else if line == b"[include]" { |
|
112 | 117 | if in_section && current == Current::Includes { |
|
113 | 118 | return Err(SparseConfigError::IncludesAfterExcludes { |
|
114 | 119 | context, |
|
115 | 120 | }); |
|
116 | 121 | } |
|
117 | 122 | in_section = true; |
|
118 | 123 | current = Current::Includes; |
|
119 | 124 | continue; |
|
120 | 125 | } else if line == b"[exclude]" { |
|
121 | 126 | in_section = true; |
|
122 | 127 | current = Current::Excludes; |
|
123 | 128 | } else { |
|
124 | 129 | if current == Current::None { |
|
125 | 130 | return Err(SparseConfigError::EntryOutsideSection { |
|
126 | 131 | context, |
|
127 | 132 | line: line.into(), |
|
128 | 133 | }); |
|
129 | 134 | } |
|
130 | 135 | if line.trim().starts_with(b"/") { |
|
131 | 136 | warnings.push(SparseWarning::RootWarning { |
|
132 | 137 | context, |
|
133 | 138 | line: line.into(), |
|
134 | 139 | }); |
|
135 | 140 | continue; |
|
136 | 141 | } |
|
137 | 142 | match current { |
|
138 | 143 | Current::Includes => { |
|
139 | 144 | includes.push(b'\n'); |
|
140 | 145 | includes.extend(line.iter()); |
|
141 | 146 | } |
|
142 | 147 | Current::Excludes => { |
|
143 | 148 | excludes.push(b'\n'); |
|
144 | 149 | excludes.extend(line.iter()); |
|
145 | 150 | } |
|
146 | 151 | Current::None => unreachable!(), |
|
147 | 152 | } |
|
148 | 153 | } |
|
149 | 154 | } |
|
150 | 155 | |
|
151 | 156 | Ok(SparseConfig { |
|
152 | 157 | includes, |
|
153 | 158 | excludes, |
|
154 | 159 | profiles, |
|
155 | 160 | warnings, |
|
156 | 161 | }) |
|
157 | 162 | } |
|
158 | 163 | |
|
159 | 164 | fn read_temporary_includes( |
|
160 | 165 | repo: &Repo, |
|
161 | 166 | ) -> Result<Vec<Vec<u8>>, SparseConfigError> { |
|
162 | 167 | let raw = repo.hg_vfs().try_read("tempsparse")?.unwrap_or(vec![]); |
|
163 | 168 | if raw.is_empty() { |
|
164 | 169 | return Ok(vec![]); |
|
165 | 170 | } |
|
166 | 171 | Ok(raw.split(|c| *c == b'\n').map(ToOwned::to_owned).collect()) |
|
167 | 172 | } |
|
168 | 173 | |
|
169 | 174 | /// Obtain sparse checkout patterns for the given revision |
|
170 | 175 | fn patterns_for_rev( |
|
171 | 176 | repo: &Repo, |
|
172 | 177 | rev: Revision, |
|
173 | 178 | ) -> Result<Option<SparseConfig>, SparseConfigError> { |
|
174 | 179 | if !repo.has_sparse() { |
|
175 | 180 | return Ok(None); |
|
176 | 181 | } |
|
177 | 182 | let raw = repo.hg_vfs().try_read("sparse")?.unwrap_or(vec![]); |
|
178 | 183 | |
|
179 | 184 | if raw.is_empty() { |
|
180 | 185 | return Ok(None); |
|
181 | 186 | } |
|
182 | 187 | |
|
183 | 188 | let mut config = parse_config(&raw, SparseConfigContext::Sparse)?; |
|
184 | 189 | |
|
185 | 190 | if !config.profiles.is_empty() { |
|
186 | 191 | let mut profiles: Vec<Vec<u8>> = config.profiles.into_iter().collect(); |
|
187 | 192 | let mut visited = HashSet::new(); |
|
188 | 193 | |
|
189 | 194 | while let Some(profile) = profiles.pop() { |
|
190 | 195 | if visited.contains(&profile) { |
|
191 | 196 | continue; |
|
192 | 197 | } |
|
193 | 198 | visited.insert(profile.to_owned()); |
|
194 | 199 | |
|
195 | 200 | let output = |
|
196 | 201 | cat(repo, &rev.to_string(), vec![HgPath::new(&profile)]) |
|
197 | 202 | .map_err(|_| { |
|
198 | 203 | HgError::corrupted(format!( |
|
199 | 204 | "dirstate points to non-existent parent node" |
|
200 | 205 | )) |
|
201 | 206 | })?; |
|
202 | 207 | if output.results.is_empty() { |
|
203 | 208 | config.warnings.push(SparseWarning::ProfileNotFound { |
|
204 | 209 | profile: profile.to_owned(), |
|
205 | 210 | rev, |
|
206 | 211 | }) |
|
207 | 212 | } |
|
208 | 213 | |
|
209 | 214 | let subconfig = parse_config( |
|
210 | 215 | &output.results[0].1, |
|
211 | 216 | SparseConfigContext::Sparse, |
|
212 | 217 | )?; |
|
213 | 218 | if !subconfig.includes.is_empty() { |
|
214 | 219 | config.includes.push(b'\n'); |
|
215 | 220 | config.includes.extend(&subconfig.includes); |
|
216 | 221 | } |
|
217 | 222 | if !subconfig.includes.is_empty() { |
|
218 | 223 | config.includes.push(b'\n'); |
|
219 | 224 | config.excludes.extend(&subconfig.excludes); |
|
220 | 225 | } |
|
221 | 226 | config.warnings.extend(subconfig.warnings.into_iter()); |
|
222 | 227 | profiles.extend(subconfig.profiles.into_iter()); |
|
223 | 228 | } |
|
224 | 229 | |
|
225 | 230 | config.profiles = visited; |
|
226 | 231 | } |
|
227 | 232 | |
|
228 | 233 | if !config.includes.is_empty() { |
|
229 | 234 | config.includes.extend(b"\n.hg*"); |
|
230 | 235 | } |
|
231 | 236 | |
|
232 | 237 | Ok(Some(config)) |
|
233 | 238 | } |
|
234 | 239 | |
|
235 | 240 | /// Obtain a matcher for sparse working directories. |
|
236 | 241 | pub fn matcher( |
|
237 | 242 | repo: &Repo, |
|
238 | 243 | ) -> Result<(Box<dyn Matcher + Sync>, Vec<SparseWarning>), SparseConfigError> { |
|
239 | 244 | let mut warnings = vec![]; |
|
240 | 245 | if !repo.requirements().contains(SPARSE_REQUIREMENT) { |
|
241 | 246 | return Ok((Box::new(AlwaysMatcher), warnings)); |
|
242 | 247 | } |
|
243 | 248 | |
|
244 | 249 | let parents = repo.dirstate_parents()?; |
|
245 | 250 | let mut revs = vec![]; |
|
246 | 251 | let p1_rev = |
|
247 | 252 | repo.changelog()? |
|
248 | 253 | .rev_from_node(parents.p1.into()) |
|
249 | 254 | .map_err(|_| { |
|
250 | 255 | HgError::corrupted(format!( |
|
251 | 256 | "dirstate points to non-existent parent node" |
|
252 | 257 | )) |
|
253 | 258 | })?; |
|
254 | 259 | if p1_rev != NULL_REVISION { |
|
255 | 260 | revs.push(p1_rev) |
|
256 | 261 | } |
|
257 | 262 | let p2_rev = |
|
258 | 263 | repo.changelog()? |
|
259 | 264 | .rev_from_node(parents.p2.into()) |
|
260 | 265 | .map_err(|_| { |
|
261 | 266 | HgError::corrupted(format!( |
|
262 | 267 | "dirstate points to non-existent parent node" |
|
263 | 268 | )) |
|
264 | 269 | })?; |
|
265 | 270 | if p2_rev != NULL_REVISION { |
|
266 | 271 | revs.push(p2_rev) |
|
267 | 272 | } |
|
268 | 273 | let mut matchers = vec![]; |
|
269 | 274 | |
|
270 | 275 | for rev in revs.iter() { |
|
271 | 276 | let config = patterns_for_rev(repo, *rev); |
|
272 | 277 | if let Ok(Some(config)) = config { |
|
273 | 278 | warnings.extend(config.warnings); |
|
274 | 279 | let mut m: Box<dyn Matcher + Sync> = Box::new(AlwaysMatcher); |
|
275 | 280 | if !config.includes.is_empty() { |
|
276 | 281 | let (patterns, subwarnings) = parse_pattern_file_contents( |
|
277 | 282 | &config.includes, |
|
278 | 283 | Path::new(""), |
|
279 | 284 | Some(b"relglob:".as_ref()), |
|
280 | 285 | false, |
|
281 | 286 | )?; |
|
282 | 287 | warnings.extend(subwarnings.into_iter().map(From::from)); |
|
283 | 288 | m = Box::new(IncludeMatcher::new(patterns)?); |
|
284 | 289 | } |
|
285 | 290 | if !config.excludes.is_empty() { |
|
286 | 291 | let (patterns, subwarnings) = parse_pattern_file_contents( |
|
287 | 292 | &config.excludes, |
|
288 | 293 | Path::new(""), |
|
289 | 294 | Some(b"relglob:".as_ref()), |
|
290 | 295 | false, |
|
291 | 296 | )?; |
|
292 | 297 | warnings.extend(subwarnings.into_iter().map(From::from)); |
|
293 | 298 | m = Box::new(DifferenceMatcher::new( |
|
294 | 299 | m, |
|
295 | 300 | Box::new(IncludeMatcher::new(patterns)?), |
|
296 | 301 | )); |
|
297 | 302 | } |
|
298 | 303 | matchers.push(m); |
|
299 | 304 | } |
|
300 | 305 | } |
|
301 | 306 | let result: Box<dyn Matcher + Sync> = match matchers.len() { |
|
302 | 307 | 0 => Box::new(AlwaysMatcher), |
|
303 | 308 | 1 => matchers.pop().expect("1 is equal to 0"), |
|
304 | 309 | _ => Box::new(UnionMatcher::new(matchers)), |
|
305 | 310 | }; |
|
306 | 311 | |
|
307 | 312 | let matcher = |
|
308 | 313 | force_include_matcher(result, &read_temporary_includes(repo)?)?; |
|
309 | 314 | Ok((matcher, warnings)) |
|
310 | 315 | } |
|
311 | 316 | |
|
312 | 317 | /// Returns a matcher that returns true for any of the forced includes before |
|
313 | 318 | /// testing against the actual matcher |
|
314 | 319 | fn force_include_matcher( |
|
315 | 320 | result: Box<dyn Matcher + Sync>, |
|
316 | 321 | temp_includes: &[Vec<u8>], |
|
317 | 322 | ) -> Result<Box<dyn Matcher + Sync>, PatternError> { |
|
318 | 323 | if temp_includes.is_empty() { |
|
319 | 324 | return Ok(result); |
|
320 | 325 | } |
|
321 | 326 | let forced_include_matcher = IncludeMatcher::new( |
|
322 | 327 | temp_includes |
|
323 | 328 | .into_iter() |
|
324 | 329 | .map(|include| { |
|
325 | 330 | IgnorePattern::new(PatternSyntax::Path, include, Path::new("")) |
|
326 | 331 | }) |
|
327 | 332 | .collect(), |
|
328 | 333 | )?; |
|
329 | 334 | Ok(Box::new(UnionMatcher::new(vec![ |
|
330 | 335 | Box::new(forced_include_matcher), |
|
331 | 336 | result, |
|
332 | 337 | ]))) |
|
333 | 338 | } |
@@ -1,617 +1,620 b'' | |||
|
1 | 1 | // status.rs |
|
2 | 2 | // |
|
3 | 3 | // Copyright 2020, Georges Racinet <georges.racinets@octobus.net> |
|
4 | 4 | // |
|
5 | 5 | // This software may be used and distributed according to the terms of the |
|
6 | 6 | // GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | use crate::error::CommandError; |
|
9 | 9 | use crate::ui::Ui; |
|
10 | 10 | use crate::utils::path_utils::RelativizePaths; |
|
11 | 11 | use clap::{Arg, SubCommand}; |
|
12 | 12 | use format_bytes::format_bytes; |
|
13 | use hg; | |
|
14 | 13 | use hg::config::Config; |
|
15 | 14 | use hg::dirstate::has_exec_bit; |
|
16 | 15 | use hg::dirstate::status::StatusPath; |
|
17 | 16 | use hg::dirstate::TruncatedTimestamp; |
|
18 | 17 | use hg::errors::{HgError, IoResultExt}; |
|
19 | 18 | use hg::lock::LockError; |
|
20 | 19 | use hg::manifest::Manifest; |
|
20 | use hg::matchers::{AlwaysMatcher, IntersectionMatcher}; | |
|
21 | 21 | use hg::repo::Repo; |
|
22 | use hg::sparse::{matcher, SparseWarning}; | |
|
23 | 22 | use hg::utils::files::get_bytes_from_os_string; |
|
24 | 23 | use hg::utils::files::get_bytes_from_path; |
|
25 | 24 | use hg::utils::files::get_path_from_bytes; |
|
26 | 25 | use hg::utils::hg_path::{hg_path_to_path_buf, HgPath}; |
|
27 | 26 | use hg::DirstateStatus; |
|
28 | 27 | use hg::PatternFileWarning; |
|
29 | 28 | use hg::StatusError; |
|
30 | 29 | use hg::StatusOptions; |
|
30 | use hg::{self, narrow, sparse}; | |
|
31 | 31 | use log::info; |
|
32 | 32 | use std::io; |
|
33 | 33 | use std::path::PathBuf; |
|
34 | 34 | |
|
35 | 35 | pub const HELP_TEXT: &str = " |
|
36 | 36 | Show changed files in the working directory |
|
37 | 37 | |
|
38 | 38 | This is a pure Rust version of `hg status`. |
|
39 | 39 | |
|
40 | 40 | Some options might be missing, check the list below. |
|
41 | 41 | "; |
|
42 | 42 | |
|
43 | 43 | pub fn args() -> clap::App<'static, 'static> { |
|
44 | 44 | SubCommand::with_name("status") |
|
45 | 45 | .alias("st") |
|
46 | 46 | .about(HELP_TEXT) |
|
47 | 47 | .arg( |
|
48 | 48 | Arg::with_name("all") |
|
49 | 49 | .help("show status of all files") |
|
50 | 50 | .short("-A") |
|
51 | 51 | .long("--all"), |
|
52 | 52 | ) |
|
53 | 53 | .arg( |
|
54 | 54 | Arg::with_name("modified") |
|
55 | 55 | .help("show only modified files") |
|
56 | 56 | .short("-m") |
|
57 | 57 | .long("--modified"), |
|
58 | 58 | ) |
|
59 | 59 | .arg( |
|
60 | 60 | Arg::with_name("added") |
|
61 | 61 | .help("show only added files") |
|
62 | 62 | .short("-a") |
|
63 | 63 | .long("--added"), |
|
64 | 64 | ) |
|
65 | 65 | .arg( |
|
66 | 66 | Arg::with_name("removed") |
|
67 | 67 | .help("show only removed files") |
|
68 | 68 | .short("-r") |
|
69 | 69 | .long("--removed"), |
|
70 | 70 | ) |
|
71 | 71 | .arg( |
|
72 | 72 | Arg::with_name("clean") |
|
73 | 73 | .help("show only clean files") |
|
74 | 74 | .short("-c") |
|
75 | 75 | .long("--clean"), |
|
76 | 76 | ) |
|
77 | 77 | .arg( |
|
78 | 78 | Arg::with_name("deleted") |
|
79 | 79 | .help("show only deleted files") |
|
80 | 80 | .short("-d") |
|
81 | 81 | .long("--deleted"), |
|
82 | 82 | ) |
|
83 | 83 | .arg( |
|
84 | 84 | Arg::with_name("unknown") |
|
85 | 85 | .help("show only unknown (not tracked) files") |
|
86 | 86 | .short("-u") |
|
87 | 87 | .long("--unknown"), |
|
88 | 88 | ) |
|
89 | 89 | .arg( |
|
90 | 90 | Arg::with_name("ignored") |
|
91 | 91 | .help("show only ignored files") |
|
92 | 92 | .short("-i") |
|
93 | 93 | .long("--ignored"), |
|
94 | 94 | ) |
|
95 | 95 | .arg( |
|
96 | 96 | Arg::with_name("copies") |
|
97 | 97 | .help("show source of copied files (DEFAULT: ui.statuscopies)") |
|
98 | 98 | .short("-C") |
|
99 | 99 | .long("--copies"), |
|
100 | 100 | ) |
|
101 | 101 | .arg( |
|
102 | 102 | Arg::with_name("no-status") |
|
103 | 103 | .help("hide status prefix") |
|
104 | 104 | .short("-n") |
|
105 | 105 | .long("--no-status"), |
|
106 | 106 | ) |
|
107 | 107 | .arg( |
|
108 | 108 | Arg::with_name("verbose") |
|
109 | 109 | .help("enable additional output") |
|
110 | 110 | .short("-v") |
|
111 | 111 | .long("--verbose"), |
|
112 | 112 | ) |
|
113 | 113 | } |
|
114 | 114 | |
|
115 | 115 | /// Pure data type allowing the caller to specify file states to display |
|
116 | 116 | #[derive(Copy, Clone, Debug)] |
|
117 | 117 | pub struct DisplayStates { |
|
118 | 118 | pub modified: bool, |
|
119 | 119 | pub added: bool, |
|
120 | 120 | pub removed: bool, |
|
121 | 121 | pub clean: bool, |
|
122 | 122 | pub deleted: bool, |
|
123 | 123 | pub unknown: bool, |
|
124 | 124 | pub ignored: bool, |
|
125 | 125 | } |
|
126 | 126 | |
|
127 | 127 | pub const DEFAULT_DISPLAY_STATES: DisplayStates = DisplayStates { |
|
128 | 128 | modified: true, |
|
129 | 129 | added: true, |
|
130 | 130 | removed: true, |
|
131 | 131 | clean: false, |
|
132 | 132 | deleted: true, |
|
133 | 133 | unknown: true, |
|
134 | 134 | ignored: false, |
|
135 | 135 | }; |
|
136 | 136 | |
|
137 | 137 | pub const ALL_DISPLAY_STATES: DisplayStates = DisplayStates { |
|
138 | 138 | modified: true, |
|
139 | 139 | added: true, |
|
140 | 140 | removed: true, |
|
141 | 141 | clean: true, |
|
142 | 142 | deleted: true, |
|
143 | 143 | unknown: true, |
|
144 | 144 | ignored: true, |
|
145 | 145 | }; |
|
146 | 146 | |
|
147 | 147 | impl DisplayStates { |
|
148 | 148 | pub fn is_empty(&self) -> bool { |
|
149 | 149 | !(self.modified |
|
150 | 150 | || self.added |
|
151 | 151 | || self.removed |
|
152 | 152 | || self.clean |
|
153 | 153 | || self.deleted |
|
154 | 154 | || self.unknown |
|
155 | 155 | || self.ignored) |
|
156 | 156 | } |
|
157 | 157 | } |
|
158 | 158 | |
|
159 | 159 | fn has_unfinished_merge(repo: &Repo) -> Result<bool, CommandError> { |
|
160 | 160 | return Ok(repo.dirstate_parents()?.is_merge()); |
|
161 | 161 | } |
|
162 | 162 | |
|
163 | 163 | fn has_unfinished_state(repo: &Repo) -> Result<bool, CommandError> { |
|
164 | 164 | // These are all the known values for the [fname] argument of |
|
165 | 165 | // [addunfinished] function in [state.py] |
|
166 | 166 | let known_state_files: &[&str] = &[ |
|
167 | 167 | "bisect.state", |
|
168 | 168 | "graftstate", |
|
169 | 169 | "histedit-state", |
|
170 | 170 | "rebasestate", |
|
171 | 171 | "shelvedstate", |
|
172 | 172 | "transplant/journal", |
|
173 | 173 | "updatestate", |
|
174 | 174 | ]; |
|
175 | 175 | if has_unfinished_merge(repo)? { |
|
176 | 176 | return Ok(true); |
|
177 | 177 | }; |
|
178 | 178 | for f in known_state_files { |
|
179 | 179 | if repo.hg_vfs().join(f).exists() { |
|
180 | 180 | return Ok(true); |
|
181 | 181 | } |
|
182 | 182 | } |
|
183 | 183 | return Ok(false); |
|
184 | 184 | } |
|
185 | 185 | |
|
186 | 186 | pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> { |
|
187 | 187 | // TODO: lift these limitations |
|
188 | 188 | if invocation.config.get_bool(b"ui", b"tweakdefaults")? { |
|
189 | 189 | return Err(CommandError::unsupported( |
|
190 | 190 | "ui.tweakdefaults is not yet supported with rhg status", |
|
191 | 191 | )); |
|
192 | 192 | } |
|
193 | 193 | if invocation.config.get_bool(b"ui", b"statuscopies")? { |
|
194 | 194 | return Err(CommandError::unsupported( |
|
195 | 195 | "ui.statuscopies is not yet supported with rhg status", |
|
196 | 196 | )); |
|
197 | 197 | } |
|
198 | 198 | if invocation |
|
199 | 199 | .config |
|
200 | 200 | .get(b"commands", b"status.terse") |
|
201 | 201 | .is_some() |
|
202 | 202 | { |
|
203 | 203 | return Err(CommandError::unsupported( |
|
204 | 204 | "status.terse is not yet supported with rhg status", |
|
205 | 205 | )); |
|
206 | 206 | } |
|
207 | 207 | |
|
208 | 208 | let ui = invocation.ui; |
|
209 | 209 | let config = invocation.config; |
|
210 | 210 | let args = invocation.subcommand_args; |
|
211 | 211 | |
|
212 | 212 | let verbose = !ui.plain(None) |
|
213 | 213 | && !args.is_present("print0") |
|
214 | 214 | && (args.is_present("verbose") |
|
215 | 215 | || config.get_bool(b"ui", b"verbose")? |
|
216 | 216 | || config.get_bool(b"commands", b"status.verbose")?); |
|
217 | 217 | |
|
218 | 218 | let all = args.is_present("all"); |
|
219 | 219 | let display_states = if all { |
|
220 | 220 | // TODO when implementing `--quiet`: it excludes clean files |
|
221 | 221 | // from `--all` |
|
222 | 222 | ALL_DISPLAY_STATES |
|
223 | 223 | } else { |
|
224 | 224 | let requested = DisplayStates { |
|
225 | 225 | modified: args.is_present("modified"), |
|
226 | 226 | added: args.is_present("added"), |
|
227 | 227 | removed: args.is_present("removed"), |
|
228 | 228 | clean: args.is_present("clean"), |
|
229 | 229 | deleted: args.is_present("deleted"), |
|
230 | 230 | unknown: args.is_present("unknown"), |
|
231 | 231 | ignored: args.is_present("ignored"), |
|
232 | 232 | }; |
|
233 | 233 | if requested.is_empty() { |
|
234 | 234 | DEFAULT_DISPLAY_STATES |
|
235 | 235 | } else { |
|
236 | 236 | requested |
|
237 | 237 | } |
|
238 | 238 | }; |
|
239 | 239 | let no_status = args.is_present("no-status"); |
|
240 | 240 | let list_copies = all |
|
241 | 241 | || args.is_present("copies") |
|
242 | 242 | || config.get_bool(b"ui", b"statuscopies")?; |
|
243 | 243 | |
|
244 | 244 | let repo = invocation.repo?; |
|
245 | 245 | |
|
246 | 246 | if verbose { |
|
247 | 247 | if has_unfinished_state(repo)? { |
|
248 | 248 | return Err(CommandError::unsupported( |
|
249 | 249 | "verbose status output is not supported by rhg (and is needed because we're in an unfinished operation)", |
|
250 | 250 | )); |
|
251 | 251 | }; |
|
252 | 252 | } |
|
253 | 253 | |
|
254 | if repo.has_narrow() { | |
|
255 | return Err(CommandError::unsupported( | |
|
256 | "rhg status is not supported for narrow clones yet", | |
|
257 | )); | |
|
258 | } | |
|
259 | ||
|
260 | 254 | let mut dmap = repo.dirstate_map_mut()?; |
|
261 | 255 | |
|
262 | 256 | let options = StatusOptions { |
|
263 | 257 | // we're currently supporting file systems with exec flags only |
|
264 | 258 | // anyway |
|
265 | 259 | check_exec: true, |
|
266 | 260 | list_clean: display_states.clean, |
|
267 | 261 | list_unknown: display_states.unknown, |
|
268 | 262 | list_ignored: display_states.ignored, |
|
269 | 263 | list_copies, |
|
270 | 264 | collect_traversed_dirs: false, |
|
271 | 265 | }; |
|
272 | 266 | |
|
273 | 267 | type StatusResult<'a> = |
|
274 | 268 | Result<(DirstateStatus<'a>, Vec<PatternFileWarning>), StatusError>; |
|
275 | 269 | |
|
276 | 270 | let after_status = |res: StatusResult| -> Result<_, CommandError> { |
|
277 | 271 | let (mut ds_status, pattern_warnings) = res?; |
|
278 | 272 | for warning in pattern_warnings { |
|
279 | 273 | ui.write_stderr(&print_pattern_file_warning(&warning, &repo))?; |
|
280 | 274 | } |
|
281 | 275 | |
|
282 | 276 | for (path, error) in ds_status.bad { |
|
283 | 277 | let error = match error { |
|
284 | 278 | hg::BadMatch::OsError(code) => { |
|
285 | 279 | std::io::Error::from_raw_os_error(code).to_string() |
|
286 | 280 | } |
|
287 | 281 | hg::BadMatch::BadType(ty) => { |
|
288 | 282 | format!("unsupported file type (type is {})", ty) |
|
289 | 283 | } |
|
290 | 284 | }; |
|
291 | 285 | ui.write_stderr(&format_bytes!( |
|
292 | 286 | b"{}: {}\n", |
|
293 | 287 | path.as_bytes(), |
|
294 | 288 | error.as_bytes() |
|
295 | 289 | ))? |
|
296 | 290 | } |
|
297 | 291 | if !ds_status.unsure.is_empty() { |
|
298 | 292 | info!( |
|
299 | 293 | "Files to be rechecked by retrieval from filelog: {:?}", |
|
300 | 294 | ds_status.unsure.iter().map(|s| &s.path).collect::<Vec<_>>() |
|
301 | 295 | ); |
|
302 | 296 | } |
|
303 | 297 | let mut fixup = Vec::new(); |
|
304 | 298 | if !ds_status.unsure.is_empty() |
|
305 | 299 | && (display_states.modified || display_states.clean) |
|
306 | 300 | { |
|
307 | 301 | let p1 = repo.dirstate_parents()?.p1; |
|
308 | 302 | let manifest = repo.manifest_for_node(p1).map_err(|e| { |
|
309 | 303 | CommandError::from((e, &*format!("{:x}", p1.short()))) |
|
310 | 304 | })?; |
|
311 | 305 | for to_check in ds_status.unsure { |
|
312 | 306 | if unsure_is_modified(repo, &manifest, &to_check.path)? { |
|
313 | 307 | if display_states.modified { |
|
314 | 308 | ds_status.modified.push(to_check); |
|
315 | 309 | } |
|
316 | 310 | } else { |
|
317 | 311 | if display_states.clean { |
|
318 | 312 | ds_status.clean.push(to_check.clone()); |
|
319 | 313 | } |
|
320 | 314 | fixup.push(to_check.path.into_owned()) |
|
321 | 315 | } |
|
322 | 316 | } |
|
323 | 317 | } |
|
324 | 318 | let relative_paths = (!ui.plain(None)) |
|
325 | 319 | && config |
|
326 | 320 | .get_option(b"commands", b"status.relative")? |
|
327 | 321 | .unwrap_or(config.get_bool(b"ui", b"relative-paths")?); |
|
328 | 322 | let output = DisplayStatusPaths { |
|
329 | 323 | ui, |
|
330 | 324 | no_status, |
|
331 | 325 | relativize: if relative_paths { |
|
332 | 326 | Some(RelativizePaths::new(repo)?) |
|
333 | 327 | } else { |
|
334 | 328 | None |
|
335 | 329 | }, |
|
336 | 330 | }; |
|
337 | 331 | if display_states.modified { |
|
338 | 332 | output.display(b"M ", "status.modified", ds_status.modified)?; |
|
339 | 333 | } |
|
340 | 334 | if display_states.added { |
|
341 | 335 | output.display(b"A ", "status.added", ds_status.added)?; |
|
342 | 336 | } |
|
343 | 337 | if display_states.removed { |
|
344 | 338 | output.display(b"R ", "status.removed", ds_status.removed)?; |
|
345 | 339 | } |
|
346 | 340 | if display_states.deleted { |
|
347 | 341 | output.display(b"! ", "status.deleted", ds_status.deleted)?; |
|
348 | 342 | } |
|
349 | 343 | if display_states.unknown { |
|
350 | 344 | output.display(b"? ", "status.unknown", ds_status.unknown)?; |
|
351 | 345 | } |
|
352 | 346 | if display_states.ignored { |
|
353 | 347 | output.display(b"I ", "status.ignored", ds_status.ignored)?; |
|
354 | 348 | } |
|
355 | 349 | if display_states.clean { |
|
356 | 350 | output.display(b"C ", "status.clean", ds_status.clean)?; |
|
357 | 351 | } |
|
358 | 352 | |
|
359 | 353 | let dirstate_write_needed = ds_status.dirty; |
|
360 | 354 | let filesystem_time_at_status_start = |
|
361 | 355 | ds_status.filesystem_time_at_status_start; |
|
362 | 356 | |
|
363 | 357 | Ok(( |
|
364 | 358 | fixup, |
|
365 | 359 | dirstate_write_needed, |
|
366 | 360 | filesystem_time_at_status_start, |
|
367 | 361 | )) |
|
368 | 362 | }; |
|
369 |
let (matcher, |
|
|
363 | let (narrow_matcher, narrow_warnings) = narrow::matcher(repo)?; | |
|
364 | let (sparse_matcher, sparse_warnings) = sparse::matcher(repo)?; | |
|
365 | let matcher = match (repo.has_narrow(), repo.has_sparse()) { | |
|
366 | (true, true) => { | |
|
367 | Box::new(IntersectionMatcher::new(narrow_matcher, sparse_matcher)) | |
|
368 | } | |
|
369 | (true, false) => narrow_matcher, | |
|
370 | (false, true) => sparse_matcher, | |
|
371 | (false, false) => Box::new(AlwaysMatcher), | |
|
372 | }; | |
|
370 | 373 | |
|
371 | for warning in sparse_warnings { | |
|
374 | for warning in narrow_warnings.into_iter().chain(sparse_warnings) { | |
|
372 | 375 | match &warning { |
|
373 | SparseWarning::RootWarning { context, line } => { | |
|
376 | sparse::SparseWarning::RootWarning { context, line } => { | |
|
374 | 377 | let msg = format_bytes!( |
|
375 | 378 | b"warning: {} profile cannot use paths \" |
|
376 | 379 | starting with /, ignoring {}\n", |
|
377 | 380 | context, |
|
378 | 381 | line |
|
379 | 382 | ); |
|
380 | 383 | ui.write_stderr(&msg)?; |
|
381 | 384 | } |
|
382 | SparseWarning::ProfileNotFound { profile, rev } => { | |
|
385 | sparse::SparseWarning::ProfileNotFound { profile, rev } => { | |
|
383 | 386 | let msg = format_bytes!( |
|
384 | 387 | b"warning: sparse profile '{}' not found \" |
|
385 | 388 | in rev {} - ignoring it\n", |
|
386 | 389 | profile, |
|
387 | 390 | rev |
|
388 | 391 | ); |
|
389 | 392 | ui.write_stderr(&msg)?; |
|
390 | 393 | } |
|
391 | SparseWarning::Pattern(e) => { | |
|
394 | sparse::SparseWarning::Pattern(e) => { | |
|
392 | 395 | ui.write_stderr(&print_pattern_file_warning(e, &repo))?; |
|
393 | 396 | } |
|
394 | 397 | } |
|
395 | 398 | } |
|
396 | 399 | let (fixup, mut dirstate_write_needed, filesystem_time_at_status_start) = |
|
397 | 400 | dmap.with_status( |
|
398 | 401 | matcher.as_ref(), |
|
399 | 402 | repo.working_directory_path().to_owned(), |
|
400 | 403 | ignore_files(repo, config), |
|
401 | 404 | options, |
|
402 | 405 | after_status, |
|
403 | 406 | )?; |
|
404 | 407 | |
|
405 | 408 | if (fixup.is_empty() || filesystem_time_at_status_start.is_none()) |
|
406 | 409 | && !dirstate_write_needed |
|
407 | 410 | { |
|
408 | 411 | // Nothing to update |
|
409 | 412 | return Ok(()); |
|
410 | 413 | } |
|
411 | 414 | |
|
412 | 415 | // Update the dirstate on disk if we can |
|
413 | 416 | let with_lock_result = |
|
414 | 417 | repo.try_with_wlock_no_wait(|| -> Result<(), CommandError> { |
|
415 | 418 | if let Some(mtime_boundary) = filesystem_time_at_status_start { |
|
416 | 419 | for hg_path in fixup { |
|
417 | 420 | use std::os::unix::fs::MetadataExt; |
|
418 | 421 | let fs_path = hg_path_to_path_buf(&hg_path) |
|
419 | 422 | .expect("HgPath conversion"); |
|
420 | 423 | // Specifically do not reuse `fs_metadata` from |
|
421 | 424 | // `unsure_is_clean` which was needed before reading |
|
422 | 425 | // contents. Here we access metadata again after reading |
|
423 | 426 | // content, in case it changed in the meantime. |
|
424 | 427 | let fs_metadata = repo |
|
425 | 428 | .working_directory_vfs() |
|
426 | 429 | .symlink_metadata(&fs_path)?; |
|
427 | 430 | if let Some(mtime) = |
|
428 | 431 | TruncatedTimestamp::for_reliable_mtime_of( |
|
429 | 432 | &fs_metadata, |
|
430 | 433 | &mtime_boundary, |
|
431 | 434 | ) |
|
432 | 435 | .when_reading_file(&fs_path)? |
|
433 | 436 | { |
|
434 | 437 | let mode = fs_metadata.mode(); |
|
435 | 438 | let size = fs_metadata.len(); |
|
436 | 439 | dmap.set_clean(&hg_path, mode, size as u32, mtime)?; |
|
437 | 440 | dirstate_write_needed = true |
|
438 | 441 | } |
|
439 | 442 | } |
|
440 | 443 | } |
|
441 | 444 | drop(dmap); // Avoid "already mutably borrowed" RefCell panics |
|
442 | 445 | if dirstate_write_needed { |
|
443 | 446 | repo.write_dirstate()? |
|
444 | 447 | } |
|
445 | 448 | Ok(()) |
|
446 | 449 | }); |
|
447 | 450 | match with_lock_result { |
|
448 | 451 | Ok(closure_result) => closure_result?, |
|
449 | 452 | Err(LockError::AlreadyHeld) => { |
|
450 | 453 | // Not updating the dirstate is not ideal but not critical: |
|
451 | 454 | // don’t keep our caller waiting until some other Mercurial |
|
452 | 455 | // process releases the lock. |
|
453 | 456 | } |
|
454 | 457 | Err(LockError::Other(HgError::IoError { error, .. })) |
|
455 | 458 | if error.kind() == io::ErrorKind::PermissionDenied => |
|
456 | 459 | { |
|
457 | 460 | // `hg status` on a read-only repository is fine |
|
458 | 461 | } |
|
459 | 462 | Err(LockError::Other(error)) => { |
|
460 | 463 | // Report other I/O errors |
|
461 | 464 | Err(error)? |
|
462 | 465 | } |
|
463 | 466 | } |
|
464 | 467 | Ok(()) |
|
465 | 468 | } |
|
466 | 469 | |
|
467 | 470 | fn ignore_files(repo: &Repo, config: &Config) -> Vec<PathBuf> { |
|
468 | 471 | let mut ignore_files = Vec::new(); |
|
469 | 472 | let repo_ignore = repo.working_directory_vfs().join(".hgignore"); |
|
470 | 473 | if repo_ignore.exists() { |
|
471 | 474 | ignore_files.push(repo_ignore) |
|
472 | 475 | } |
|
473 | 476 | for (key, value) in config.iter_section(b"ui") { |
|
474 | 477 | if key == b"ignore" || key.starts_with(b"ignore.") { |
|
475 | 478 | let path = get_path_from_bytes(value); |
|
476 | 479 | // TODO: expand "~/" and environment variable here, like Python |
|
477 | 480 | // does with `os.path.expanduser` and `os.path.expandvars` |
|
478 | 481 | |
|
479 | 482 | let joined = repo.working_directory_path().join(path); |
|
480 | 483 | ignore_files.push(joined); |
|
481 | 484 | } |
|
482 | 485 | } |
|
483 | 486 | ignore_files |
|
484 | 487 | } |
|
485 | 488 | |
|
486 | 489 | struct DisplayStatusPaths<'a> { |
|
487 | 490 | ui: &'a Ui, |
|
488 | 491 | no_status: bool, |
|
489 | 492 | relativize: Option<RelativizePaths>, |
|
490 | 493 | } |
|
491 | 494 | |
|
492 | 495 | impl DisplayStatusPaths<'_> { |
|
493 | 496 | // Probably more elegant to use a Deref or Borrow trait rather than |
|
494 | 497 | // harcode HgPathBuf, but probably not really useful at this point |
|
495 | 498 | fn display( |
|
496 | 499 | &self, |
|
497 | 500 | status_prefix: &[u8], |
|
498 | 501 | label: &'static str, |
|
499 | 502 | mut paths: Vec<StatusPath<'_>>, |
|
500 | 503 | ) -> Result<(), CommandError> { |
|
501 | 504 | paths.sort_unstable(); |
|
502 | 505 | // TODO: get the stdout lock once for the whole loop |
|
503 | 506 | // instead of in each write |
|
504 | 507 | for StatusPath { path, copy_source } in paths { |
|
505 | 508 | let relative; |
|
506 | 509 | let path = if let Some(relativize) = &self.relativize { |
|
507 | 510 | relative = relativize.relativize(&path); |
|
508 | 511 | &*relative |
|
509 | 512 | } else { |
|
510 | 513 | path.as_bytes() |
|
511 | 514 | }; |
|
512 | 515 | // TODO: Add a way to use `write_bytes!` instead of `format_bytes!` |
|
513 | 516 | // in order to stream to stdout instead of allocating an |
|
514 | 517 | // itermediate `Vec<u8>`. |
|
515 | 518 | if !self.no_status { |
|
516 | 519 | self.ui.write_stdout_labelled(status_prefix, label)? |
|
517 | 520 | } |
|
518 | 521 | self.ui |
|
519 | 522 | .write_stdout_labelled(&format_bytes!(b"{}\n", path), label)?; |
|
520 | 523 | if let Some(source) = copy_source { |
|
521 | 524 | let label = "status.copied"; |
|
522 | 525 | self.ui.write_stdout_labelled( |
|
523 | 526 | &format_bytes!(b" {}\n", source.as_bytes()), |
|
524 | 527 | label, |
|
525 | 528 | )? |
|
526 | 529 | } |
|
527 | 530 | } |
|
528 | 531 | Ok(()) |
|
529 | 532 | } |
|
530 | 533 | } |
|
531 | 534 | |
|
532 | 535 | /// Check if a file is modified by comparing actual repo store and file system. |
|
533 | 536 | /// |
|
534 | 537 | /// This meant to be used for those that the dirstate cannot resolve, due |
|
535 | 538 | /// to time resolution limits. |
|
536 | 539 | fn unsure_is_modified( |
|
537 | 540 | repo: &Repo, |
|
538 | 541 | manifest: &Manifest, |
|
539 | 542 | hg_path: &HgPath, |
|
540 | 543 | ) -> Result<bool, HgError> { |
|
541 | 544 | let vfs = repo.working_directory_vfs(); |
|
542 | 545 | let fs_path = hg_path_to_path_buf(hg_path).expect("HgPath conversion"); |
|
543 | 546 | let fs_metadata = vfs.symlink_metadata(&fs_path)?; |
|
544 | 547 | let is_symlink = fs_metadata.file_type().is_symlink(); |
|
545 | 548 | // TODO: Also account for `FALLBACK_SYMLINK` and `FALLBACK_EXEC` from the |
|
546 | 549 | // dirstate |
|
547 | 550 | let fs_flags = if is_symlink { |
|
548 | 551 | Some(b'l') |
|
549 | 552 | } else if has_exec_bit(&fs_metadata) { |
|
550 | 553 | Some(b'x') |
|
551 | 554 | } else { |
|
552 | 555 | None |
|
553 | 556 | }; |
|
554 | 557 | |
|
555 | 558 | let entry = manifest |
|
556 | 559 | .find_by_path(hg_path)? |
|
557 | 560 | .expect("ambgious file not in p1"); |
|
558 | 561 | if entry.flags != fs_flags { |
|
559 | 562 | return Ok(true); |
|
560 | 563 | } |
|
561 | 564 | let filelog = repo.filelog(hg_path)?; |
|
562 | 565 | let fs_len = fs_metadata.len(); |
|
563 | 566 | let file_node = entry.node_id()?; |
|
564 | 567 | let filelog_entry = filelog.entry_for_node(file_node).map_err(|_| { |
|
565 | 568 | HgError::corrupted(format!( |
|
566 | 569 | "filelog missing node {:?} from manifest", |
|
567 | 570 | file_node |
|
568 | 571 | )) |
|
569 | 572 | })?; |
|
570 | 573 | if filelog_entry.file_data_len_not_equal_to(fs_len) { |
|
571 | 574 | // No need to read file contents: |
|
572 | 575 | // it cannot be equal if it has a different length. |
|
573 | 576 | return Ok(true); |
|
574 | 577 | } |
|
575 | 578 | |
|
576 | 579 | let p1_filelog_data = filelog_entry.data()?; |
|
577 | 580 | let p1_contents = p1_filelog_data.file_data()?; |
|
578 | 581 | if p1_contents.len() as u64 != fs_len { |
|
579 | 582 | // No need to read file contents: |
|
580 | 583 | // it cannot be equal if it has a different length. |
|
581 | 584 | return Ok(true); |
|
582 | 585 | } |
|
583 | 586 | |
|
584 | 587 | let fs_contents = if is_symlink { |
|
585 | 588 | get_bytes_from_os_string(vfs.read_link(fs_path)?.into_os_string()) |
|
586 | 589 | } else { |
|
587 | 590 | vfs.read(fs_path)? |
|
588 | 591 | }; |
|
589 | 592 | Ok(p1_contents != &*fs_contents) |
|
590 | 593 | } |
|
591 | 594 | |
|
592 | 595 | fn print_pattern_file_warning( |
|
593 | 596 | warning: &PatternFileWarning, |
|
594 | 597 | repo: &Repo, |
|
595 | 598 | ) -> Vec<u8> { |
|
596 | 599 | match warning { |
|
597 | 600 | PatternFileWarning::InvalidSyntax(path, syntax) => format_bytes!( |
|
598 | 601 | b"{}: ignoring invalid syntax '{}'\n", |
|
599 | 602 | get_bytes_from_path(path), |
|
600 | 603 | &*syntax |
|
601 | 604 | ), |
|
602 | 605 | PatternFileWarning::NoSuchFile(path) => { |
|
603 | 606 | let path = if let Ok(relative) = |
|
604 | 607 | path.strip_prefix(repo.working_directory_path()) |
|
605 | 608 | { |
|
606 | 609 | relative |
|
607 | 610 | } else { |
|
608 | 611 | &*path |
|
609 | 612 | }; |
|
610 | 613 | format_bytes!( |
|
611 | 614 | b"skipping unreadable pattern file '{}': \ |
|
612 | 615 | No such file or directory\n", |
|
613 | 616 | get_bytes_from_path(path), |
|
614 | 617 | ) |
|
615 | 618 | } |
|
616 | 619 | } |
|
617 | 620 | } |
@@ -1,277 +1,290 b'' | |||
|
1 | 1 | use crate::ui::utf8_to_local; |
|
2 | 2 | use crate::ui::UiError; |
|
3 | 3 | use crate::NoRepoInCwdError; |
|
4 | 4 | use format_bytes::format_bytes; |
|
5 | 5 | use hg::config::{ConfigError, ConfigParseError, ConfigValueParseError}; |
|
6 | 6 | use hg::dirstate_tree::on_disk::DirstateV2ParseError; |
|
7 | 7 | use hg::errors::HgError; |
|
8 | 8 | use hg::exit_codes; |
|
9 | 9 | use hg::repo::RepoError; |
|
10 | 10 | use hg::revlog::revlog::RevlogError; |
|
11 | 11 | use hg::sparse::SparseConfigError; |
|
12 | 12 | use hg::utils::files::get_bytes_from_path; |
|
13 | 13 | use hg::{DirstateError, DirstateMapError, StatusError}; |
|
14 | 14 | use std::convert::From; |
|
15 | 15 | |
|
16 | 16 | /// The kind of command error |
|
17 | 17 | #[derive(Debug)] |
|
18 | 18 | pub enum CommandError { |
|
19 | 19 | /// Exit with an error message and "standard" failure exit code. |
|
20 | 20 | Abort { |
|
21 | 21 | message: Vec<u8>, |
|
22 | 22 | detailed_exit_code: exit_codes::ExitCode, |
|
23 | 23 | hint: Option<Vec<u8>>, |
|
24 | 24 | }, |
|
25 | 25 | |
|
26 | 26 | /// Exit with a failure exit code but no message. |
|
27 | 27 | Unsuccessful, |
|
28 | 28 | |
|
29 | 29 | /// Encountered something (such as a CLI argument, repository layout, …) |
|
30 | 30 | /// not supported by this version of `rhg`. Depending on configuration |
|
31 | 31 | /// `rhg` may attempt to silently fall back to Python-based `hg`, which |
|
32 | 32 | /// may or may not support this feature. |
|
33 | 33 | UnsupportedFeature { message: Vec<u8> }, |
|
34 | 34 | /// The fallback executable does not exist (or has some other problem if |
|
35 | 35 | /// we end up being more precise about broken fallbacks). |
|
36 | 36 | InvalidFallback { path: Vec<u8>, err: String }, |
|
37 | 37 | } |
|
38 | 38 | |
|
39 | 39 | impl CommandError { |
|
40 | 40 | pub fn abort(message: impl AsRef<str>) -> Self { |
|
41 | 41 | CommandError::abort_with_exit_code(message, exit_codes::ABORT) |
|
42 | 42 | } |
|
43 | 43 | |
|
44 | 44 | pub fn abort_with_exit_code( |
|
45 | 45 | message: impl AsRef<str>, |
|
46 | 46 | detailed_exit_code: exit_codes::ExitCode, |
|
47 | 47 | ) -> Self { |
|
48 | 48 | CommandError::Abort { |
|
49 | 49 | // TODO: bytes-based (instead of Unicode-based) formatting |
|
50 | 50 | // of error messages to handle non-UTF-8 filenames etc: |
|
51 | 51 | // https://www.mercurial-scm.org/wiki/EncodingStrategy#Mixing_output |
|
52 | 52 | message: utf8_to_local(message.as_ref()).into(), |
|
53 | 53 | detailed_exit_code: detailed_exit_code, |
|
54 | 54 | hint: None, |
|
55 | 55 | } |
|
56 | 56 | } |
|
57 | 57 | |
|
58 | 58 | pub fn abort_with_exit_code_and_hint( |
|
59 | 59 | message: impl AsRef<str>, |
|
60 | 60 | detailed_exit_code: exit_codes::ExitCode, |
|
61 | 61 | hint: Option<impl AsRef<str>>, |
|
62 | 62 | ) -> Self { |
|
63 | 63 | CommandError::Abort { |
|
64 | 64 | message: utf8_to_local(message.as_ref()).into(), |
|
65 | 65 | detailed_exit_code, |
|
66 | 66 | hint: hint.map(|h| utf8_to_local(h.as_ref()).into()), |
|
67 | 67 | } |
|
68 | 68 | } |
|
69 | 69 | |
|
70 | 70 | pub fn abort_with_exit_code_bytes( |
|
71 | 71 | message: impl AsRef<[u8]>, |
|
72 | 72 | detailed_exit_code: exit_codes::ExitCode, |
|
73 | 73 | ) -> Self { |
|
74 | 74 | // TODO: use this everywhere it makes sense instead of the string |
|
75 | 75 | // version. |
|
76 | 76 | CommandError::Abort { |
|
77 | 77 | message: message.as_ref().into(), |
|
78 | 78 | detailed_exit_code, |
|
79 | 79 | hint: None, |
|
80 | 80 | } |
|
81 | 81 | } |
|
82 | 82 | |
|
83 | 83 | pub fn unsupported(message: impl AsRef<str>) -> Self { |
|
84 | 84 | CommandError::UnsupportedFeature { |
|
85 | 85 | message: utf8_to_local(message.as_ref()).into(), |
|
86 | 86 | } |
|
87 | 87 | } |
|
88 | 88 | } |
|
89 | 89 | |
|
90 | 90 | /// For now we don’t differenciate between invalid CLI args and valid for `hg` |
|
91 | 91 | /// but not supported yet by `rhg`. |
|
92 | 92 | impl From<clap::Error> for CommandError { |
|
93 | 93 | fn from(error: clap::Error) -> Self { |
|
94 | 94 | CommandError::unsupported(error.to_string()) |
|
95 | 95 | } |
|
96 | 96 | } |
|
97 | 97 | |
|
98 | 98 | impl From<HgError> for CommandError { |
|
99 | 99 | fn from(error: HgError) -> Self { |
|
100 | 100 | match error { |
|
101 | 101 | HgError::UnsupportedFeature(message) => { |
|
102 | 102 | CommandError::unsupported(message) |
|
103 | 103 | } |
|
104 | 104 | HgError::CensoredNodeError => { |
|
105 | 105 | CommandError::unsupported("Encountered a censored node") |
|
106 | 106 | } |
|
107 | 107 | HgError::Abort { |
|
108 | 108 | message, |
|
109 | 109 | detailed_exit_code, |
|
110 | 110 | hint, |
|
111 | 111 | } => CommandError::abort_with_exit_code_and_hint( |
|
112 | 112 | message, |
|
113 | 113 | detailed_exit_code, |
|
114 | 114 | hint, |
|
115 | 115 | ), |
|
116 | 116 | _ => CommandError::abort(error.to_string()), |
|
117 | 117 | } |
|
118 | 118 | } |
|
119 | 119 | } |
|
120 | 120 | |
|
121 | 121 | impl From<ConfigValueParseError> for CommandError { |
|
122 | 122 | fn from(error: ConfigValueParseError) -> Self { |
|
123 | 123 | CommandError::abort_with_exit_code( |
|
124 | 124 | error.to_string(), |
|
125 | 125 | exit_codes::CONFIG_ERROR_ABORT, |
|
126 | 126 | ) |
|
127 | 127 | } |
|
128 | 128 | } |
|
129 | 129 | |
|
130 | 130 | impl From<UiError> for CommandError { |
|
131 | 131 | fn from(_error: UiError) -> Self { |
|
132 | 132 | // If we already failed writing to stdout or stderr, |
|
133 | 133 | // writing an error message to stderr about it would be likely to fail |
|
134 | 134 | // too. |
|
135 | 135 | CommandError::abort("") |
|
136 | 136 | } |
|
137 | 137 | } |
|
138 | 138 | |
|
139 | 139 | impl From<RepoError> for CommandError { |
|
140 | 140 | fn from(error: RepoError) -> Self { |
|
141 | 141 | match error { |
|
142 | 142 | RepoError::NotFound { at } => { |
|
143 | 143 | CommandError::abort_with_exit_code_bytes( |
|
144 | 144 | format_bytes!( |
|
145 | 145 | b"abort: repository {} not found", |
|
146 | 146 | get_bytes_from_path(at) |
|
147 | 147 | ), |
|
148 | 148 | exit_codes::ABORT, |
|
149 | 149 | ) |
|
150 | 150 | } |
|
151 | 151 | RepoError::ConfigParseError(error) => error.into(), |
|
152 | 152 | RepoError::Other(error) => error.into(), |
|
153 | 153 | } |
|
154 | 154 | } |
|
155 | 155 | } |
|
156 | 156 | |
|
157 | 157 | impl<'a> From<&'a NoRepoInCwdError> for CommandError { |
|
158 | 158 | fn from(error: &'a NoRepoInCwdError) -> Self { |
|
159 | 159 | let NoRepoInCwdError { cwd } = error; |
|
160 | 160 | CommandError::abort_with_exit_code_bytes( |
|
161 | 161 | format_bytes!( |
|
162 | 162 | b"abort: no repository found in '{}' (.hg not found)!", |
|
163 | 163 | get_bytes_from_path(cwd) |
|
164 | 164 | ), |
|
165 | 165 | exit_codes::ABORT, |
|
166 | 166 | ) |
|
167 | 167 | } |
|
168 | 168 | } |
|
169 | 169 | |
|
170 | 170 | impl From<ConfigError> for CommandError { |
|
171 | 171 | fn from(error: ConfigError) -> Self { |
|
172 | 172 | match error { |
|
173 | 173 | ConfigError::Parse(error) => error.into(), |
|
174 | 174 | ConfigError::Other(error) => error.into(), |
|
175 | 175 | } |
|
176 | 176 | } |
|
177 | 177 | } |
|
178 | 178 | |
|
179 | 179 | impl From<ConfigParseError> for CommandError { |
|
180 | 180 | fn from(error: ConfigParseError) -> Self { |
|
181 | 181 | let ConfigParseError { |
|
182 | 182 | origin, |
|
183 | 183 | line, |
|
184 | 184 | message, |
|
185 | 185 | } = error; |
|
186 | 186 | let line_message = if let Some(line_number) = line { |
|
187 | 187 | format_bytes!(b":{}", line_number.to_string().into_bytes()) |
|
188 | 188 | } else { |
|
189 | 189 | Vec::new() |
|
190 | 190 | }; |
|
191 | 191 | CommandError::abort_with_exit_code_bytes( |
|
192 | 192 | format_bytes!( |
|
193 | 193 | b"config error at {}{}: {}", |
|
194 | 194 | origin, |
|
195 | 195 | line_message, |
|
196 | 196 | message |
|
197 | 197 | ), |
|
198 | 198 | exit_codes::CONFIG_ERROR_ABORT, |
|
199 | 199 | ) |
|
200 | 200 | } |
|
201 | 201 | } |
|
202 | 202 | |
|
203 | 203 | impl From<(RevlogError, &str)> for CommandError { |
|
204 | 204 | fn from((err, rev): (RevlogError, &str)) -> CommandError { |
|
205 | 205 | match err { |
|
206 | 206 | RevlogError::WDirUnsupported => CommandError::abort( |
|
207 | 207 | "abort: working directory revision cannot be specified", |
|
208 | 208 | ), |
|
209 | 209 | RevlogError::InvalidRevision => CommandError::abort(format!( |
|
210 | 210 | "abort: invalid revision identifier: {}", |
|
211 | 211 | rev |
|
212 | 212 | )), |
|
213 | 213 | RevlogError::AmbiguousPrefix => CommandError::abort(format!( |
|
214 | 214 | "abort: ambiguous revision identifier: {}", |
|
215 | 215 | rev |
|
216 | 216 | )), |
|
217 | 217 | RevlogError::Other(error) => error.into(), |
|
218 | 218 | } |
|
219 | 219 | } |
|
220 | 220 | } |
|
221 | 221 | |
|
222 | 222 | impl From<StatusError> for CommandError { |
|
223 | 223 | fn from(error: StatusError) -> Self { |
|
224 | 224 | CommandError::abort(format!("{}", error)) |
|
225 | 225 | } |
|
226 | 226 | } |
|
227 | 227 | |
|
228 | 228 | impl From<DirstateMapError> for CommandError { |
|
229 | 229 | fn from(error: DirstateMapError) -> Self { |
|
230 | 230 | CommandError::abort(format!("{}", error)) |
|
231 | 231 | } |
|
232 | 232 | } |
|
233 | 233 | |
|
234 | 234 | impl From<DirstateError> for CommandError { |
|
235 | 235 | fn from(error: DirstateError) -> Self { |
|
236 | 236 | match error { |
|
237 | 237 | DirstateError::Common(error) => error.into(), |
|
238 | 238 | DirstateError::Map(error) => error.into(), |
|
239 | 239 | } |
|
240 | 240 | } |
|
241 | 241 | } |
|
242 | 242 | |
|
243 | 243 | impl From<DirstateV2ParseError> for CommandError { |
|
244 | 244 | fn from(error: DirstateV2ParseError) -> Self { |
|
245 | 245 | HgError::from(error).into() |
|
246 | 246 | } |
|
247 | 247 | } |
|
248 | 248 | |
|
249 | 249 | impl From<SparseConfigError> for CommandError { |
|
250 | 250 | fn from(e: SparseConfigError) -> Self { |
|
251 | 251 | match e { |
|
252 | 252 | SparseConfigError::IncludesAfterExcludes { context } => { |
|
253 | 253 | Self::abort_with_exit_code_bytes( |
|
254 | 254 | format_bytes!( |
|
255 | 255 | b"{} config cannot have includes after excludes", |
|
256 | 256 | context |
|
257 | 257 | ), |
|
258 | 258 | exit_codes::CONFIG_PARSE_ERROR_ABORT, |
|
259 | 259 | ) |
|
260 | 260 | } |
|
261 | 261 | SparseConfigError::EntryOutsideSection { context, line } => { |
|
262 | 262 | Self::abort_with_exit_code_bytes( |
|
263 | 263 | format_bytes!( |
|
264 | 264 | b"{} config entry outside of section: {}", |
|
265 | 265 | context, |
|
266 | 266 | &line, |
|
267 | 267 | ), |
|
268 | 268 | exit_codes::CONFIG_PARSE_ERROR_ABORT, |
|
269 | 269 | ) |
|
270 | 270 | } |
|
271 | SparseConfigError::InvalidNarrowPrefix(prefix) => { | |
|
272 | Self::abort_with_exit_code_bytes( | |
|
273 | format_bytes!( | |
|
274 | b"invalid prefix on narrow pattern: {}", | |
|
275 | &prefix | |
|
276 | ), | |
|
277 | exit_codes::ABORT, | |
|
278 | ) | |
|
279 | } | |
|
280 | SparseConfigError::IncludesInNarrow => Self::abort( | |
|
281 | "including other spec files using '%include' \ | |
|
282 | is not supported in narrowspec", | |
|
283 | ), | |
|
271 | 284 | SparseConfigError::HgError(e) => Self::from(e), |
|
272 | 285 | SparseConfigError::PatternError(e) => { |
|
273 | 286 | Self::unsupported(format!("{}", e)) |
|
274 | 287 | } |
|
275 | 288 | } |
|
276 | 289 | } |
|
277 | 290 | } |
@@ -1,120 +1,117 b'' | |||
|
1 | 1 | #require rhg |
|
2 | 2 | |
|
3 | 3 | $ NO_FALLBACK="env RHG_ON_UNSUPPORTED=abort" |
|
4 | 4 | |
|
5 | 5 | Rhg works well when sparse working copy is enabled. |
|
6 | 6 | |
|
7 | 7 | $ cd "$TESTTMP" |
|
8 | 8 | $ hg init repo-sparse |
|
9 | 9 | $ cd repo-sparse |
|
10 | 10 | $ cat > .hg/hgrc <<EOF |
|
11 | 11 | > [extensions] |
|
12 | 12 | > sparse= |
|
13 | 13 | > EOF |
|
14 | 14 | |
|
15 | 15 | $ echo a > show |
|
16 | 16 | $ echo x > hide |
|
17 | 17 | $ mkdir dir1 dir2 |
|
18 | 18 | $ echo x > dir1/x |
|
19 | 19 | $ echo y > dir1/y |
|
20 | 20 | $ echo z > dir2/z |
|
21 | 21 | |
|
22 | 22 | $ hg ci -Aqm 'initial' |
|
23 | 23 | $ hg debugsparse --include 'show' |
|
24 | 24 | $ ls -A |
|
25 | 25 | .hg |
|
26 | 26 | show |
|
27 | 27 | |
|
28 | 28 | $ tip=$(hg log -r . --template '{node}') |
|
29 | 29 | $ $NO_FALLBACK rhg files -r "$tip" |
|
30 | 30 | dir1/x |
|
31 | 31 | dir1/y |
|
32 | 32 | dir2/z |
|
33 | 33 | hide |
|
34 | 34 | show |
|
35 | 35 | $ $NO_FALLBACK rhg files |
|
36 | 36 | show |
|
37 | 37 | |
|
38 | 38 | $ $NO_FALLBACK rhg cat -r "$tip" hide |
|
39 | 39 | x |
|
40 | 40 | |
|
41 | 41 | $ cd .. |
|
42 | 42 | |
|
43 | 43 | We support most things when narrow is enabled, too, with a couple of caveats. |
|
44 | 44 | |
|
45 | 45 | $ . "$TESTDIR/narrow-library.sh" |
|
46 | 46 | $ real_hg=$RHG_FALLBACK_EXECUTABLE |
|
47 | 47 | |
|
48 | 48 | $ cat >> $HGRCPATH <<EOF |
|
49 | 49 | > [extensions] |
|
50 | 50 | > narrow= |
|
51 | 51 | > EOF |
|
52 | 52 | |
|
53 | 53 | $ hg clone --narrow ./repo-sparse repo-narrow --include dir1 |
|
54 | 54 | requesting all changes |
|
55 | 55 | adding changesets |
|
56 | 56 | adding manifests |
|
57 | 57 | adding file changes |
|
58 | 58 | added 1 changesets with 2 changes to 2 files |
|
59 | 59 | new changesets 6d714a4a2998 |
|
60 | 60 | updating to branch default |
|
61 | 61 | 2 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
62 | 62 | |
|
63 | 63 | $ cd repo-narrow |
|
64 | 64 | |
|
65 | 65 | $ $NO_FALLBACK rhg cat -r "$tip" dir1/x |
|
66 | 66 | x |
|
67 | 67 | $ "$real_hg" cat -r "$tip" dir1/x |
|
68 | 68 | x |
|
69 | 69 | |
|
70 | 70 | TODO: bad error message |
|
71 | 71 | |
|
72 | 72 | $ $NO_FALLBACK rhg cat -r "$tip" hide |
|
73 | 73 | abort: invalid revision identifier: 6d714a4a2998cbfd0620db44da58b749f6565d63 |
|
74 | 74 | [255] |
|
75 | 75 | $ "$real_hg" cat -r "$tip" hide |
|
76 | 76 | [1] |
|
77 | 77 | |
|
78 | 78 | A naive implementation of [rhg files] leaks the paths that are supposed to be |
|
79 | 79 | hidden by narrow, so we just fall back to hg. |
|
80 | 80 | |
|
81 | 81 | $ $NO_FALLBACK rhg files -r "$tip" |
|
82 | 82 | unsupported feature: rhg files -r <rev> is not supported in narrow clones |
|
83 | 83 | [252] |
|
84 | 84 | $ "$real_hg" files -r "$tip" |
|
85 | 85 | dir1/x |
|
86 | 86 | dir1/y |
|
87 | 87 | |
|
88 |
Hg status needs to do some filtering based on narrow spec |
|
|
89 | support it in rhg for narrow clones yet. | |
|
88 | Hg status needs to do some filtering based on narrow spec | |
|
90 | 89 | |
|
91 | 90 | $ mkdir dir2 |
|
92 | 91 | $ touch dir2/q |
|
93 | 92 | $ "$real_hg" status |
|
94 | 93 | $ $NO_FALLBACK rhg --config rhg.status=true status |
|
95 | unsupported feature: rhg status is not supported for narrow clones yet | |
|
96 | [252] | |
|
97 | 94 | |
|
98 | 95 | Adding "orphaned" index files: |
|
99 | 96 | |
|
100 | 97 | $ (cd ..; cp repo-sparse/.hg/store/data/hide.i repo-narrow/.hg/store/data/hide.i) |
|
101 | 98 | $ (cd ..; mkdir repo-narrow/.hg/store/data/dir2; cp repo-sparse/.hg/store/data/dir2/z.i repo-narrow/.hg/store/data/dir2/z.i) |
|
102 | 99 | $ "$real_hg" verify |
|
103 | 100 | checking changesets |
|
104 | 101 | checking manifests |
|
105 | 102 | crosschecking files in changesets and manifests |
|
106 | 103 | checking files |
|
107 | 104 | checked 1 changesets with 2 changes to 2 files |
|
108 | 105 | |
|
109 | 106 | $ "$real_hg" files -r "$tip" |
|
110 | 107 | dir1/x |
|
111 | 108 | dir1/y |
|
112 | 109 | |
|
113 | 110 | # TODO: even though [hg files] hides the orphaned dir2/z, [hg cat] still shows it. |
|
114 | 111 |
|
|
115 | 112 | # This is despite [hg verify] succeeding above. |
|
116 | 113 | |
|
117 | 114 | $ $NO_FALLBACK rhg cat -r "$tip" dir2/z |
|
118 | 115 | z |
|
119 | 116 | $ "$real_hg" cat -r "$tip" dir2/z |
|
120 | 117 | z |
General Comments 0
You need to be logged in to leave comments.
Login now