@@ -1,706 +1,756 @@
 // filepatterns.rs
 //
 // Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
 //
 // This software may be used and distributed according to the terms of the
 // GNU General Public License version 2 or any later version.
 
 //! Handling of Mercurial-specific patterns.
 
 use crate::{
     utils::{
         files::{canonical_path, get_bytes_from_path, get_path_from_bytes},
         hg_path::{path_to_hg_path_buf, HgPathBuf, HgPathError},
         SliceExt,
     },
     FastHashMap, PatternError,
 };
 use lazy_static::lazy_static;
 use regex::bytes::{NoExpand, Regex};
 use std::ops::Deref;
 use std::path::{Path, PathBuf};
 use std::vec::Vec;
 
 lazy_static! {
     static ref RE_ESCAPE: Vec<Vec<u8>> = {
         let mut v: Vec<Vec<u8>> = (0..=255).map(|byte| vec![byte]).collect();
         let to_escape = b"()[]{}?*+-|^$\\.&~# \t\n\r\x0b\x0c";
         for byte in to_escape {
             v[*byte as usize].insert(0, b'\\');
         }
         v
     };
 }
 
 /// These are matched in order
 const GLOB_REPLACEMENTS: &[(&[u8], &[u8])] =
     &[(b"*/", b"(?:.*/)?"), (b"*", b".*"), (b"", b"[^/]*")];
 
 /// Appended to the regexp of globs
 const GLOB_SUFFIX: &[u8; 7] = b"(?:/|$)";
 
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub enum PatternSyntax {
     /// A regular expression
     Regexp,
     /// Glob that matches at the front of the path
     RootGlob,
     /// Glob that matches at any suffix of the path (still anchored at
     /// slashes)
     Glob,
     /// a path relative to repository root, which is matched recursively
     Path,
     /// A path relative to cwd
     RelPath,
     /// an unrooted glob (*.rs matches Rust files in all dirs)
     RelGlob,
     /// A regexp that needn't match the start of a name
     RelRegexp,
     /// A path relative to repository root, which is matched non-recursively
     /// (will not match subdirectories)
     RootFiles,
     /// A file of patterns to read and include
     Include,
     /// A file of patterns to match against files under the same directory
     SubInclude,
     /// SubInclude with the result of parsing the included file
     ///
     /// Note: there is no ExpandedInclude because that expansion can be done
     /// in place by replacing the Include pattern by the included patterns.
     /// SubInclude requires more handling.
     ///
     /// Note: `Box` is used to minimize size impact on other enum variants
     ExpandedSubInclude(Box<SubInclude>),
 }
 
 /// Transforms a glob pattern into a regex
 fn glob_to_re(pat: &[u8]) -> Vec<u8> {
     let mut input = pat;
     let mut res: Vec<u8> = vec![];
     let mut group_depth = 0;
 
     while let Some((c, rest)) = input.split_first() {
         input = rest;
 
         match c {
             b'*' => {
                 for (source, repl) in GLOB_REPLACEMENTS {
                     if let Some(rest) = input.drop_prefix(source) {
                         input = rest;
                         res.extend(*repl);
                         break;
                     }
                 }
             }
             b'?' => res.extend(b"."),
             b'[' => {
                 match input.iter().skip(1).position(|b| *b == b']') {
                     None => res.extend(b"\\["),
                     Some(end) => {
                         // Account for the one we skipped
                         let end = end + 1;
 
                         res.extend(b"[");
 
                         for (i, b) in input[..end].iter().enumerate() {
                             if *b == b'!' && i == 0 {
                                 res.extend(b"^")
                             } else if *b == b'^' && i == 0 {
                                 res.extend(b"\\^")
                             } else if *b == b'\\' {
                                 res.extend(b"\\\\")
                             } else {
                                 res.push(*b)
                             }
                         }
                         res.extend(b"]");
                         input = &input[end + 1..];
                     }
                 }
             }
             b'{' => {
                 group_depth += 1;
                 res.extend(b"(?:")
             }
             b'}' if group_depth > 0 => {
                 group_depth -= 1;
                 res.extend(b")");
             }
             b',' if group_depth > 0 => res.extend(b"|"),
             b'\\' => {
                 let c = {
                     if let Some((c, rest)) = input.split_first() {
                         input = rest;
                         c
                     } else {
                         c
                     }
                 };
                 res.extend(&RE_ESCAPE[*c as usize])
             }
             _ => res.extend(&RE_ESCAPE[*c as usize]),
         }
     }
     res
 }
 
 fn escape_pattern(pattern: &[u8]) -> Vec<u8> {
     pattern
         .iter()
         .flat_map(|c| RE_ESCAPE[*c as usize].clone())
         .collect()
 }
 
 pub fn parse_pattern_syntax(
     kind: &[u8],
 ) -> Result<PatternSyntax, PatternError> {
     match kind {
         b"re:" => Ok(PatternSyntax::Regexp),
         b"path:" => Ok(PatternSyntax::Path),
         b"relpath:" => Ok(PatternSyntax::RelPath),
         b"rootfilesin:" => Ok(PatternSyntax::RootFiles),
         b"relglob:" => Ok(PatternSyntax::RelGlob),
         b"relre:" => Ok(PatternSyntax::RelRegexp),
         b"glob:" => Ok(PatternSyntax::Glob),
         b"rootglob:" => Ok(PatternSyntax::RootGlob),
         b"include:" => Ok(PatternSyntax::Include),
         b"subinclude:" => Ok(PatternSyntax::SubInclude),
         _ => Err(PatternError::UnsupportedSyntax(
             String::from_utf8_lossy(kind).to_string(),
         )),
     }
 }
 
+lazy_static! {
+    static ref FLAG_RE: Regex = Regex::new(r"^\(\?[aiLmsux]+\)").unwrap();
+}
+
 /// Builds the regex that corresponds to the given pattern.
 /// If within a `syntax: regexp` context, returns the pattern,
 /// otherwise, returns the corresponding regex.
 fn _build_single_regex(entry: &IgnorePattern) -> Vec<u8> {
     let IgnorePattern {
         syntax, pattern, ..
     } = entry;
     if pattern.is_empty() {
         return vec![];
     }
     match syntax {
         PatternSyntax::Regexp => pattern.to_owned(),
         PatternSyntax::RelRegexp => {
             // The `regex` crate accepts `**` while `re2` and Python's `re`
             // do not. Checking for `*` correctly triggers the same error all
             // engines.
             if pattern[0] == b'^'
                 || pattern[0] == b'*'
                 || pattern.starts_with(b".*")
             {
                 return pattern.to_owned();
             }
-            [&b".*"[..], pattern].concat()
+            match FLAG_RE.find(pattern) {
+                Some(mat) => {
+                    let s = mat.start();
+                    let e = mat.end();
+                    [
+                        &b"(?"[..],
+                        &pattern[s + 2..e - 1],
+                        &b":"[..],
+                        &b".*"[..],
+                        &pattern[e..],
+                        &b")"[..],
+                    ]
+                    .concat()
+                }
+                None => [&b".*"[..], pattern].concat(),
+            }
         }
         PatternSyntax::Path | PatternSyntax::RelPath => {
             if pattern == b"." {
                 return vec![];
             }
             [escape_pattern(pattern).as_slice(), b"(?:/|$)"].concat()
         }
         PatternSyntax::RootFiles => {
             let mut res = if pattern == b"." {
                 vec![]
             } else {
                 // Pattern is a directory name.
                 [escape_pattern(pattern).as_slice(), b"/"].concat()
             };
 
             // Anything after the pattern must be a non-directory.
             res.extend(b"[^/]+$");
             res
         }
         PatternSyntax::RelGlob => {
             let glob_re = glob_to_re(pattern);
             if let Some(rest) = glob_re.drop_prefix(b"[^/]*") {
                 [b".*", rest, GLOB_SUFFIX].concat()
             } else {
                 [b"(?:.*/)?", glob_re.as_slice(), GLOB_SUFFIX].concat()
             }
         }
         PatternSyntax::Glob | PatternSyntax::RootGlob => {
             [glob_to_re(pattern).as_slice(), GLOB_SUFFIX].concat()
         }
         PatternSyntax::Include
         | PatternSyntax::SubInclude
         | PatternSyntax::ExpandedSubInclude(_) => unreachable!(),
     }
 }
 
 const GLOB_SPECIAL_CHARACTERS: [u8; 7] =
     [b'*', b'?', b'[', b']', b'{', b'}', b'\\'];
 
 /// TODO support other platforms
 #[cfg(unix)]
 pub fn normalize_path_bytes(bytes: &[u8]) -> Vec<u8> {
     if bytes.is_empty() {
         return b".".to_vec();
     }
     let sep = b'/';
 
     let mut initial_slashes = bytes.iter().take_while(|b| **b == sep).count();
     if initial_slashes > 2 {
         // POSIX allows one or two initial slashes, but treats three or more
         // as single slash.
         initial_slashes = 1;
     }
     let components = bytes
         .split(|b| *b == sep)
         .filter(|c| !(c.is_empty() || c == b"."))
         .fold(vec![], |mut acc, component| {
             if component != b".."
                 || (initial_slashes == 0 && acc.is_empty())
                 || (!acc.is_empty() && acc[acc.len() - 1] == b"..")
             {
                 acc.push(component)
             } else if !acc.is_empty() {
                 acc.pop();
             }
             acc
         });
     let mut new_bytes = components.join(&sep);
 
     if initial_slashes > 0 {
         let mut buf: Vec<_> = (0..initial_slashes).map(|_| sep).collect();
         buf.extend(new_bytes);
         new_bytes = buf;
     }
     if new_bytes.is_empty() {
         b".".to_vec()
     } else {
         new_bytes
     }
 }
 
 /// Wrapper function to `_build_single_regex` that short-circuits 'exact' globs
 /// that don't need to be transformed into a regex.
 pub fn build_single_regex(
     entry: &IgnorePattern,
 ) -> Result<Option<Vec<u8>>, PatternError> {
     let IgnorePattern {
         pattern, syntax, ..
     } = entry;
     let pattern = match syntax {
         PatternSyntax::RootGlob
         | PatternSyntax::Path
         | PatternSyntax::RelGlob
         | PatternSyntax::RootFiles => normalize_path_bytes(&pattern),
         PatternSyntax::Include | PatternSyntax::SubInclude => {
             return Err(PatternError::NonRegexPattern(entry.clone()))
         }
         _ => pattern.to_owned(),
     };
     if *syntax == PatternSyntax::RootGlob
         && !pattern.iter().any(|b| GLOB_SPECIAL_CHARACTERS.contains(b))
     {
         Ok(None)
     } else {
         let mut entry = entry.clone();
         entry.pattern = pattern;
         Ok(Some(_build_single_regex(&entry)))
     }
 }
 
 lazy_static! {
     static ref SYNTAXES: FastHashMap<&'static [u8], &'static [u8]> = {
         let mut m = FastHashMap::default();
 
         m.insert(b"re".as_ref(), b"relre:".as_ref());
         m.insert(b"regexp".as_ref(), b"relre:".as_ref());
         m.insert(b"glob".as_ref(), b"relglob:".as_ref());
         m.insert(b"rootglob".as_ref(), b"rootglob:".as_ref());
         m.insert(b"include".as_ref(), b"include:".as_ref());
         m.insert(b"subinclude".as_ref(), b"subinclude:".as_ref());
         m.insert(b"path".as_ref(), b"path:".as_ref());
         m.insert(b"rootfilesin".as_ref(), b"rootfilesin:".as_ref());
         m
     };
 }
 
 #[derive(Debug)]
 pub enum PatternFileWarning {
     /// (file path, syntax bytes)
     InvalidSyntax(PathBuf, Vec<u8>),
     /// File path
     NoSuchFile(PathBuf),
 }
 
 pub fn parse_pattern_file_contents(
     lines: &[u8],
     file_path: &Path,
     default_syntax_override: Option<&[u8]>,
     warn: bool,
 ) -> Result<(Vec<IgnorePattern>, Vec<PatternFileWarning>), PatternError> {
     let comment_regex = Regex::new(r"((?:^|[^\\])(?:\\\\)*)#.*").unwrap();
 
     #[allow(clippy::trivial_regex)]
     let comment_escape_regex = Regex::new(r"\\#").unwrap();
     let mut inputs: Vec<IgnorePattern> = vec![];
     let mut warnings: Vec<PatternFileWarning> = vec![];
 
     let mut current_syntax =
         default_syntax_override.unwrap_or(b"relre:".as_ref());
 
     for (line_number, mut line) in lines.split(|c| *c == b'\n').enumerate() {
         let line_number = line_number + 1;
 
         let line_buf;
         if line.contains(&b'#') {
             if let Some(cap) = comment_regex.captures(line) {
                 line = &line[..cap.get(1).unwrap().end()]
             }
             line_buf = comment_escape_regex.replace_all(line, NoExpand(b"#"));
             line = &line_buf;
         }
 
         let mut line = line.trim_end();
 
         if line.is_empty() {
             continue;
         }
 
         if let Some(syntax) = line.drop_prefix(b"syntax:") {
             let syntax = syntax.trim();
 
             if let Some(rel_syntax) = SYNTAXES.get(syntax) {
                 current_syntax = rel_syntax;
             } else if warn {
                 warnings.push(PatternFileWarning::InvalidSyntax(
                     file_path.to_owned(),
                     syntax.to_owned(),
                 ));
             }
             continue;
         }
 
         let mut line_syntax: &[u8] = &current_syntax;
 
         for (s, rels) in SYNTAXES.iter() {
             if let Some(rest) = line.drop_prefix(rels) {
                 line_syntax = rels;
                 line = rest;
                 break;
             }
             if let Some(rest) = line.drop_prefix(&[s, &b":"[..]].concat()) {
                 line_syntax = rels;
                 line = rest;
                 break;
             }
         }
 
         inputs.push(IgnorePattern::new(
             parse_pattern_syntax(&line_syntax).map_err(|e| match e {
                 PatternError::UnsupportedSyntax(syntax) => {
                     PatternError::UnsupportedSyntaxInFile(
                         syntax,
                         file_path.to_string_lossy().into(),
                         line_number,
                     )
                 }
                 _ => e,
             })?,
             &line,
             file_path,
         ));
     }
     Ok((inputs, warnings))
 }
 
 pub fn read_pattern_file(
     file_path: &Path,
     warn: bool,
     inspect_pattern_bytes: &mut impl FnMut(&Path, &[u8]),
 ) -> Result<(Vec<IgnorePattern>, Vec<PatternFileWarning>), PatternError> {
     match std::fs::read(file_path) {
         Ok(contents) => {
             inspect_pattern_bytes(file_path, &contents);
             parse_pattern_file_contents(&contents, file_path, None, warn)
         }
         Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok((
             vec![],
             vec![PatternFileWarning::NoSuchFile(file_path.to_owned())],
         )),
         Err(e) => Err(e.into()),
     }
 }
 
 /// Represents an entry in an "ignore" file.
 #[derive(Debug, Eq, PartialEq, Clone)]
 pub struct IgnorePattern {
     pub syntax: PatternSyntax,
     pub pattern: Vec<u8>,
     pub source: PathBuf,
 }
 
 impl IgnorePattern {
     pub fn new(syntax: PatternSyntax, pattern: &[u8], source: &Path) -> Self {
         Self {
             syntax,
             pattern: pattern.to_owned(),
             source: source.to_owned(),
         }
     }
 }
 
 pub type PatternResult<T> = Result<T, PatternError>;
 
 /// Wrapper for `read_pattern_file` that also recursively expands `include:`
 /// and `subinclude:` patterns.
 ///
 /// The former are expanded in place, while `PatternSyntax::ExpandedSubInclude`
 /// is used for the latter to form a tree of patterns.
 pub fn get_patterns_from_file(
     pattern_file: &Path,
     root_dir: &Path,
     inspect_pattern_bytes: &mut impl FnMut(&Path, &[u8]),
 ) -> PatternResult<(Vec<IgnorePattern>, Vec<PatternFileWarning>)> {
     let (patterns, mut warnings) =
         read_pattern_file(pattern_file, true, inspect_pattern_bytes)?;
     let patterns = patterns
         .into_iter()
         .flat_map(|entry| -> PatternResult<_> {
             Ok(match &entry.syntax {
                 PatternSyntax::Include => {
                     let inner_include =
                         root_dir.join(get_path_from_bytes(&entry.pattern));
                     let (inner_pats, inner_warnings) = get_patterns_from_file(
                         &inner_include,
                         root_dir,
                         inspect_pattern_bytes,
                     )?;
                     warnings.extend(inner_warnings);
                     inner_pats
                 }
                 PatternSyntax::SubInclude => {
                     let mut sub_include = SubInclude::new(
                         &root_dir,
                         &entry.pattern,
                         &entry.source,
                     )?;
                     let (inner_patterns, inner_warnings) =
                         get_patterns_from_file(
                             &sub_include.path,
                             &sub_include.root,
                             inspect_pattern_bytes,
                         )?;
                     sub_include.included_patterns = inner_patterns;
                     warnings.extend(inner_warnings);
                     vec![IgnorePattern {
                         syntax: PatternSyntax::ExpandedSubInclude(Box::new(
                             sub_include,
                         )),
                         ..entry
                     }]
                 }
                 _ => vec![entry],
             })
         })
         .flatten()
         .collect();
 
     Ok((patterns, warnings))
 }
 
 /// Holds all the information needed to handle a `subinclude:` pattern.
 #[derive(Debug, PartialEq, Eq, Clone)]
 pub struct SubInclude {
     /// Will be used for repository (hg) paths that start with this prefix.
     /// It is relative to the current working directory, so comparing against
     /// repository paths is painless.
     pub prefix: HgPathBuf,
     /// The file itself, containing the patterns
     pub path: PathBuf,
     /// Folder in the filesystem where this it applies
     pub root: PathBuf,
 
     pub included_patterns: Vec<IgnorePattern>,
 }
 
 impl SubInclude {
     pub fn new(
         root_dir: &Path,
         pattern: &[u8],
         source: &Path,
     ) -> Result<SubInclude, HgPathError> {
         let normalized_source =
             normalize_path_bytes(&get_bytes_from_path(source));
 
         let source_root = get_path_from_bytes(&normalized_source);
         let source_root =
             source_root.parent().unwrap_or_else(|| source_root.deref());
 
         let path = source_root.join(get_path_from_bytes(pattern));
         let new_root = path.parent().unwrap_or_else(|| path.deref());
 
         let prefix = canonical_path(root_dir, root_dir, new_root)?;
 
         Ok(Self {
             prefix: path_to_hg_path_buf(prefix).and_then(|mut p| {
                 if !p.is_empty() {
                     p.push_byte(b'/');
                 }
                 Ok(p)
             })?,
             path: path.to_owned(),
             root: new_root.to_owned(),
             included_patterns: Vec::new(),
         })
     }
 }
 
 /// Separate and pre-process subincludes from other patterns for the "ignore"
 /// phase.
 pub fn filter_subincludes(
     ignore_patterns: Vec<IgnorePattern>,
 ) -> Result<(Vec<Box<SubInclude>>, Vec<IgnorePattern>), HgPathError> {
     let mut subincludes = vec![];
     let mut others = vec![];
 
     for pattern in ignore_patterns {
         if let PatternSyntax::ExpandedSubInclude(sub_include) = pattern.syntax
         {
             subincludes.push(sub_include);
         } else {
             others.push(pattern)
         }
     }
     Ok((subincludes, others))
 }
 
 #[cfg(test)]
 mod tests {
     use super::*;
     use pretty_assertions::assert_eq;
 
     #[test]
     fn escape_pattern_test() {
         let untouched =
             br#"!"%',/0123456789:;<=>@ABCDEFGHIJKLMNOPQRSTUVWXYZ_`abcdefghijklmnopqrstuvwxyz"#;
         assert_eq!(escape_pattern(untouched), untouched.to_vec());
         // All escape codes
         assert_eq!(
             escape_pattern(br#"()[]{}?*+-|^$\\.&~# \t\n\r\v\f"#),
             br#"\(\)\[\]\{\}\?\*\+\-\|\^\$\\\\\.\&\~\#\ \\t\\n\\r\\v\\f"#
                 .to_vec()
         );
     }
 
     #[test]
     fn glob_test() {
         assert_eq!(glob_to_re(br#"?"#), br#"."#);
         assert_eq!(glob_to_re(br#"*"#), br#"[^/]*"#);
         assert_eq!(glob_to_re(br#"**"#), br#".*"#);
         assert_eq!(glob_to_re(br#"**/a"#), br#"(?:.*/)?a"#);
         assert_eq!(glob_to_re(br#"a/**/b"#), br#"a/(?:.*/)?b"#);
         assert_eq!(glob_to_re(br#"[a*?!^][^b][!c]"#), br#"[a*?!^][\^b][^c]"#);
         assert_eq!(glob_to_re(br#"{a,b}"#), br#"(?:a|b)"#);
         assert_eq!(glob_to_re(br#".\*\?"#), br#"\.\*\?"#);
     }
 
     #[test]
     fn test_parse_pattern_file_contents() {
         let lines = b"syntax: glob\n*.elc";
 
         assert_eq!(
             parse_pattern_file_contents(
                 lines,
                 Path::new("file_path"),
                 None,
                 false
             )
             .unwrap()
             .0,
             vec![IgnorePattern::new(
                 PatternSyntax::RelGlob,
                 b"*.elc",
                 Path::new("file_path")
             )],
         );
 
         let lines = b"syntax: include\nsyntax: glob";
 
         assert_eq!(
             parse_pattern_file_contents(
                 lines,
                 Path::new("file_path"),
                 None,
                 false
             )
             .unwrap()
             .0,
             vec![]
         );
         let lines = b"glob:**.o";
         assert_eq!(
             parse_pattern_file_contents(
                 lines,
                 Path::new("file_path"),
                 None,
                 false
             )
             .unwrap()
             .0,
             vec![IgnorePattern::new(
                 PatternSyntax::RelGlob,
                 b"**.o",
                 Path::new("file_path")
             )]
         );
     }
 
     #[test]
     fn test_build_single_regex() {
         assert_eq!(
             build_single_regex(&IgnorePattern::new(
                 PatternSyntax::RelGlob,
                 b"rust/target/",
                 Path::new("")
             ))
             .unwrap(),
             Some(br"(?:.*/)?rust/target(?:/|$)".to_vec()),
         );
         assert_eq!(
             build_single_regex(&IgnorePattern::new(
                 PatternSyntax::Regexp,
                 br"rust/target/\d+",
                 Path::new("")
             ))
             .unwrap(),
             Some(br"rust/target/\d+".to_vec()),
         );
     }
 
     #[test]
     fn test_build_single_regex_shortcut() {
         assert_eq!(
             build_single_regex(&IgnorePattern::new(
                 PatternSyntax::RootGlob,
                 b"",
                 Path::new("")
             ))
             .unwrap(),
             None,
         );
         assert_eq!(
             build_single_regex(&IgnorePattern::new(
                 PatternSyntax::RootGlob,
                 b"whatever",
                 Path::new("")
             ))
             .unwrap(),
             None,
         );
         assert_eq!(
             build_single_regex(&IgnorePattern::new(
                 PatternSyntax::RootGlob,
                 b"*.o",
                 Path::new("")
             ))
             .unwrap(),
             Some(br"[^/]*\.o(?:/|$)".to_vec()),
         );
     }
+
+    #[test]
+    fn test_build_single_relregex() {
+        assert_eq!(
+            build_single_regex(&IgnorePattern::new(
+                PatternSyntax::RelRegexp,
+                b"^ba{2}r",
+                Path::new("")
+            ))
+            .unwrap(),
+            Some(b"^ba{2}r".to_vec()),
+        );
+        assert_eq!(
+            build_single_regex(&IgnorePattern::new(
+                PatternSyntax::RelRegexp,
+                b"ba{2}r",
+                Path::new("")
+            ))
+            .unwrap(),
+            Some(b".*ba{2}r".to_vec()),
+        );
+        assert_eq!(
+            build_single_regex(&IgnorePattern::new(
+                PatternSyntax::RelRegexp,
+                b"(?ia)ba{2}r",
+                Path::new("")
+            ))
+            .unwrap(),
+            Some(b"(?ia:.*ba{2}r)".to_vec()),
+        );
+    }
 }
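
The core of the hunk above is the new `RelRegexp` arm: when a `relre:` pattern starts with an inline flag group such as `(?i)`, the flags are kept but rewritten into a scoped, non-capturing group, so the implicit `.*` prefix still gets prepended and the flags cannot spill past the pattern. The following standalone sketch is illustrative only (it is not part of the patch): it works on `&str` instead of bytes, the helper name `relre_to_re` is hypothetical, and it assumes the `regex` crate that the module above already depends on.

use regex::Regex;

fn relre_to_re(pattern: &str) -> String {
    // Same shape as the patch's FLAG_RE: an inline flag group at the start.
    let flag_re = Regex::new(r"^\(\?[aiLmsux]+\)").unwrap();
    // Already-anchored or wildcard-prefixed patterns are passed through,
    // mirroring the early return in the patch.
    if pattern.starts_with('^') || pattern.starts_with('*') || pattern.starts_with(".*") {
        return pattern.to_owned();
    }
    match flag_re.find(pattern) {
        // `(?i)foo` becomes `(?i:.*foo)`: the flags survive, but are scoped to
        // a non-capturing group instead of applying to whatever follows.
        Some(m) => format!(
            "(?{}:.*{})",
            &pattern[m.start() + 2..m.end() - 1],
            &pattern[m.end()..]
        ),
        // No flag group: just relax the anchor by prepending `.*`, as before.
        None => format!(".*{}", pattern),
    }
}

fn main() {
    assert_eq!(relre_to_re(r"ba{2}r"), r".*ba{2}r");
    assert_eq!(relre_to_re(r"(?ia)ba{2}r"), r"(?ia:.*ba{2}r)");
    assert_eq!(relre_to_re(r"^ba{2}r"), r"^ba{2}r");
    println!("ok");
}

These three cases line up with the expectations of the added `test_build_single_relregex` test.
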
@@ -1,515 +1,514 | |||||
1 | #testcases dirstate-v1 dirstate-v2 |
|
1 | #testcases dirstate-v1 dirstate-v2 | |
2 |
|
2 | |||
3 | #if dirstate-v2 |
|
3 | #if dirstate-v2 | |
4 | $ cat >> $HGRCPATH << EOF |
|
4 | $ cat >> $HGRCPATH << EOF | |
5 | > [format] |
|
5 | > [format] | |
6 | > use-dirstate-v2=1 |
|
6 | > use-dirstate-v2=1 | |
7 | > [storage] |
|
7 | > [storage] | |
8 | > dirstate-v2.slow-path=allow |
|
8 | > dirstate-v2.slow-path=allow | |
9 | > EOF |
|
9 | > EOF | |
10 | #endif |
|
10 | #endif | |
11 |
|
11 | |||
12 | $ hg init ignorerepo |
|
12 | $ hg init ignorerepo | |
13 | $ cd ignorerepo |
|
13 | $ cd ignorerepo | |
14 |
|
14 | |||
15 | debugignore with no hgignore should be deterministic: |
|
15 | debugignore with no hgignore should be deterministic: | |
16 | $ hg debugignore |
|
16 | $ hg debugignore | |
17 | <nevermatcher> |
|
17 | <nevermatcher> | |
18 |
|
18 | |||
19 | Issue562: .hgignore requires newline at end: |
|
19 | Issue562: .hgignore requires newline at end: | |
20 |
|
20 | |||
21 | $ touch foo |
|
21 | $ touch foo | |
22 | $ touch bar |
|
22 | $ touch bar | |
23 | $ touch baz |
|
23 | $ touch baz | |
24 | $ cat > makeignore.py <<EOF |
|
24 | $ cat > makeignore.py <<EOF | |
25 | > f = open(".hgignore", "w") |
|
25 | > f = open(".hgignore", "w") | |
26 | > f.write("ignore\n") |
|
26 | > f.write("ignore\n") | |
27 | > f.write("foo\n") |
|
27 | > f.write("foo\n") | |
28 | > # No EOL here |
|
28 | > # No EOL here | |
29 | > f.write("bar") |
|
29 | > f.write("bar") | |
30 | > f.close() |
|
30 | > f.close() | |
31 | > EOF |
|
31 | > EOF | |
32 |
|
32 | |||
33 | $ "$PYTHON" makeignore.py |
|
33 | $ "$PYTHON" makeignore.py | |
34 |
|
34 | |||
35 | Should display baz only: |
|
35 | Should display baz only: | |
36 |
|
36 | |||
37 | $ hg status |
|
37 | $ hg status | |
38 | ? baz |
|
38 | ? baz | |
39 |
|
39 | |||
40 | $ rm foo bar baz .hgignore makeignore.py |
|
40 | $ rm foo bar baz .hgignore makeignore.py | |
41 |
|
41 | |||
42 | $ touch a.o |
|
42 | $ touch a.o | |
43 | $ touch a.c |
|
43 | $ touch a.c | |
44 | $ touch syntax |
|
44 | $ touch syntax | |
45 | $ mkdir dir |
|
45 | $ mkdir dir | |
46 | $ touch dir/a.o |
|
46 | $ touch dir/a.o | |
47 | $ touch dir/b.o |
|
47 | $ touch dir/b.o | |
48 | $ touch dir/c.o |
|
48 | $ touch dir/c.o | |
49 |
|
49 | |||
50 | $ hg add dir/a.o |
|
50 | $ hg add dir/a.o | |
51 | $ hg commit -m 0 |
|
51 | $ hg commit -m 0 | |
52 | $ hg add dir/b.o |
|
52 | $ hg add dir/b.o | |
53 |
|
53 | |||
54 | $ hg status |
|
54 | $ hg status | |
55 | A dir/b.o |
|
55 | A dir/b.o | |
56 | ? a.c |
|
56 | ? a.c | |
57 | ? a.o |
|
57 | ? a.o | |
58 | ? dir/c.o |
|
58 | ? dir/c.o | |
59 | ? syntax |
|
59 | ? syntax | |
60 |
|
60 | |||
61 | $ echo "*.o" > .hgignore |
|
61 | $ echo "*.o" > .hgignore | |
62 | $ hg status |
|
62 | $ hg status | |
63 | abort: $TESTTMP/ignorerepo/.hgignore: invalid pattern (relre): *.o (glob) |
|
63 | abort: $TESTTMP/ignorerepo/.hgignore: invalid pattern (relre): *.o (glob) | |
64 | [255] |
|
64 | [255] | |
65 |
|
65 | |||
66 | Test relre with flags (issue6759) |
|
66 | Test relre with flags (issue6759) | |
67 | --------------------------------- |
|
67 | --------------------------------- | |
68 |
|
68 | |||
69 | regexp with flag is the first one |
|
69 | regexp with flag is the first one | |
70 |
|
70 | |||
71 | $ echo 're:(?i)\.O$' > .hgignore |
|
71 | $ echo 're:(?i)\.O$' > .hgignore | |
72 | $ echo 're:.hgignore' >> .hgignore |
|
72 | $ echo 're:.hgignore' >> .hgignore | |
73 | $ hg status |
|
73 | $ hg status | |
74 | A dir/b.o |
|
74 | A dir/b.o | |
75 | ? a.c |
|
75 | ? a.c | |
76 | ? syntax |
|
76 | ? syntax | |
77 |
|
77 | |||
78 | regex with flag is not the first one |
|
78 | regex with flag is not the first one | |
79 |
|
79 | |||
80 | $ echo 're:.hgignore' > .hgignore |
|
80 | $ echo 're:.hgignore' > .hgignore | |
81 | $ echo 're:(?i)\.O$' >> .hgignore |
|
81 | $ echo 're:(?i)\.O$' >> .hgignore | |
82 | $ hg status |
|
82 | $ hg status | |
83 | A dir/b.o |
|
83 | A dir/b.o | |
84 | ? a.c |
|
84 | ? a.c | |
85 | ? syntax |
|
85 | ? syntax | |
86 |
|
86 | |||
87 | flag in a pattern should affect that pattern only |
|
87 | flag in a pattern should affect that pattern only | |
88 |
|
88 | |||
89 | $ echo 're:(?i)\.O$' > .hgignore |
|
89 | $ echo 're:(?i)\.O$' > .hgignore | |
90 | $ echo 're:.HGIGNORE' >> .hgignore |
|
90 | $ echo 're:.HGIGNORE' >> .hgignore | |
91 | $ hg status |
|
91 | $ hg status | |
92 | A dir/b.o |
|
92 | A dir/b.o | |
93 |
? .hgignore |
|
93 | ? .hgignore | |
94 | ? .hgignore (rust missing-correct-output !) |
|
|||
95 | ? a.c |
|
94 | ? a.c | |
96 | ? syntax |
|
95 | ? syntax | |
97 |
|
96 | |||
98 | $ echo 're:.HGIGNORE' > .hgignore |
|
97 | $ echo 're:.HGIGNORE' > .hgignore | |
99 | $ echo 're:(?i)\.O$' >> .hgignore |
|
98 | $ echo 're:(?i)\.O$' >> .hgignore | |
100 | $ hg status |
|
99 | $ hg status | |
101 | A dir/b.o |
|
100 | A dir/b.o | |
102 | ? .hgignore |
|
101 | ? .hgignore | |
103 | ? a.c |
|
102 | ? a.c | |
104 | ? syntax |
|
103 | ? syntax | |
105 |
|
104 | |||
106 |
|
105 | |||
107 | further testing |
|
106 | further testing | |
108 | --------------- |
|
107 | --------------- | |
109 |
|
108 | |||
110 | $ echo 're:^(?!a).*\.o$' > .hgignore |
|
109 | $ echo 're:^(?!a).*\.o$' > .hgignore | |
111 | $ hg status |
|
110 | $ hg status | |
112 | A dir/b.o |
|
111 | A dir/b.o | |
113 | ? .hgignore |
|
112 | ? .hgignore | |
114 | ? a.c |
|
113 | ? a.c | |
115 | ? a.o |
|
114 | ? a.o | |
116 | ? syntax |
|
115 | ? syntax | |
117 | #if rhg |
|
116 | #if rhg | |
118 | $ hg status --config rhg.on-unsupported=abort |
|
117 | $ hg status --config rhg.on-unsupported=abort | |
119 | unsupported feature: Unsupported syntax regex parse error: |
|
118 | unsupported feature: Unsupported syntax regex parse error: | |
120 | ^(?:^(?!a).*\.o$) |
|
119 | ^(?:^(?!a).*\.o$) | |
121 | ^^^ |
|
120 | ^^^ | |
122 | error: look-around, including look-ahead and look-behind, is not supported |
|
121 | error: look-around, including look-ahead and look-behind, is not supported | |
123 | [252] |
|
122 | [252] | |
124 | #endif |
|
123 | #endif | |
125 |
126 | Ensure given files are relative to cwd
127 |
128 | $ echo "dir/.*\.o" > .hgignore
129 | $ hg status -i
130 | I dir/c.o
131 |
132 | $ hg debugignore dir/c.o dir/missing.o
133 | dir/c.o is ignored
134 | (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: 'dir/.*\.o') (glob)
135 | dir/missing.o is ignored
136 | (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: 'dir/.*\.o') (glob)
137 | $ cd dir
138 | $ hg debugignore c.o missing.o
139 | c.o is ignored
140 | (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: 'dir/.*\.o') (glob)
141 | missing.o is ignored
142 | (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: 'dir/.*\.o') (glob)
143 |
144 | For icasefs, inexact matches also work, except for missing files
145 |
146 | #if icasefs
147 | $ hg debugignore c.O missing.O
148 | c.o is ignored
149 | (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: 'dir/.*\.o') (glob)
150 | missing.O is not ignored
151 | #endif
152 |
153 | $ cd ..
154 |
155 | $ echo ".*\.o" > .hgignore
156 | $ hg status
157 | A dir/b.o
158 | ? .hgignore
159 | ? a.c
160 | ? syntax
161 |
162 | Ensure that comments work:
163 |
164 | $ touch 'foo#bar' 'quux#' 'quu0#'
165 | #if no-windows
166 | $ touch 'baz\' 'baz\wat' 'ba0\#wat' 'ba1\\' 'ba1\\wat' 'quu0\'
167 | #endif
168 |
169 | $ cat <<'EOF' >> .hgignore
170 | > # full-line comment
171 | > # whitespace-only comment line
172 | > syntax# pattern, no whitespace, then comment
173 | > a.c # pattern, then whitespace, then comment
174 | > baz\\# # (escaped) backslash, then comment
175 | > ba0\\\#w # (escaped) backslash, escaped comment character, then comment
176 | > ba1\\\\# # (escaped) backslashes, then comment
177 | > foo\#b # escaped comment character
178 | > quux\## escaped comment character at end of name
179 | > EOF
180 | $ hg status
181 | A dir/b.o
182 | ? .hgignore
183 | ? quu0#
184 | ? quu0\ (no-windows !)
185 |
186 | $ cat <<'EOF' > .hgignore
187 | > .*\.o
188 | > syntax: glob
189 | > syntax# pattern, no whitespace, then comment
190 | > a.c # pattern, then whitespace, then comment
191 | > baz\\#* # (escaped) backslash, then comment
192 | > ba0\\\#w* # (escaped) backslash, escaped comment character, then comment
193 | > ba1\\\\#* # (escaped) backslashes, then comment
194 | > foo\#b* # escaped comment character
195 | > quux\## escaped comment character at end of name
196 | > quu0[\#]# escaped comment character inside [...]
197 | > EOF
198 | $ hg status
199 | A dir/b.o
200 | ? .hgignore
201 | ? ba1\\wat (no-windows !)
202 | ? baz\wat (no-windows !)
203 | ? quu0\ (no-windows !)
204 |
205 | $ rm 'foo#bar' 'quux#' 'quu0#'
206 | #if no-windows
207 | $ rm 'baz\' 'baz\wat' 'ba0\#wat' 'ba1\\' 'ba1\\wat' 'quu0\'
208 | #endif
209 |
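A sketch of the comment rule these two listings exercise, on the assumption that a '#' starts a comment only when preceded by an even number of backslashes, and that '\#' in the kept part unescapes to a literal '#'. The helper below is illustrative, not Mercurial's parser:

    // Sketch only (assumed logic): strip an unescaped '#' comment and unescape
    // "\#" in whatever remains of the pattern.
    fn strip_comment(line: &[u8]) -> Vec<u8> {
        // Find the first '#' preceded by an even number of backslashes.
        let mut cut = line.len();
        for (i, &b) in line.iter().enumerate() {
            if b == b'#' {
                let escapes =
                    line[..i].iter().rev().take_while(|&&c| c == b'\\').count();
                if escapes % 2 == 0 {
                    cut = i;
                    break;
                }
            }
        }
        let mut kept = line[..cut].to_vec();
        // Drop the whitespace that separated the pattern from the comment.
        while kept.last().map_or(false, |&c| c == b' ' || c == b'\t') {
            kept.pop();
        }
        // Unescape "\#" so a pattern can still contain a literal '#'.
        let mut out = Vec::with_capacity(kept.len());
        let mut i = 0;
        while i < kept.len() {
            if kept[i] == b'\\' && kept.get(i + 1) == Some(&b'#') {
                out.push(b'#');
                i += 2;
            } else {
                out.push(kept[i]);
                i += 1;
            }
        }
        out
    }

    fn main() {
        // "foo\#b" keeps an escaped '#', the trailing comment is dropped
        assert_eq!(
            strip_comment(br"foo\#b # escaped comment character"),
            b"foo#b".to_vec()
        );
        // "baz\\#" ends in an escaped backslash, so the '#' starts the comment
        assert_eq!(strip_comment(br"baz\\# # comment"), b"baz\\\\".to_vec());
    }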
210 | Check that '^\.' does not ignore the root directory:
211 |
212 | $ echo "^\." > .hgignore
213 | $ hg status
214 | A dir/b.o
215 | ? a.c
216 | ? a.o
217 | ? dir/c.o
218 | ? syntax
219 |
220 | Test that patterns from ui.ignore options are read:
221 |
222 | $ echo > .hgignore
223 | $ cat >> $HGRCPATH << EOF
224 | > [ui]
225 | > ignore.other = $TESTTMP/ignorerepo/.hg/testhgignore
226 | > EOF
227 | $ echo "glob:**.o" > .hg/testhgignore
228 | $ hg status
229 | A dir/b.o
230 | ? .hgignore
231 | ? a.c
232 | ? syntax
233 |
234 | empty out testhgignore
235 | $ echo > .hg/testhgignore
236 |
237 | Test relative ignore path (issue4473):
238 |
239 | $ cat >> $HGRCPATH << EOF
240 | > [ui]
241 | > ignore.relative = .hg/testhgignorerel
242 | > EOF
243 | $ echo "glob:*.o" > .hg/testhgignorerel
244 | $ cd dir
245 | $ hg status
246 | A dir/b.o
247 | ? .hgignore
248 | ? a.c
249 | ? syntax
250 | $ hg debugignore
251 | <includematcher includes='.*\\.o(?:/|$)'>
252 |
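A quick look at what the include regex printed above matches, assuming the regex crate; the appended '(?:/|$)' is what lets a match on a directory cover everything beneath it:

    fn main() {
        // assumes the external `regex` crate as a dependency
        let re = regex::Regex::new(r".*\.o(?:/|$)").unwrap();
        assert!(re.is_match("dir/b.o"));       // a matching file
        assert!(re.is_match("some.o/nested")); // anything under a matching directory
        assert!(!re.is_match("a.c"));          // unrelated paths stay visible
    }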
253 | $ cd ..
254 | $ echo > .hg/testhgignorerel
255 | $ echo "syntax: glob" > .hgignore
256 | $ echo "re:.*\.o" >> .hgignore
257 | $ hg status
258 | A dir/b.o
259 | ? .hgignore
260 | ? a.c
261 | ? syntax
262 |
263 | $ echo "syntax: invalid" > .hgignore
264 | $ hg status
265 | $TESTTMP/ignorerepo/.hgignore: ignoring invalid syntax 'invalid'
266 | A dir/b.o
267 | ? .hgignore
268 | ? a.c
269 | ? a.o
270 | ? dir/c.o
271 | ? syntax
272 |
273 | $ echo "syntax: glob" > .hgignore
274 | $ echo "*.o" >> .hgignore
275 | $ hg status
276 | A dir/b.o
277 | ? .hgignore
278 | ? a.c
279 | ? syntax
280 |
281 | $ echo "relglob:syntax*" > .hgignore
282 | $ hg status
283 | A dir/b.o
284 | ? .hgignore
285 | ? a.c
286 | ? a.o
287 | ? dir/c.o
288 |
289 | $ echo "relglob:*" > .hgignore
290 | $ hg status
291 | A dir/b.o
292 |
293 | $ cd dir
294 | $ hg status .
295 | A b.o
296 |
297 | $ hg debugignore
298 | <includematcher includes='.*(?:/|$)'>
299 |
300 | $ hg debugignore b.o
301 | b.o is ignored
302 | (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: '*') (glob)
303 |
304 | $ cd ..
305 |
306 | Check patterns that match only the directory
307 |
308 | "(fsmonitor !)" below assumes that fsmonitor is enabled with
309 | "walk_on_invalidate = false" (default), which doesn't involve
310 | re-walking whole repository at detection of .hgignore change.
311 |
312 | $ echo "^dir\$" > .hgignore
313 | $ hg status
314 | A dir/b.o
315 | ? .hgignore
316 | ? a.c
317 | ? a.o
318 | ? dir/c.o (fsmonitor !)
319 | ? syntax
320 |
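Worth noting why dir/c.o drops out of the default listing even though '^dir$' does not match that path: status simply does not descend into a directory that is itself ignored, while fsmonitor answers from its cached walk. A standalone check of the pattern itself, assuming the regex crate:

    fn main() {
        // assumes the external `regex` crate as a dependency
        let re = regex::Regex::new(r"^dir$").unwrap();
        assert!(re.is_match("dir"));      // the directory name itself
        assert!(!re.is_match("dir/c.o")); // skipped by the walk, not by the regex
    }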
321 | Check recursive glob pattern matches no directories (dir/**/c.o matches dir/c.o)
322 |
323 | $ echo "syntax: glob" > .hgignore
324 | $ echo "dir/**/c.o" >> .hgignore
325 | $ touch dir/c.o
326 | $ mkdir dir/subdir
327 | $ touch dir/subdir/c.o
328 | $ hg status
329 | A dir/b.o
330 | ? .hgignore
331 | ? a.c
332 | ? a.o
333 | ? syntax
334 | $ hg debugignore a.c
335 | a.c is not ignored
336 | $ hg debugignore dir/c.o
337 | dir/c.o is ignored
338 | (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 2: 'dir/**/c.o') (glob)
339 |
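Assuming '**/' translates to an optional run of directories, roughly 'dir/(?:.*/)?c\.o' plus the usual suffix, the zero-directory case is what makes dir/c.o itself match; a hedged sketch with the regex crate:

    fn main() {
        // assumed translation, for illustration only
        let re = regex::Regex::new(r"dir/(?:.*/)?c\.o(?:/|$)").unwrap();
        assert!(re.is_match("dir/c.o"));        // "**/" may match zero levels
        assert!(re.is_match("dir/subdir/c.o")); // ...or any number of levels
    }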
340 | Check rooted globs
341 |
342 | $ hg purge --all --config extensions.purge=
343 | $ echo "syntax: rootglob" > .hgignore
344 | $ echo "a/*.ext" >> .hgignore
345 | $ for p in a b/a aa; do mkdir -p $p; touch $p/b.ext; done
346 | $ hg status -A 'set:**.ext'
347 | ? aa/b.ext
348 | ? b/a/b.ext
349 | I a/b.ext
350 |
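The anchoring being tested, sketched under the assumption that a rooted glob compiles to a regex tied to the repository root (the translation below is illustrative, not the exact one hg uses):

    fn main() {
        // assumes the external `regex` crate as a dependency
        let rooted = regex::Regex::new(r"^a/[^/]*\.ext(?:/|$)").unwrap();
        assert!(rooted.is_match("a/b.ext"));    // anchored at the root: ignored
        assert!(!rooted.is_match("aa/b.ext"));  // different top-level directory
        assert!(!rooted.is_match("b/a/b.ext")); // not at the root, so not ignored
    }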
351 | Check using 'include:' in ignore file
352 |
353 | $ hg purge --all --config extensions.purge=
354 | $ touch foo.included
355 |
356 | $ echo ".*.included" > otherignore
357 | $ hg status -I "include:otherignore"
358 | ? foo.included
359 |
360 | $ echo "include:otherignore" >> .hgignore
361 | $ hg status
362 | A dir/b.o
363 | ? .hgignore
364 | ? otherignore
365 |
366 | Check recursive uses of 'include:'
367 |
368 | $ echo "include:nested/ignore" >> otherignore
369 | $ mkdir nested nested/more
370 | $ echo "glob:*ignore" > nested/ignore
371 | $ echo "rootglob:a" >> nested/ignore
372 | $ touch a nested/a nested/more/a
373 | $ hg status
374 | A dir/b.o
375 | ? nested/a
376 | ? nested/more/a
377 | $ rm a nested/a nested/more/a
378 |
379 | $ cp otherignore goodignore
380 | $ echo "include:badignore" >> otherignore
381 | $ hg status
382 | skipping unreadable pattern file 'badignore': $ENOENT$
383 | A dir/b.o
384 |
385 | $ mv goodignore otherignore
386 |
387 | Check using 'include:' while in a non-root directory
388 |
389 | $ cd ..
390 | $ hg -R ignorerepo status
391 | A dir/b.o
392 | $ cd ignorerepo
393 |
394 | Check including subincludes
395 |
396 | $ hg revert -q --all
397 | $ hg purge --all --config extensions.purge=
398 | $ echo ".hgignore" > .hgignore
399 | $ mkdir dir1 dir2
400 | $ touch dir1/file1 dir1/file2 dir2/file1 dir2/file2
401 | $ echo "subinclude:dir2/.hgignore" >> .hgignore
402 | $ echo "glob:file*2" > dir2/.hgignore
403 | $ hg status
404 | ? dir1/file1
405 | ? dir1/file2
406 | ? dir2/file1
407 |
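The scoping rule exercised here, as a hypothetical helper rather than hg's API: rules pulled in through 'subinclude:' apply only to paths inside the directory that holds the sub-ignore file, which is why dir2/file2 is ignored while dir1/file2 is not at this point:

    // Hypothetical helper for illustration; names and signature are assumptions.
    fn under(prefix: &str, path: &str) -> bool {
        path.strip_prefix(prefix).map_or(false, |rest| rest.starts_with('/'))
    }

    fn main() {
        assert!(under("dir2", "dir2/file2"));  // dir2/.hgignore's rules apply
        assert!(!under("dir2", "dir1/file2")); // out of scope for that subinclude
    }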
408 | Check including subincludes with other patterns
409 |
410 | $ echo "subinclude:dir1/.hgignore" >> .hgignore
411 |
412 | $ mkdir dir1/subdir
413 | $ touch dir1/subdir/file1
414 | $ echo "rootglob:f?le1" > dir1/.hgignore
415 | $ hg status
416 | ? dir1/file2
417 | ? dir1/subdir/file1
418 | ? dir2/file1
419 | $ rm dir1/subdir/file1
420 |
421 | $ echo "regexp:f.le1" > dir1/.hgignore
422 | $ hg status
423 | ? dir1/file2
424 | ? dir2/file1
425 |
426 | Check multiple levels of sub-ignores
427 |
428 | $ touch dir1/subdir/subfile1 dir1/subdir/subfile3 dir1/subdir/subfile4
429 | $ echo "subinclude:subdir/.hgignore" >> dir1/.hgignore
430 | $ echo "glob:subfil*3" >> dir1/subdir/.hgignore
431 |
432 | $ hg status
433 | ? dir1/file2
434 | ? dir1/subdir/subfile4
435 | ? dir2/file1
436 |
437 | Check include subignore at the same level
438 |
439 | $ mv dir1/subdir/.hgignore dir1/.hgignoretwo
440 | $ echo "regexp:f.le1" > dir1/.hgignore
441 | $ echo "subinclude:.hgignoretwo" >> dir1/.hgignore
442 | $ echo "glob:file*2" > dir1/.hgignoretwo
443 |
444 | $ hg status | grep file2
445 | [1]
446 | $ hg debugignore dir1/file2
447 | dir1/file2 is ignored
448 | (ignore rule in dir2/.hgignore, line 1: 'file*2')
449 |
450 | #if windows
451 |
452 | Windows paths are accepted on input
453 |
454 | $ rm dir1/.hgignore
455 | $ echo "dir1/file*" >> .hgignore
456 | $ hg debugignore "dir1\file2"
457 | dir1/file2 is ignored
458 | (ignore rule in $TESTTMP\ignorerepo\.hgignore, line 4: 'dir1/file*')
459 | $ hg up -qC .
460 |
461 | #endif
462 |
463 | #if dirstate-v2 rust
464 |
465 | Check the hash of ignore patterns written in the dirstate
466 | This is an optimization that is only relevant when using the Rust extensions
467 |
468 | $ cat_filename_and_hash () {
469 | > for i in "$@"; do
470 | > printf "$i "
471 | > cat "$i" | "$TESTDIR"/f --raw-sha1 | sed 's/^raw-sha1=//'
472 | > done
473 | > }
474 | $ hg status > /dev/null
475 | $ cat_filename_and_hash .hg/testhgignore .hg/testhgignorerel .hgignore dir2/.hgignore dir1/.hgignore dir1/.hgignoretwo | $TESTDIR/f --sha1
476 | sha1=c0beb296395d48ced8e14f39009c4ea6e409bfe6
477 | $ hg debugstate --docket | grep ignore
478 | ignore pattern hash: c0beb296395d48ced8e14f39009c4ea6e409bfe6
479 |
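A sketch of the idea behind this check, with assumed names and the external sha1 crate rather than hg-core's actual code: fold each ignore file's name and content into one digest, store it in the dirstate docket, and trust cached ignore results only while the stored digest still matches.

    // Cargo dependency (assumed): sha1 = "0.10"
    use sha1::{Digest, Sha1};

    // Mirrors the shell helper above: name, a space, then the raw sha1 of the
    // file's content, all folded into a single digest. The framing bytes are
    // an assumption, not the on-disk format.
    fn ignore_patterns_hash(files: &[(&str, &[u8])]) -> Vec<u8> {
        let mut hasher = Sha1::new();
        for (name, content) in files {
            hasher.update(name.as_bytes());
            hasher.update(b" ");
            hasher.update(Sha1::digest(content));
            hasher.update(b"\n");
        }
        hasher.finalize().to_vec()
    }

    fn main() {
        let files: &[(&str, &[u8])] = &[
            (".hgignore", &b"*.o\n"[..]),
            ("dir2/.hgignore", &b"file*2\n"[..]),
        ];
        // Any edit to an ignore file changes the digest, which invalidates the
        // cached "ignored" information stored alongside the dirstate.
        for byte in ignore_patterns_hash(files) {
            print!("{byte:02x}");
        }
        println!();
    }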
480 | $ echo rel > .hg/testhgignorerel
481 | $ hg status > /dev/null
482 | $ cat_filename_and_hash .hg/testhgignore .hg/testhgignorerel .hgignore dir2/.hgignore dir1/.hgignore dir1/.hgignoretwo | $TESTDIR/f --sha1
483 | sha1=b8e63d3428ec38abc68baa27631516d5ec46b7fa
484 | $ hg debugstate --docket | grep ignore
485 | ignore pattern hash: b8e63d3428ec38abc68baa27631516d5ec46b7fa
486 | $ cd ..
487 |
488 | Check that the hash depends on the source of the hgignore patterns
489 | (otherwise the context is lost and things like subinclude are cached improperly)
490 |
491 | $ hg init ignore-collision
492 | $ cd ignore-collision
493 | $ echo > .hg/testhgignorerel
494 |
495 | $ mkdir dir1/ dir1/subdir
496 | $ touch dir1/subdir/f dir1/subdir/ignored1
497 | $ echo 'ignored1' > dir1/.hgignore
498 |
499 | $ mkdir dir2 dir2/subdir
500 | $ touch dir2/subdir/f dir2/subdir/ignored2
501 | $ echo 'ignored2' > dir2/.hgignore
502 | $ echo 'subinclude:dir2/.hgignore' >> .hgignore
503 | $ echo 'subinclude:dir1/.hgignore' >> .hgignore
504 |
505 | $ hg commit -Aqm_
506 |
507 | $ > dir1/.hgignore
508 | $ echo 'ignored' > dir2/.hgignore
509 | $ echo 'ignored1' >> dir2/.hgignore
510 | $ hg status
511 | M dir1/.hgignore
512 | M dir2/.hgignore
513 | ? dir1/subdir/ignored1
514 |
515 | #endif