@@ -1,657 +1,660 @@
// filepatterns.rs
//
// Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
//
// This software may be used and distributed according to the terms of the
// GNU General Public License version 2 or any later version.

//! Handling of Mercurial-specific patterns.

use crate::{
    utils::{
        files::{canonical_path, get_bytes_from_path, get_path_from_bytes},
        hg_path::{path_to_hg_path_buf, HgPathBuf, HgPathError},
        SliceExt,
    },
    FastHashMap, PatternError,
};
use lazy_static::lazy_static;
use regex::bytes::{NoExpand, Regex};
use std::fs::File;
use std::io::Read;
use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::vec::Vec;

lazy_static! {
    static ref RE_ESCAPE: Vec<Vec<u8>> = {
        let mut v: Vec<Vec<u8>> = (0..=255).map(|byte| vec![byte]).collect();
        let to_escape = b"()[]{}?*+-|^$\\.&~# \t\n\r\x0b\x0c";
        for byte in to_escape {
            v[*byte as usize].insert(0, b'\\');
        }
        v
    };
}

/// These are matched in order
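/// by `glob_to_re` once a leading `*` has been consumed: `**/` maps to
/// `(?:.*/)?`, a second `*` maps to `.*` and a lone `*` maps to `[^/]*`.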
const GLOB_REPLACEMENTS: &[(&[u8], &[u8])] =
    &[(b"*/", b"(?:.*/)?"), (b"*", b".*"), (b"", b"[^/]*")];

/// Appended to the regexp of globs
const GLOB_SUFFIX: &[u8; 7] = b"(?:/|$)";

#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum PatternSyntax {
    /// A regular expression
    Regexp,
    /// Glob that matches at the front of the path
    RootGlob,
    /// Glob that matches at any suffix of the path (still anchored at
    /// slashes)
    Glob,
    /// A path relative to repository root, which is matched recursively
    Path,
    /// A path relative to cwd
    RelPath,
    /// An unrooted glob (*.rs matches Rust files in all dirs)
    RelGlob,
    /// A regexp that needn't match the start of a name
    RelRegexp,
    /// A path relative to repository root, which is matched non-recursively
    /// (will not match subdirectories)
    RootFiles,
    /// A file of patterns to read and include
    Include,
    /// A file of patterns to match against files under the same directory
    SubInclude,
}

/// Transforms a glob pattern into a regex
fn glob_to_re(pat: &[u8]) -> Vec<u8> {
    let mut input = pat;
    let mut res: Vec<u8> = vec![];
    let mut group_depth = 0;

    while let Some((c, rest)) = input.split_first() {
        input = rest;

        match c {
            b'*' => {
                for (source, repl) in GLOB_REPLACEMENTS {
                    if let Some(rest) = input.drop_prefix(source) {
                        input = rest;
                        res.extend(*repl);
                        break;
                    }
                }
            }
            b'?' => res.extend(b"."),
            b'[' => {
                match input.iter().skip(1).position(|b| *b == b']') {
                    None => res.extend(b"\\["),
                    Some(end) => {
                        // Account for the one we skipped
                        let end = end + 1;

                        res.extend(b"[");

                        for (i, b) in input[..end].iter().enumerate() {
                            if *b == b'!' && i == 0 {
                                res.extend(b"^")
                            } else if *b == b'^' && i == 0 {
                                res.extend(b"\\^")
                            } else if *b == b'\\' {
                                res.extend(b"\\\\")
                            } else {
                                res.push(*b)
                            }
                        }
                        res.extend(b"]");
                        input = &input[end + 1..];
                    }
                }
            }
            b'{' => {
                group_depth += 1;
                res.extend(b"(?:")
            }
            b'}' if group_depth > 0 => {
                group_depth -= 1;
                res.extend(b")");
            }
            b',' if group_depth > 0 => res.extend(b"|"),
            b'\\' => {
                let c = {
                    if let Some((c, rest)) = input.split_first() {
                        input = rest;
                        c
                    } else {
                        c
                    }
                };
                res.extend(&RE_ESCAPE[*c as usize])
            }
            _ => res.extend(&RE_ESCAPE[*c as usize]),
        }
    }
    res
}

fn escape_pattern(pattern: &[u8]) -> Vec<u8> {
    pattern
        .iter()
        .flat_map(|c| RE_ESCAPE[*c as usize].clone())
        .collect()
}

pub fn parse_pattern_syntax(
    kind: &[u8],
) -> Result<PatternSyntax, PatternError> {
    match kind {
        b"re:" => Ok(PatternSyntax::Regexp),
        b"path:" => Ok(PatternSyntax::Path),
        b"relpath:" => Ok(PatternSyntax::RelPath),
        b"rootfilesin:" => Ok(PatternSyntax::RootFiles),
        b"relglob:" => Ok(PatternSyntax::RelGlob),
        b"relre:" => Ok(PatternSyntax::RelRegexp),
        b"glob:" => Ok(PatternSyntax::Glob),
        b"rootglob:" => Ok(PatternSyntax::RootGlob),
        b"include:" => Ok(PatternSyntax::Include),
        b"subinclude:" => Ok(PatternSyntax::SubInclude),
        _ => Err(PatternError::UnsupportedSyntax(
            String::from_utf8_lossy(kind).to_string(),
        )),
    }
}

/// Builds the regex that corresponds to the given pattern.
/// If within a `syntax: regexp` context, returns the pattern,
/// otherwise, returns the corresponding regex.
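/// For instance, `path:foo/bar` becomes `foo/bar(?:/|$)`, `rootfilesin:dir`
/// becomes `dir/[^/]+$` and `glob:*.o` becomes `[^/]*\.o(?:/|$)`.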
fn _build_single_regex(entry: &IgnorePattern) -> Vec<u8> {
    let IgnorePattern {
        syntax, pattern, ..
    } = entry;
    if pattern.is_empty() {
        return vec![];
    }
    match syntax {
        PatternSyntax::Regexp => pattern.to_owned(),
        PatternSyntax::RelRegexp => {
            // The `regex` crate accepts `**` while `re2` and Python's `re`
            // do not. Checking for `*` correctly triggers the same error in
            // all engines.
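            // Patterns that are already anchored with `^` or that already
            // start with a wildcard are passed through unchanged; anything
            // else gets a `.*` prefix below.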
            if pattern[0] == b'^'
                || pattern[0] == b'*'
                || pattern.starts_with(b".*")
            {
                return pattern.to_owned();
            }
            [&b".*"[..], pattern].concat()
        }
        PatternSyntax::Path | PatternSyntax::RelPath => {
            if pattern == b"." {
                return vec![];
            }
            [escape_pattern(pattern).as_slice(), b"(?:/|$)"].concat()
        }
        PatternSyntax::RootFiles => {
            let mut res = if pattern == b"." {
                vec![]
            } else {
                // Pattern is a directory name.
                [escape_pattern(pattern).as_slice(), b"/"].concat()
            };

            // Anything after the pattern must be a non-directory.
            res.extend(b"[^/]+$");
            res
        }
        PatternSyntax::RelGlob => {
            let glob_re = glob_to_re(pattern);
            if let Some(rest) = glob_re.drop_prefix(b"[^/]*") {
                [b".*", rest, GLOB_SUFFIX].concat()
            } else {
                [b"(?:.*/)?", glob_re.as_slice(), GLOB_SUFFIX].concat()
            }
        }
        PatternSyntax::Glob | PatternSyntax::RootGlob => {
            [glob_to_re(pattern).as_slice(), GLOB_SUFFIX].concat()
        }
        PatternSyntax::Include | PatternSyntax::SubInclude => unreachable!(),
    }
}

const GLOB_SPECIAL_CHARACTERS: [u8; 7] =
    [b'*', b'?', b'[', b']', b'{', b'}', b'\\'];

/// TODO support other platforms
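/// Collapses `.` components, resolves `..` where possible and squeezes
/// repeated slashes: for example `b"foo/./bar//baz/"` becomes
/// `b"foo/bar/baz"` and an empty input becomes `b"."`.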
#[cfg(unix)]
pub fn normalize_path_bytes(bytes: &[u8]) -> Vec<u8> {
    if bytes.is_empty() {
        return b".".to_vec();
    }
    let sep = b'/';

    let mut initial_slashes = bytes.iter().take_while(|b| **b == sep).count();
    if initial_slashes > 2 {
        // POSIX allows one or two initial slashes, but treats three or more
        // as single slash.
        initial_slashes = 1;
    }
    let components = bytes
        .split(|b| *b == sep)
        .filter(|c| !(c.is_empty() || c == b"."))
        .fold(vec![], |mut acc, component| {
            if component != b".."
                || (initial_slashes == 0 && acc.is_empty())
                || (!acc.is_empty() && acc[acc.len() - 1] == b"..")
            {
                acc.push(component)
            } else if !acc.is_empty() {
                acc.pop();
            }
            acc
        });
    let mut new_bytes = components.join(&sep);

    if initial_slashes > 0 {
        let mut buf: Vec<_> = (0..initial_slashes).map(|_| sep).collect();
        buf.extend(new_bytes);
        new_bytes = buf;
    }
    if new_bytes.is_empty() {
        b".".to_vec()
    } else {
        new_bytes
    }
}

/// Wrapper function to `_build_single_regex` that short-circuits 'exact'
/// globs that don't need to be transformed into a regex.
pub fn build_single_regex(
    entry: &IgnorePattern,
) -> Result<Option<Vec<u8>>, PatternError> {
    let IgnorePattern {
        pattern, syntax, ..
    } = entry;
    let pattern = match syntax {
        PatternSyntax::RootGlob
        | PatternSyntax::Path
        | PatternSyntax::RelGlob
        | PatternSyntax::RootFiles => normalize_path_bytes(&pattern),
        PatternSyntax::Include | PatternSyntax::SubInclude => {
            return Err(PatternError::NonRegexPattern(entry.clone()))
        }
        _ => pattern.to_owned(),
    };
    if *syntax == PatternSyntax::RootGlob
        && !pattern.iter().any(|b| GLOB_SPECIAL_CHARACTERS.contains(b))
    {
        Ok(None)
    } else {
        let mut entry = entry.clone();
        entry.pattern = pattern;
        Ok(Some(_build_single_regex(&entry)))
    }
}

lazy_static! {
    static ref SYNTAXES: FastHashMap<&'static [u8], &'static [u8]> = {
        let mut m = FastHashMap::default();

        m.insert(b"re".as_ref(), b"relre:".as_ref());
        m.insert(b"regexp".as_ref(), b"relre:".as_ref());
        m.insert(b"glob".as_ref(), b"relglob:".as_ref());
        m.insert(b"rootglob".as_ref(), b"rootglob:".as_ref());
        m.insert(b"include".as_ref(), b"include:".as_ref());
        m.insert(b"subinclude".as_ref(), b"subinclude:".as_ref());
        m
    };
}

#[derive(Debug)]
pub enum PatternFileWarning {
    /// (file path, syntax bytes)
    InvalidSyntax(PathBuf, Vec<u8>),
    /// File path
    NoSuchFile(PathBuf),
}

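/// Parses the lines of a pattern file. `syntax:` lines switch the default
/// syntax, `#` starts a comment (write `\#` for a literal `#`) and every
/// remaining non-empty line becomes an `IgnorePattern`.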
pub fn parse_pattern_file_contents<P: AsRef<Path>>(
    lines: &[u8],
    file_path: P,
    warn: bool,
) -> Result<(Vec<IgnorePattern>, Vec<PatternFileWarning>), PatternError> {
    let comment_regex = Regex::new(r"((?:^|[^\\])(?:\\\\)*)#.*").unwrap();
    let comment_escape_regex = Regex::new(r"\\#").unwrap();
    let mut inputs: Vec<IgnorePattern> = vec![];
    let mut warnings: Vec<PatternFileWarning> = vec![];

    let mut current_syntax = b"relre:".as_ref();

    for (line_number, mut line) in lines.split(|c| *c == b'\n').enumerate() {
        let line_number = line_number + 1;

        let line_buf;
        if line.contains(&b'#') {
            if let Some(cap) = comment_regex.captures(line) {
                line = &line[..cap.get(1).unwrap().end()]
            }
            line_buf = comment_escape_regex.replace_all(line, NoExpand(b"#"));
            line = &line_buf;
        }

        let mut line = line.trim_end();

        if line.is_empty() {
            continue;
        }

        if let Some(syntax) = line.drop_prefix(b"syntax:") {
            let syntax = syntax.trim();

            if let Some(rel_syntax) = SYNTAXES.get(syntax) {
                current_syntax = rel_syntax;
            } else if warn {
                warnings.push(PatternFileWarning::InvalidSyntax(
                    file_path.as_ref().to_owned(),
                    syntax.to_owned(),
                ));
            }
            continue;
        }

        let mut line_syntax: &[u8] = &current_syntax;

        for (s, rels) in SYNTAXES.iter() {
            if let Some(rest) = line.drop_prefix(rels) {
                line_syntax = rels;
                line = rest;
                break;
            }
            if let Some(rest) = line.drop_prefix(&[s, &b":"[..]].concat()) {
                line_syntax = rels;
                line = rest;
                break;
            }
        }

        inputs.push(IgnorePattern::new(
            parse_pattern_syntax(&line_syntax).map_err(|e| match e {
                PatternError::UnsupportedSyntax(syntax) => {
                    PatternError::UnsupportedSyntaxInFile(
                        syntax,
                        file_path.as_ref().to_string_lossy().into(),
                        line_number,
                    )
                }
                _ => e,
            })?,
            &line,
            &file_path,
        ));
    }
    Ok((inputs, warnings))
}

pub fn read_pattern_file<P: AsRef<Path>>(
    file_path: P,
    warn: bool,
) -> Result<(Vec<IgnorePattern>, Vec<PatternFileWarning>), PatternError> {
    let mut f = match File::open(file_path.as_ref()) {
        Ok(f) => Ok(f),
        Err(e) => match e.kind() {
            std::io::ErrorKind::NotFound => {
                return Ok((
                    vec![],
                    vec![PatternFileWarning::NoSuchFile(
                        file_path.as_ref().to_owned(),
                    )],
                ))
            }
            _ => Err(e),
        },
    }?;
    let mut contents = Vec::new();

    f.read_to_end(&mut contents)?;

    Ok(parse_pattern_file_contents(&contents, file_path, warn)?)
}

/// Represents an entry in an "ignore" file.
#[derive(Debug, Eq, PartialEq, Clone)]
pub struct IgnorePattern {
    pub syntax: PatternSyntax,
    pub pattern: Vec<u8>,
    pub source: PathBuf,
}

impl IgnorePattern {
    pub fn new(
        syntax: PatternSyntax,
        pattern: &[u8],
        source: impl AsRef<Path>,
    ) -> Self {
        Self {
            syntax,
            pattern: pattern.to_owned(),
            source: source.as_ref().to_owned(),
        }
    }
}

pub type PatternResult<T> = Result<T, PatternError>;

/// Wrapper for `read_pattern_file` that also recursively expands `include:`
/// patterns.
///
/// `subinclude:` is not treated as a special pattern here: unraveling them
/// needs to occur in the "ignore" phase.
pub fn get_patterns_from_file(
    pattern_file: impl AsRef<Path>,
    root_dir: impl AsRef<Path>,
) -> PatternResult<(Vec<IgnorePattern>, Vec<PatternFileWarning>)> {
    let (patterns, mut warnings) = read_pattern_file(&pattern_file, true)?;
    let patterns = patterns
        .into_iter()
        .flat_map(|entry| -> PatternResult<_> {
            let IgnorePattern {
                syntax,
                pattern,
                source: _,
            } = &entry;
            Ok(match syntax {
                PatternSyntax::Include => {
                    let inner_include =
                        root_dir.as_ref().join(get_path_from_bytes(&pattern));
                    let (inner_pats, inner_warnings) = get_patterns_from_file(
                        &inner_include,
                        root_dir.as_ref(),
                    )?;
                    warnings.extend(inner_warnings);
                    inner_pats
                }
                _ => vec![entry],
            })
        })
        .flatten()
        .collect();

    Ok((patterns, warnings))
}

/// Holds all the information needed to handle a `subinclude:` pattern.
pub struct SubInclude {
    /// Will be used for repository (hg) paths that start with this prefix.
    /// It is relative to the current working directory, so comparing against
    /// repository paths is painless.
    pub prefix: HgPathBuf,
    /// The file itself, containing the patterns
    pub path: PathBuf,
    /// Folder in the filesystem where this applies
    pub root: PathBuf,
}

impl SubInclude {
    pub fn new(
        root_dir: impl AsRef<Path>,
        pattern: &[u8],
        source: impl AsRef<Path>,
    ) -> Result<SubInclude, HgPathError> {
        let normalized_source =
            normalize_path_bytes(&get_bytes_from_path(source));

        let source_root = get_path_from_bytes(&normalized_source);
        let source_root = source_root.parent().unwrap_or(source_root.deref());

        let path = source_root.join(get_path_from_bytes(pattern));
        let new_root = path.parent().unwrap_or(path.deref());

        let prefix = canonical_path(&root_dir, &root_dir, new_root)?;

        Ok(Self {
            prefix: path_to_hg_path_buf(prefix).and_then(|mut p| {
                if !p.is_empty() {
                    p.push(b'/');
                }
                Ok(p)
            })?,
            path: path.to_owned(),
            root: new_root.to_owned(),
        })
    }
}

/// Separate and pre-process subincludes from other patterns for the "ignore"
/// phase.
pub fn filter_subincludes(
    ignore_patterns: &[IgnorePattern],
    root_dir: impl AsRef<Path>,
) -> Result<(Vec<SubInclude>, Vec<&IgnorePattern>), HgPathError> {
    let mut subincludes = vec![];
    let mut others = vec![];

    for ignore_pattern in ignore_patterns.iter() {
        let IgnorePattern {
            syntax,
            pattern,
            source,
        } = ignore_pattern;
        if *syntax == PatternSyntax::SubInclude {
            subincludes.push(SubInclude::new(&root_dir, pattern, &source)?);
        } else {
            others.push(ignore_pattern)
        }
    }
    Ok((subincludes, others))
}

#[cfg(test)]
mod tests {
    use super::*;
    use pretty_assertions::assert_eq;

    #[test]
    fn escape_pattern_test() {
        let untouched =
            br#"!"%',/0123456789:;<=>@ABCDEFGHIJKLMNOPQRSTUVWXYZ_`abcdefghijklmnopqrstuvwxyz"#;
        assert_eq!(escape_pattern(untouched), untouched.to_vec());
        // All escape codes
        assert_eq!(
            escape_pattern(br#"()[]{}?*+-|^$\\.&~# \t\n\r\v\f"#),
            br#"\(\)\[\]\{\}\?\*\+\-\|\^\$\\\\\.\&\~\#\ \\t\\n\\r\\v\\f"#
                .to_vec()
        );
    }

    #[test]
    fn glob_test() {
        assert_eq!(glob_to_re(br#"?"#), br#"."#);
        assert_eq!(glob_to_re(br#"*"#), br#"[^/]*"#);
        assert_eq!(glob_to_re(br#"**"#), br#".*"#);
        assert_eq!(glob_to_re(br#"**/a"#), br#"(?:.*/)?a"#);
        assert_eq!(glob_to_re(br#"a/**/b"#), br#"a/(?:.*/)?b"#);
        assert_eq!(glob_to_re(br#"[a*?!^][^b][!c]"#), br#"[a*?!^][\^b][^c]"#);
        assert_eq!(glob_to_re(br#"{a,b}"#), br#"(?:a|b)"#);
        assert_eq!(glob_to_re(br#".\*\?"#), br#"\.\*\?"#);
    }

    #[test]
    fn test_parse_pattern_file_contents() {
        let lines = b"syntax: glob\n*.elc";

        assert_eq!(
            parse_pattern_file_contents(lines, Path::new("file_path"), false)
                .unwrap()
                .0,
            vec![IgnorePattern::new(
                PatternSyntax::RelGlob,
                b"*.elc",
                Path::new("file_path")
            )],
        );

        let lines = b"syntax: include\nsyntax: glob";

        assert_eq!(
            parse_pattern_file_contents(lines, Path::new("file_path"), false)
                .unwrap()
                .0,
            vec![]
        );
        let lines = b"glob:**.o";
        assert_eq!(
            parse_pattern_file_contents(lines, Path::new("file_path"), false)
                .unwrap()
                .0,
            vec![IgnorePattern::new(
                PatternSyntax::RelGlob,
                b"**.o",
                Path::new("file_path")
            )]
        );
    }

    #[test]
    fn test_build_single_regex() {
        assert_eq!(
            build_single_regex(&IgnorePattern::new(
                PatternSyntax::RelGlob,
                b"rust/target/",
                Path::new("")
            ))
            .unwrap(),
            Some(br"(?:.*/)?rust/target(?:/|$)".to_vec()),
        );
    }

    #[test]
    fn test_build_single_regex_shortcut() {
        assert_eq!(
            build_single_regex(&IgnorePattern::new(
                PatternSyntax::RootGlob,
                b"",
                Path::new("")
            ))
            .unwrap(),
            None,
        );
        assert_eq!(
            build_single_regex(&IgnorePattern::new(
                PatternSyntax::RootGlob,
                b"whatever",
                Path::new("")
            ))
            .unwrap(),
            None,
        );
        assert_eq!(
            build_single_regex(&IgnorePattern::new(
                PatternSyntax::RootGlob,
                b"*.o",
                Path::new("")
            ))
            .unwrap(),
            Some(br"[^/]*\.o(?:/|$)".to_vec()),
        );
    }
}
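Taken together, the functions above are meant to be driven roughly as follows: read and expand a pattern file with `get_patterns_from_file`, translate each resulting `IgnorePattern` with `build_single_regex`, and compile what survives. The sketch below only illustrates that flow and is not part of the module: the `.hgignore` path, the alternation-joining strategy, the lossy UTF-8 conversion and the error handling are all assumptions, and the snippet presumes it lives next to this module so its items and `Path` are in scope.

/// Illustrative only: turn one ignore file into a single compiled regex.
fn compile_ignore_file(
    root: &Path,
) -> Result<Option<regex::bytes::Regex>, PatternError> {
    let (patterns, _warnings) =
        get_patterns_from_file(root.join(".hgignore"), root)?;

    // Keep only the entries that actually need a regex; literal `rootglob:`
    // entries come back as `None` and could be matched by plain comparison.
    let mut alternates: Vec<Vec<u8>> = Vec::new();
    for entry in &patterns {
        if let Some(re_bytes) = build_single_regex(entry)? {
            alternates.push([b"(?:", &re_bytes[..], b")"].concat());
        }
    }
    if alternates.is_empty() {
        return Ok(None);
    }

    // `regex::bytes::Regex::new` takes a `&str` source, hence the (lossy)
    // conversion; real code would have to be more careful here.
    let source =
        String::from_utf8_lossy(&alternates.join(&b"|"[..])).into_owned();
    Ok(Some(
        regex::bytes::Regex::new(&source)
            .expect("generated regex should be valid"),
    ))
}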