@@ -1,373 +1,382 @@
+// filepatterns.rs
+//
+// Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+//! Handling of Mercurial-specific patterns.
+
 use crate::{
     utils::{files::get_path_from_bytes, SliceExt},
     LineNumber, PatternError, PatternFileError,
 };
 use lazy_static::lazy_static;
 use regex::bytes::{NoExpand, Regex};
 use std::collections::HashMap;
 use std::fs::File;
 use std::io::Read;
 use std::vec::Vec;

 lazy_static! {
     static ref RE_ESCAPE: Vec<Vec<u8>> = {
         let mut v: Vec<Vec<u8>> = (0..=255).map(|byte| vec![byte]).collect();
         let to_escape = b"()[]{}?*+-|^$\\.&~# \t\n\r\x0b\x0c";
         for byte in to_escape {
             v[*byte as usize].insert(0, b'\\');
         }
         v
     };
 }

 /// These are matched in order
 const GLOB_REPLACEMENTS: &[(&[u8], &[u8])] =
     &[(b"*/", b"(?:.*/)?"), (b"*", b".*"), (b"", b"[^/]*")];

 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
 pub enum PatternSyntax {
     Regexp,
     /// Glob that matches at the front of the path
     RootGlob,
     /// Glob that matches at any suffix of the path (still anchored at slashes)
     Glob,
     Path,
     RelPath,
     RelGlob,
     RelRegexp,
     RootFiles,
 }

 /// Transforms a glob pattern into a regex
 fn glob_to_re(pat: &[u8]) -> Vec<u8> {
     let mut input = pat;
     let mut res: Vec<u8> = vec![];
     let mut group_depth = 0;

     while let Some((c, rest)) = input.split_first() {
         input = rest;

         match c {
             b'*' => {
                 for (source, repl) in GLOB_REPLACEMENTS {
                     if input.starts_with(source) {
                         input = &input[source.len()..];
                         res.extend(*repl);
                         break;
                     }
                 }
             }
             b'?' => res.extend(b"."),
             b'[' => {
                 match input.iter().skip(1).position(|b| *b == b']') {
                     None => res.extend(b"\\["),
                     Some(end) => {
                         // Account for the one we skipped
                         let end = end + 1;

                         res.extend(b"[");

                         for (i, b) in input[..end].iter().enumerate() {
                             if *b == b'!' && i == 0 {
                                 res.extend(b"^")
                             } else if *b == b'^' && i == 0 {
                                 res.extend(b"\\^")
                             } else if *b == b'\\' {
                                 res.extend(b"\\\\")
                             } else {
                                 res.push(*b)
                             }
                         }
                         res.extend(b"]");
                         input = &input[end + 1..];
                     }
                 }
             }
             b'{' => {
                 group_depth += 1;
                 res.extend(b"(?:")
             }
             b'}' if group_depth > 0 => {
                 group_depth -= 1;
                 res.extend(b")");
             }
             b',' if group_depth > 0 => res.extend(b"|"),
             b'\\' => {
                 let c = {
                     if let Some((c, rest)) = input.split_first() {
                         input = rest;
                         c
                     } else {
                         c
                     }
                 };
                 res.extend(&RE_ESCAPE[*c as usize])
             }
             _ => res.extend(&RE_ESCAPE[*c as usize]),
         }
     }
     res
 }

 fn escape_pattern(pattern: &[u8]) -> Vec<u8> {
     pattern
         .iter()
         .flat_map(|c| RE_ESCAPE[*c as usize].clone())
         .collect()
 }

 fn parse_pattern_syntax(kind: &[u8]) -> Result<PatternSyntax, PatternError> {
     match kind {
         b"re" => Ok(PatternSyntax::Regexp),
         b"path" => Ok(PatternSyntax::Path),
         b"relpath" => Ok(PatternSyntax::RelPath),
         b"rootfilesin" => Ok(PatternSyntax::RootFiles),
         b"relglob" => Ok(PatternSyntax::RelGlob),
         b"relre" => Ok(PatternSyntax::RelRegexp),
         b"glob" => Ok(PatternSyntax::Glob),
         b"rootglob" => Ok(PatternSyntax::RootGlob),
         _ => Err(PatternError::UnsupportedSyntax(
             String::from_utf8_lossy(kind).to_string(),
         )),
     }
 }

 /// Builds the regex that corresponds to the given pattern.
 /// If within a `syntax: regexp` context, returns the pattern,
 /// otherwise, returns the corresponding regex.
 fn _build_single_regex(
     syntax: PatternSyntax,
     pattern: &[u8],
     globsuffix: &[u8],
 ) -> Vec<u8> {
     if pattern.is_empty() {
         return vec![];
     }
     match syntax {
         PatternSyntax::Regexp => pattern.to_owned(),
         PatternSyntax::RelRegexp => {
             if pattern[0] == b'^' {
                 return pattern.to_owned();
             }
             let mut res = b".*".to_vec();
             res.extend(pattern);
             res
         }
         PatternSyntax::Path | PatternSyntax::RelPath => {
             if pattern == b"." {
                 return vec![];
             }
             let mut pattern = escape_pattern(pattern);
             pattern.extend(b"(?:/|$)");
             pattern
         }
         PatternSyntax::RootFiles => {
             let mut res = if pattern == b"." {
                 vec![]
             } else {
                 // Pattern is a directory name.
                 let mut as_vec: Vec<u8> = escape_pattern(pattern);
                 as_vec.push(b'/');
                 as_vec
             };

             // Anything after the pattern must be a non-directory.
             res.extend(b"[^/]+$");
             res
         }
         PatternSyntax::Glob
         | PatternSyntax::RelGlob
         | PatternSyntax::RootGlob => {
             let mut res: Vec<u8> = vec![];
             if syntax == PatternSyntax::RelGlob {
                 res.extend(b"(?:|.*/)");
             }

             res.extend(glob_to_re(pattern));
             res.extend(globsuffix.iter());
             res
         }
     }
 }

 const GLOB_SPECIAL_CHARACTERS: [u8; 7] =
     [b'*', b'?', b'[', b']', b'{', b'}', b'\\'];

 /// Wrapper function to `_build_single_regex` that short-circuits 'exact' globs
 /// that don't need to be transformed into a regex.
 pub fn build_single_regex(
     kind: &[u8],
     pat: &[u8],
     globsuffix: &[u8],
 ) -> Result<Vec<u8>, PatternError> {
     let enum_kind = parse_pattern_syntax(kind)?;
     if enum_kind == PatternSyntax::RootGlob
         && !pat.iter().any(|b| GLOB_SPECIAL_CHARACTERS.contains(b))
     {
         let mut escaped = escape_pattern(pat);
         escaped.extend(b"(?:/|$)");
         Ok(escaped)
     } else {
         Ok(_build_single_regex(enum_kind, pat, globsuffix))
     }
 }

 lazy_static! {
     static ref SYNTAXES: HashMap<&'static [u8], &'static [u8]> = {
         let mut m = HashMap::new();

         m.insert(b"re".as_ref(), b"relre:".as_ref());
         m.insert(b"regexp".as_ref(), b"relre:".as_ref());
         m.insert(b"glob".as_ref(), b"relglob:".as_ref());
         m.insert(b"rootglob".as_ref(), b"rootglob:".as_ref());
         m.insert(b"include".as_ref(), b"include".as_ref());
         m.insert(b"subinclude".as_ref(), b"subinclude".as_ref());
         m
     };
 }

 pub type PatternTuple = (Vec<u8>, LineNumber, Vec<u8>);
 type WarningTuple = (Vec<u8>, Vec<u8>);

 pub fn parse_pattern_file_contents(
     lines: &[u8],
     file_path: &[u8],
     warn: bool,
 ) -> (Vec<PatternTuple>, Vec<WarningTuple>) {
     let comment_regex = Regex::new(r"((?:^|[^\\])(?:\\\\)*)#.*").unwrap();
     let comment_escape_regex = Regex::new(r"\\#").unwrap();
     let mut inputs: Vec<PatternTuple> = vec![];
     let mut warnings: Vec<WarningTuple> = vec![];

     let mut current_syntax = b"relre:".as_ref();

     for (line_number, mut line) in lines.split(|c| *c == b'\n').enumerate() {
         let line_number = line_number + 1;

         let line_buf;
         if line.contains(&b'#') {
             if let Some(cap) = comment_regex.captures(line) {
                 line = &line[..cap.get(1).unwrap().end()]
             }
             line_buf = comment_escape_regex.replace_all(line, NoExpand(b"#"));
             line = &line_buf;
         }

         let mut line = line.trim_end();

         if line.is_empty() {
             continue;
         }

         if line.starts_with(b"syntax:") {
             let syntax = line[b"syntax:".len()..].trim();

             if let Some(rel_syntax) = SYNTAXES.get(syntax) {
                 current_syntax = rel_syntax;
             } else if warn {
                 warnings.push((file_path.to_owned(), syntax.to_owned()));
             }
             continue;
         }

         let mut line_syntax: &[u8] = &current_syntax;

         for (s, rels) in SYNTAXES.iter() {
             if line.starts_with(rels) {
                 line_syntax = rels;
                 line = &line[rels.len()..];
                 break;
             } else if line.starts_with(&[s, b":".as_ref()].concat()) {
                 line_syntax = rels;
                 line = &line[s.len() + 1..];
                 break;
             }
         }

         inputs.push((
             [line_syntax, line].concat(),
             line_number,
             line.to_owned(),
         ));
     }
     (inputs, warnings)
 }

 pub fn read_pattern_file(
     file_path: &[u8],
     warn: bool,
 ) -> Result<(Vec<PatternTuple>, Vec<WarningTuple>), PatternFileError> {
     let mut f = File::open(get_path_from_bytes(file_path))?;
     let mut contents = Vec::new();

     f.read_to_end(&mut contents)?;

     Ok(parse_pattern_file_contents(&contents, file_path, warn))
 }

 #[cfg(test)]
 mod tests {
     use super::*;

     #[test]
     fn escape_pattern_test() {
         let untouched = br#"!"%',/0123456789:;<=>@ABCDEFGHIJKLMNOPQRSTUVWXYZ_`abcdefghijklmnopqrstuvwxyz"#;
         assert_eq!(escape_pattern(untouched), untouched.to_vec());
         // All escape codes
         assert_eq!(
             escape_pattern(br#"()[]{}?*+-|^$\\.&~# \t\n\r\v\f"#),
             br#"\(\)\[\]\{\}\?\*\+\-\|\^\$\\\\\.\&\~\#\ \\t\\n\\r\\v\\f"#
                 .to_vec()
         );
     }

     #[test]
     fn glob_test() {
         assert_eq!(glob_to_re(br#"?"#), br#"."#);
         assert_eq!(glob_to_re(br#"*"#), br#"[^/]*"#);
         assert_eq!(glob_to_re(br#"**"#), br#".*"#);
         assert_eq!(glob_to_re(br#"**/a"#), br#"(?:.*/)?a"#);
         assert_eq!(glob_to_re(br#"a/**/b"#), br#"a/(?:.*/)?b"#);
         assert_eq!(glob_to_re(br#"[a*?!^][^b][!c]"#), br#"[a*?!^][\^b][^c]"#);
         assert_eq!(glob_to_re(br#"{a,b}"#), br#"(?:a|b)"#);
         assert_eq!(glob_to_re(br#".\*\?"#), br#"\.\*\?"#);
     }

     #[test]
     fn test_parse_pattern_file_contents() {
         let lines = b"syntax: glob\n*.elc";

         assert_eq!(
             vec![(b"relglob:*.elc".to_vec(), 2, b"*.elc".to_vec())],
             parse_pattern_file_contents(lines, b"file_path", false).0,
         );

         let lines = b"syntax: include\nsyntax: glob";

         assert_eq!(
             parse_pattern_file_contents(lines, b"file_path", false).0,
             vec![]
         );
         let lines = b"glob:**.o";
         assert_eq!(
             parse_pattern_file_contents(lines, b"file_path", false).0,
             vec![(b"relglob:**.o".to_vec(), 1, b"**.o".to_vec())]
         );
     }

     #[test]
     fn test_build_single_regex_shortcut() {
         assert_eq!(
             br"(?:/|$)".to_vec(),
             build_single_regex(b"rootglob", b"", b"").unwrap()
         );
         assert_eq!(
             br"whatever(?:/|$)".to_vec(),
             build_single_regex(b"rootglob", b"whatever", b"").unwrap()
         );
         assert_eq!(
             br"[^/]*\.o".to_vec(),
             build_single_regex(b"rootglob", b"*.o", b"").unwrap()
         );
     }
 }
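
Below is a short usage sketch (not part of the patch) of what `build_single_regex` returns on its two code paths. It assumes the crate is importable as `hg`, the name used by the doctests in this series, and that `filepatterns` is a public module; the returned bytes are regex source that the caller still has to compile, for example with `regex::bytes::Regex`.

```rust
// Illustrative sketch only; `hg::filepatterns` being public is an assumption
// based on the file name above, not something the patch itself shows.
use hg::filepatterns::build_single_regex;

fn main() {
    // A `rootglob` with no glob metacharacters takes the short-circuit path:
    // the pattern is merely escaped and anchored at a path boundary.
    let exact = build_single_regex(b"rootglob", b"Cargo.toml", b"").unwrap();
    assert_eq!(exact, br"Cargo\.toml(?:/|$)".to_vec());

    // A plain glob goes through glob_to_re(); a single `*` never crosses `/`.
    let glob = build_single_regex(b"glob", b"*.rs", b"").unwrap();
    assert_eq!(glob, br"[^/]*\.rs".to_vec());
}
```

The empty `globsuffix` mirrors the unit tests; whatever suffix the caller supplies is appended verbatim after the translated glob.
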
@@ -1,92 +1,101 @@
+// utils module
+//
+// Copyright 2019 Raphaël Gomès <rgomes@octobus.net>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+//! Contains useful functions, traits, structs, etc. for use in core.
+
 pub mod files;

 use std::convert::AsMut;

 /// Takes a slice and copies it into an array.
 ///
 /// # Panics
 ///
 /// Will panic if the slice and target array don't have the same length.
 pub fn copy_into_array<A, T>(slice: &[T]) -> A
 where
     A: Sized + Default + AsMut<[T]>,
     T: Copy,
 {
     let mut a = Default::default();
     <A as AsMut<[T]>>::as_mut(&mut a).copy_from_slice(slice);
     a
 }

 /// Replaces the `from` slice with the `to` slice inside the `buf` slice.
 ///
 /// # Examples
 ///
 /// ```
 /// use crate::hg::utils::replace_slice;
 /// let mut line = b"I hate writing tests!".to_vec();
 /// replace_slice(&mut line, b"hate", b"love");
 /// assert_eq!(
 ///     line,
 ///     b"I love writing tests!".to_vec()
 ///);
 ///
 /// ```
 pub fn replace_slice<T>(buf: &mut [T], from: &[T], to: &[T])
 where
     T: Clone + PartialEq,
 {
     if buf.len() < from.len() || from.len() != to.len() {
         return;
     }
     for i in 0..=buf.len() - from.len() {
         if buf[i..].starts_with(from) {
             buf[i..(i + from.len())].clone_from_slice(to);
         }
     }
 }

 pub trait SliceExt {
     fn trim_end(&self) -> &Self;
     fn trim_start(&self) -> &Self;
     fn trim(&self) -> &Self;
 }

 fn is_not_whitespace(c: &u8) -> bool {
     !(*c as char).is_whitespace()
 }

 impl SliceExt for [u8] {
     fn trim_end(&self) -> &[u8] {
         if let Some(last) = self.iter().rposition(is_not_whitespace) {
             &self[..last + 1]
         } else {
             &[]
         }
     }
     fn trim_start(&self) -> &[u8] {
         if let Some(first) = self.iter().position(is_not_whitespace) {
             &self[first..]
         } else {
             &[]
         }
     }

     /// ```
     /// use hg::utils::SliceExt;
     /// assert_eq!(
     ///     b" to trim ".trim(),
     ///     b"to trim"
     /// );
     /// assert_eq!(
     ///     b"to trim ".trim(),
     ///     b"to trim"
     /// );
     /// assert_eq!(
     ///     b" to trim".trim(),
     ///     b"to trim"
     /// );
     /// ```
     fn trim(&self) -> &[u8] {
         self.trim_start().trim_end()
     }
 }
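
A small illustrative sketch (not part of the patch) of the helpers in this module, assuming the crate is importable as `hg` with `utils` public, as the `use hg::utils::SliceExt;` doctest above suggests.

```rust
use hg::utils::{copy_into_array, replace_slice, SliceExt};

fn main() {
    // `copy_into_array` panics on a length mismatch, so slice to the exact size.
    let bytes = b"deadbeef-tail";
    let first_eight: [u8; 8] = copy_into_array(&bytes[..8]);
    assert_eq!(&first_eight, b"deadbeef");

    // `replace_slice` only rewrites in place when `from` and `to` have the
    // same length; otherwise it silently does nothing.
    let mut line = b"foo bar".to_vec();
    replace_slice(&mut line, b"bar", b"baz");
    assert_eq!(line, b"foo baz".to_vec());

    // The `SliceExt` trims mirror `str::trim*` for byte slices.
    assert_eq!(b"  hg  ".trim(), b"hg");
}
```
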
@@ -1,83 +1,94 @@
+// files.rs
+//
+// Copyright 2019
+// Raphaël Gomès <rgomes@octobus.net>,
+// Yuya Nishihara <yuya@tcha.org>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+//! Functions for fiddling with files.
+
 use std::iter::FusedIterator;
 use std::path::Path;

 pub fn get_path_from_bytes(bytes: &[u8]) -> &Path {
     let os_str;
     #[cfg(unix)]
     {
         use std::os::unix::ffi::OsStrExt;
         os_str = std::ffi::OsStr::from_bytes(bytes);
     }
     #[cfg(windows)]
     {
         // TODO: convert from Windows MBCS (ANSI encoding) to WTF8.
         // Perhaps, the return type would have to be Result<PathBuf>.
         use std::os::windows::ffi::OsStrExt;
         os_str = std::ffi::OsString::from_wide(bytes);
     }

     Path::new(os_str)
 }

 /// An iterator over repository path yielding itself and its ancestors.
 #[derive(Copy, Clone, Debug)]
 pub struct Ancestors<'a> {
     next: Option<&'a [u8]>,
 }

 impl<'a> Iterator for Ancestors<'a> {
     // if we had an HgPath type, this would yield &'a HgPath
     type Item = &'a [u8];

     fn next(&mut self) -> Option<Self::Item> {
         let next = self.next;
         self.next = match self.next {
             Some(s) if s.is_empty() => None,
             Some(s) => {
                 let p = s.iter().rposition(|&c| c == b'/').unwrap_or(0);
                 Some(&s[..p])
             }
             None => None,
         };
         next
     }
 }

 impl<'a> FusedIterator for Ancestors<'a> {}

 /// Returns an iterator yielding ancestor directories of the given repository
 /// path.
 ///
 /// The path is separated by '/', and must not start with '/'.
 ///
 /// The path itself isn't included unless it is b"" (meaning the root
 /// directory.)
 pub fn find_dirs<'a>(path: &'a [u8]) -> Ancestors<'a> {
     let mut dirs = Ancestors { next: Some(path) };
     if !path.is_empty() {
         dirs.next(); // skip itself
     }
     dirs
 }

 #[cfg(test)]
 mod tests {
     #[test]
     fn find_dirs_some() {
         let mut dirs = super::find_dirs(b"foo/bar/baz");
         assert_eq!(dirs.next(), Some(b"foo/bar".as_ref()));
         assert_eq!(dirs.next(), Some(b"foo".as_ref()));
         assert_eq!(dirs.next(), Some(b"".as_ref()));
         assert_eq!(dirs.next(), None);
         assert_eq!(dirs.next(), None);
     }

     #[test]
     fn find_dirs_empty() {
         // looks weird, but mercurial.util.finddirs(b"") yields b""
         let mut dirs = super::find_dirs(b"");
         assert_eq!(dirs.next(), Some(b"".as_ref()));
         assert_eq!(dirs.next(), None);
         assert_eq!(dirs.next(), None);
     }
 }
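
A short illustrative sketch (not part of the patch) of the two public helpers, mirroring the unit tests above. It assumes a Unix target, since the Windows branch of `get_path_from_bytes` is still a TODO, and the `hg::utils::files` path implied by the imports in `filepatterns.rs`.

```rust
use hg::utils::files::{find_dirs, get_path_from_bytes};
use std::path::Path;

fn main() {
    // Ancestors are yielded from the deepest directory up to the root,
    // which is represented by the empty byte string.
    let mut dirs = find_dirs(b"dir/subdir/file.txt");
    assert_eq!(dirs.next(), Some(b"dir/subdir".as_ref()));
    assert_eq!(dirs.next(), Some(b"dir".as_ref()));
    assert_eq!(dirs.next(), Some(b"".as_ref()));
    assert_eq!(dirs.next(), None); // fused: keeps returning None

    // On Unix the byte path is reinterpreted as an OsStr without copying.
    assert_eq!(get_path_from_bytes(b".hgignore"), Path::new(".hgignore"));
}
```
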