##// END OF EJS Templates
copies-rust: use matching to select the final copies information...
marmoute -
r47318:f8bdc832 default
parent child Browse files
Show More
@@ -1,891 +1,879 b''
1 use crate::utils::hg_path::HgPath;
1 use crate::utils::hg_path::HgPath;
2 use crate::utils::hg_path::HgPathBuf;
2 use crate::utils::hg_path::HgPathBuf;
3 use crate::Revision;
3 use crate::Revision;
4 use crate::NULL_REVISION;
4 use crate::NULL_REVISION;
5
5
6 use im_rc::ordmap::DiffItem;
6 use im_rc::ordmap::DiffItem;
7 use im_rc::ordmap::Entry;
7 use im_rc::ordmap::Entry;
8 use im_rc::ordmap::OrdMap;
8 use im_rc::ordmap::OrdMap;
9
9
10 use std::cmp::Ordering;
10 use std::cmp::Ordering;
11 use std::collections::HashMap;
11 use std::collections::HashMap;
12 use std::collections::HashSet;
12 use std::collections::HashSet;
13 use std::convert::TryInto;
13 use std::convert::TryInto;
14
14
15 pub type PathCopies = HashMap<HgPathBuf, HgPathBuf>;
15 pub type PathCopies = HashMap<HgPathBuf, HgPathBuf>;
16
16
17 type PathToken = usize;
17 type PathToken = usize;
18
18
/// Copy information recorded for one destination path: the revision at which
/// it was recorded, where the file came from, and which older records this
/// entry supersedes.
#[derive(Clone, Debug, PartialEq)]
struct CopySource {
    /// revision at which the copy information was added
    rev: Revision,
    /// the copy source, (Set to None in case of deletion of the associated
    /// key)
    path: Option<PathToken>,
    /// a set of previous `CopySource.rev` value directly or indirectly
    /// overwritten by this one.
    overwritten: HashSet<Revision>,
}
30
30
31 impl CopySource {
31 impl CopySource {
32 /// create a new CopySource
32 /// create a new CopySource
33 ///
33 ///
34 /// Use this when no previous copy source existed.
34 /// Use this when no previous copy source existed.
35 fn new(rev: Revision, path: Option<PathToken>) -> Self {
35 fn new(rev: Revision, path: Option<PathToken>) -> Self {
36 Self {
36 Self {
37 rev,
37 rev,
38 path,
38 path,
39 overwritten: HashSet::new(),
39 overwritten: HashSet::new(),
40 }
40 }
41 }
41 }
42
42
43 /// create a new CopySource from merging two others
43 /// create a new CopySource from merging two others
44 ///
44 ///
45 /// Use this when merging two InternalPathCopies requires active merging of
45 /// Use this when merging two InternalPathCopies requires active merging of
46 /// some entries.
46 /// some entries.
47 fn new_from_merge(rev: Revision, winner: &Self, loser: &Self) -> Self {
47 fn new_from_merge(rev: Revision, winner: &Self, loser: &Self) -> Self {
48 let mut overwritten = HashSet::new();
48 let mut overwritten = HashSet::new();
49 overwritten.extend(winner.overwritten.iter().copied());
49 overwritten.extend(winner.overwritten.iter().copied());
50 overwritten.extend(loser.overwritten.iter().copied());
50 overwritten.extend(loser.overwritten.iter().copied());
51 overwritten.insert(winner.rev);
51 overwritten.insert(winner.rev);
52 overwritten.insert(loser.rev);
52 overwritten.insert(loser.rev);
53 Self {
53 Self {
54 rev,
54 rev,
55 path: winner.path,
55 path: winner.path,
56 overwritten: overwritten,
56 overwritten: overwritten,
57 }
57 }
58 }
58 }
59
59
60 /// Update the value of a pre-existing CopySource
60 /// Update the value of a pre-existing CopySource
61 ///
61 ///
62 /// Use this when recording copy information from parent β†’ child edges
62 /// Use this when recording copy information from parent β†’ child edges
63 fn overwrite(&mut self, rev: Revision, path: Option<PathToken>) {
63 fn overwrite(&mut self, rev: Revision, path: Option<PathToken>) {
64 self.overwritten.insert(self.rev);
64 self.overwritten.insert(self.rev);
65 self.rev = rev;
65 self.rev = rev;
66 self.path = path;
66 self.path = path;
67 }
67 }
68
68
69 /// Mark pre-existing copy information as "dropped" by a file deletion
69 /// Mark pre-existing copy information as "dropped" by a file deletion
70 ///
70 ///
71 /// Use this when recording copy information from parent β†’ child edges
71 /// Use this when recording copy information from parent β†’ child edges
72 fn mark_delete(&mut self, rev: Revision) {
72 fn mark_delete(&mut self, rev: Revision) {
73 self.overwritten.insert(self.rev);
73 self.overwritten.insert(self.rev);
74 self.rev = rev;
74 self.rev = rev;
75 self.path = None;
75 self.path = None;
76 }
76 }
77
77
78 fn is_overwritten_by(&self, other: &Self) -> bool {
78 fn is_overwritten_by(&self, other: &Self) -> bool {
79 other.overwritten.contains(&self.rev)
79 other.overwritten.contains(&self.rev)
80 }
80 }
81 }
81 }
82
82
83 /// maps CopyDestination to Copy Source (+ a "timestamp" for the operation)
83 /// maps CopyDestination to Copy Source (+ a "timestamp" for the operation)
84 type InternalPathCopies = OrdMap<PathToken, CopySource>;
84 type InternalPathCopies = OrdMap<PathToken, CopySource>;
85
85
86 /// hold parent 1, parent 2 and relevant files actions.
86 /// hold parent 1, parent 2 and relevant files actions.
87 pub type RevInfo<'a> = (Revision, Revision, ChangedFiles<'a>);
87 pub type RevInfo<'a> = (Revision, Revision, ChangedFiles<'a>);
88
88
/// represent the files affected by a changeset
///
/// This holds a subset of mercurial.metadata.ChangingFiles as we do not need
/// all the data categories tracked by it.
pub struct ChangedFiles<'a> {
    /// number of entries recorded in `index`
    nb_items: u32,
    /// fixed-size index records (flags byte + big-endian u32 filename-end
    /// offset + big-endian u32 copy-source entry index; see `ENTRY_SIZE`)
    index: &'a [u8],
    /// concatenated filename bytes, sliced via the offsets stored in `index`
    data: &'a [u8],
}
100
100
/// Represent active changes that affect the copy tracing.
enum Action<'a> {
    /// The parent → children edge is removing a file
    ///
    /// (actually, this could be the edge from the other parent, but it does
    /// not matter)
    Removed(&'a HgPath),
    /// The parent → children edge introduces copy information between (dest,
    /// source)
    Copied(&'a HgPath, &'a HgPath),
}
112
112
/// This expresses the possible "special" cases we can get in a merge
///
/// See mercurial/metadata.py for details on these values.
#[derive(PartialEq)]
enum MergeCase {
    /// Merged: file had history on both sides that needed to be merged
    Merged,
    /// Salvaged: file was candidate for deletion, but survived the merge
    Salvaged,
    /// Normal: Not one of the two cases above
    Normal,
}
125
125
126 type FileChange<'a> = (u8, &'a HgPath, &'a HgPath);
126 type FileChange<'a> = (u8, &'a HgPath, &'a HgPath);
127
127
128 const EMPTY: &[u8] = b"";
128 const EMPTY: &[u8] = b"";
129 const COPY_MASK: u8 = 3;
129 const COPY_MASK: u8 = 3;
130 const P1_COPY: u8 = 2;
130 const P1_COPY: u8 = 2;
131 const P2_COPY: u8 = 3;
131 const P2_COPY: u8 = 3;
132 const ACTION_MASK: u8 = 28;
132 const ACTION_MASK: u8 = 28;
133 const REMOVED: u8 = 12;
133 const REMOVED: u8 = 12;
134 const MERGED: u8 = 8;
134 const MERGED: u8 = 8;
135 const SALVAGED: u8 = 16;
135 const SALVAGED: u8 = 16;
136
136
impl<'a> ChangedFiles<'a> {
    // Serialized layout: a 4-byte big-endian entry-count header, then
    // `nb_items` fixed-size index records, then the filename payload.
    const INDEX_START: usize = 4;
    const ENTRY_SIZE: u32 = 9;
    const FILENAME_START: u32 = 1;
    const COPY_SOURCE_START: u32 = 5;

    /// Parse serialized changed-files data, asserting that `data` is large
    /// enough for the header, the index and the filename payload.
    pub fn new(data: &'a [u8]) -> Self {
        assert!(
            data.len() >= 4,
            "data size ({}) is too small to contain the header (4)",
            data.len()
        );
        let nb_items_raw: [u8; 4] = (&data[0..=3])
            .try_into()
            .expect("failed to turn 4 bytes into 4 bytes");
        let nb_items = u32::from_be_bytes(nb_items_raw);

        let index_size = (nb_items * Self::ENTRY_SIZE) as usize;
        let index_end = Self::INDEX_START + index_size;

        assert!(
            data.len() >= index_end,
            "data size ({}) is too small to fit the index_data ({})",
            data.len(),
            index_end
        );

        let ret = ChangedFiles {
            nb_items,
            index: &data[Self::INDEX_START..index_end],
            data: &data[index_end..],
        };
        // The last entry's filename-end offset equals the payload size.
        // NOTE(review): assumes nb_items >= 1 — `nb_items == 0` would
        // underflow here; the empty case presumably goes through
        // `new_empty()` instead. TODO confirm with callers.
        let max_data = ret.filename_end(nb_items - 1) as usize;
        assert!(
            ret.data.len() >= max_data,
            "data size ({}) is too small to fit all data ({})",
            data.len(),
            index_end + max_data
        );
        ret
    }

    /// Build an instance describing no file change at all.
    pub fn new_empty() -> Self {
        ChangedFiles {
            nb_items: 0,
            index: EMPTY,
            data: EMPTY,
        }
    }

    /// internal function to return an individual entry at a given index
    ///
    /// Panics if `idx` is out of bounds.
    fn entry(&'a self, idx: u32) -> FileChange<'a> {
        if idx >= self.nb_items {
            panic!(
                "index for entry is higher that the number of file {} >= {}",
                idx, self.nb_items
            )
        }
        let flags = self.flags(idx);
        let filename = self.filename(idx);
        // The copy source is stored as the index of another entry whose
        // filename is the source path.
        let copy_idx = self.copy_idx(idx);
        let copy_source = self.filename(copy_idx);
        (flags, filename, copy_source)
    }

    /// internal function to return the filename of the entry at a given index
    fn filename(&self, idx: u32) -> &HgPath {
        // Filenames are stored back to back in `data`: entry N starts where
        // entry N-1 ends, and entry 0 starts at offset 0.
        let filename_start;
        if idx == 0 {
            filename_start = 0;
        } else {
            filename_start = self.filename_end(idx - 1)
        }
        let filename_end = self.filename_end(idx);
        let filename_start = filename_start as usize;
        let filename_end = filename_end as usize;
        HgPath::new(&self.data[filename_start..filename_end])
    }

    /// internal function to return the flag field of the entry at a given
    /// index
    fn flags(&self, idx: u32) -> u8 {
        let idx = idx as usize;
        // the flags byte is the first byte of each index record
        self.index[idx * (Self::ENTRY_SIZE as usize)]
    }

    /// internal function to return the end of a filename part at a given index
    fn filename_end(&self, idx: u32) -> u32 {
        // bytes [1..5) of the record: big-endian u32 offset into `data`
        let start = (idx * Self::ENTRY_SIZE) + Self::FILENAME_START;
        let end = (idx * Self::ENTRY_SIZE) + Self::COPY_SOURCE_START;
        let start = start as usize;
        let end = end as usize;
        let raw = (&self.index[start..end])
            .try_into()
            .expect("failed to turn 4 bytes into 4 bytes");
        u32::from_be_bytes(raw)
    }

    /// internal function to return index of the copy source of the entry at a
    /// given index
    fn copy_idx(&self, idx: u32) -> u32 {
        // bytes [5..9) of the record: big-endian u32 entry index
        let start = (idx * Self::ENTRY_SIZE) + Self::COPY_SOURCE_START;
        let end = (idx + 1) * Self::ENTRY_SIZE;
        let start = start as usize;
        let end = end as usize;
        let raw = (&self.index[start..end])
            .try_into()
            .expect("failed to turn 4 bytes into 4 bytes");
        u32::from_be_bytes(raw)
    }

    /// Return an iterator over all the `Action` in this instance.
    fn iter_actions(&self, parent: Parent) -> ActionsIterator {
        ActionsIterator {
            changes: &self,
            parent: parent,
            current: 0,
        }
    }

    /// return the MergeCase value associated with a filename
    fn get_merge_case(&self, path: &HgPath) -> MergeCase {
        if self.nb_items == 0 {
            return MergeCase::Normal;
        }
        // Bisect over the index entries looking for `path`.
        let mut low_part = 0;
        let mut high_part = self.nb_items;

        while low_part < high_part {
            let cursor = (low_part + high_part - 1) / 2;
            let (flags, filename, _source) = self.entry(cursor);
            // NOTE(review): `Ordering::Less` advances `low_part`, the
            // reverse of a plain ascending bisect — this relies on the
            // on-disk ordering of the entries; confirm before touching.
            match path.cmp(filename) {
                Ordering::Less => low_part = cursor + 1,
                Ordering::Greater => high_part = cursor,
                Ordering::Equal => {
                    return match flags & ACTION_MASK {
                        MERGED => MergeCase::Merged,
                        SALVAGED => MergeCase::Salvaged,
                        _ => MergeCase::Normal,
                    };
                }
            }
        }
        // `path` is not part of this changeset's recorded files
        MergeCase::Normal
    }
}
283
283
/// Iterator yielding the `Action`s of a `ChangedFiles` relevant to one
/// parent → child edge (see `ChangedFiles::iter_actions`).
struct ActionsIterator<'a> {
    /// the change data being iterated over
    changes: &'a ChangedFiles<'a>,
    /// which parent's copy records should be reported
    parent: Parent,
    /// index of the next entry to examine
    current: u32,
}
289
289
290 impl<'a> Iterator for ActionsIterator<'a> {
290 impl<'a> Iterator for ActionsIterator<'a> {
291 type Item = Action<'a>;
291 type Item = Action<'a>;
292
292
293 fn next(&mut self) -> Option<Action<'a>> {
293 fn next(&mut self) -> Option<Action<'a>> {
294 let copy_flag = match self.parent {
294 let copy_flag = match self.parent {
295 Parent::FirstParent => P1_COPY,
295 Parent::FirstParent => P1_COPY,
296 Parent::SecondParent => P2_COPY,
296 Parent::SecondParent => P2_COPY,
297 };
297 };
298 while self.current < self.changes.nb_items {
298 while self.current < self.changes.nb_items {
299 let (flags, file, source) = self.changes.entry(self.current);
299 let (flags, file, source) = self.changes.entry(self.current);
300 self.current += 1;
300 self.current += 1;
301 if (flags & ACTION_MASK) == REMOVED {
301 if (flags & ACTION_MASK) == REMOVED {
302 return Some(Action::Removed(file));
302 return Some(Action::Removed(file));
303 }
303 }
304 let copy = flags & COPY_MASK;
304 let copy = flags & COPY_MASK;
305 if copy == copy_flag {
305 if copy == copy_flag {
306 return Some(Action::Copied(file, source));
306 return Some(Action::Copied(file, source));
307 }
307 }
308 }
308 }
309 return None;
309 return None;
310 }
310 }
311 }
311 }
312
312
/// A small struct whose purpose is to ensure lifetime of bytes referenced in
/// ChangedFiles
///
/// It is passed to the RevInfoMaker callback who can assign any necessary
/// content to the `data` attribute. The copy tracing code is responsible for
/// keeping the DataHolder alive at least as long as the ChangedFiles object.
pub struct DataHolder<D> {
    /// RevInfoMaker callback should assign data referenced by the
    /// ChangedFiles struct it return to this attribute. The DataHolder
    /// lifetime will be at least as long as the ChangedFiles one.
    pub data: Option<D>,
}
325
325
326 pub type RevInfoMaker<'a, D> =
326 pub type RevInfoMaker<'a, D> =
327 Box<dyn for<'r> Fn(Revision, &'r mut DataHolder<D>) -> RevInfo<'r> + 'a>;
327 Box<dyn for<'r> Fn(Revision, &'r mut DataHolder<D>) -> RevInfo<'r> + 'a>;
328
328
/// enum used to carry information about the parent → child edge currently
/// being processed
#[derive(Copy, Clone, Debug)]
enum Parent {
    /// The `p1(x) → x` edge
    FirstParent,
    /// The `p2(x) → x` edge
    SecondParent,
}
337
337
/// A small "tokenizer" responsible of turning full HgPath into lighter
/// PathToken
///
/// Dealing with small object, like integer is much faster, so HgPath input are
/// turned into integer "PathToken" and converted back in the end.
#[derive(Clone, Debug, Default)]
struct TwoWayPathMap {
    /// path → token lookup
    token: HashMap<HgPathBuf, PathToken>,
    /// token → path lookup; tokens are dense indices into this vector
    path: Vec<HgPathBuf>,
}
348
348
349 impl TwoWayPathMap {
349 impl TwoWayPathMap {
350 fn tokenize(&mut self, path: &HgPath) -> PathToken {
350 fn tokenize(&mut self, path: &HgPath) -> PathToken {
351 match self.token.get(path) {
351 match self.token.get(path) {
352 Some(a) => *a,
352 Some(a) => *a,
353 None => {
353 None => {
354 let a = self.token.len();
354 let a = self.token.len();
355 let buf = path.to_owned();
355 let buf = path.to_owned();
356 self.path.push(buf.clone());
356 self.path.push(buf.clone());
357 self.token.insert(buf, a);
357 self.token.insert(buf, a);
358 a
358 a
359 }
359 }
360 }
360 }
361 }
361 }
362
362
363 fn untokenize(&self, token: PathToken) -> &HgPathBuf {
363 fn untokenize(&self, token: PathToken) -> &HgPathBuf {
364 assert!(token < self.path.len(), format!("Unknown token: {}", token));
364 assert!(token < self.path.len(), format!("Unknown token: {}", token));
365 &self.path[token]
365 &self.path[token]
366 }
366 }
367 }
367 }
368
368
/// Same as mercurial.copies._combine_changeset_copies, but in Rust.
///
/// Arguments are:
///
/// revs: all revisions to be considered (assumed sorted so that parents are
///       processed before their children — the cache lookup relies on it;
///       TODO confirm with callers)
/// children_count: a {parent → number of children} mapping, used to know when
///       a parent's cached data can be dropped
/// target_rev: the final revision we are combining copies to
/// rev_info(rev): callback to get revision information:
///   * first parent
///   * second parent
///   * ChangedFiles
pub fn combine_changeset_copies<D>(
    revs: Vec<Revision>,
    mut children_count: HashMap<Revision, usize>,
    target_rev: Revision,
    rev_info: RevInfoMaker<D>,
) -> PathCopies {
    // rev → InternalPathCopies cache for revisions whose children have not
    // all been processed yet
    let mut all_copies = HashMap::new();

    let mut path_map = TwoWayPathMap::default();

    for rev in revs {
        let mut d: DataHolder<D> = DataHolder { data: None };
        let (p1, p2, changes) = rev_info(rev, &mut d);

        // We will chain the copies information accumulated for the parent with
        // the individual copies information of the current revision. Creating
        // a new TimeStampedPath for each `rev` → `children` vertex.
        // Retrieve data computed in a previous iteration
        let p1_copies = match p1 {
            NULL_REVISION => None,
            _ => get_and_clean_parent_copies(
                &mut all_copies,
                &mut children_count,
                p1,
            ), // will be None if the vertex is not to be traversed
        };
        let p2_copies = match p2 {
            NULL_REVISION => None,
            _ => get_and_clean_parent_copies(
                &mut all_copies,
                &mut children_count,
                p2,
            ), // will be None if the vertex is not to be traversed
        };
        // combine it with data for that revision
        let p1_copies = match p1_copies {
            None => None,
            Some(parent_copies) => Some(add_from_changes(
                &mut path_map,
                &parent_copies,
                &changes,
                Parent::FirstParent,
                rev,
            )),
        };
        let p2_copies = match p2_copies {
            None => None,
            Some(parent_copies) => Some(add_from_changes(
                &mut path_map,
                &parent_copies,
                &changes,
                Parent::SecondParent,
                rev,
            )),
        };
        // Data coming from a single traversed parent is kept as-is; only a
        // merge with data from both parents needs the combination step.
        let copies = match (p1_copies, p2_copies) {
            (None, None) => None,
            (c, None) => c,
            (None, c) => c,
            (Some(p1_copies), Some(p2_copies)) => Some(merge_copies_dict(
                // NOTE(review): p2 data is passed as "minor", p1 as "major" —
                // confirm against merge_copies_dict's parameter order.
                &path_map, rev, p2_copies, p1_copies, &changes,
            )),
        };
        if let Some(c) = copies {
            all_copies.insert(rev, c);
        }
    }

    // Drop internal information (like the timestamp) and return the final
    // mapping.
    let tt_result = all_copies
        .remove(&target_rev)
        .expect("target revision was not processed");
    let mut result = PathCopies::default();
    for (dest, tt_source) in tt_result {
        // entries whose source ended up deleted (path == None) carry no
        // final copy information
        if let Some(path) = tt_source.path {
            let path_dest = path_map.untokenize(dest).to_owned();
            let path_path = path_map.untokenize(path).to_owned();
            result.insert(path_dest, path_path);
        }
    }
    result
}
478
466
479 /// fetch previous computed information
467 /// fetch previous computed information
480 ///
468 ///
481 /// If no other children are expected to need this information, we drop it from
469 /// If no other children are expected to need this information, we drop it from
482 /// the cache.
470 /// the cache.
483 ///
471 ///
484 /// If parent is not part of the set we are expected to walk, return None.
472 /// If parent is not part of the set we are expected to walk, return None.
485 fn get_and_clean_parent_copies(
473 fn get_and_clean_parent_copies(
486 all_copies: &mut HashMap<Revision, InternalPathCopies>,
474 all_copies: &mut HashMap<Revision, InternalPathCopies>,
487 children_count: &mut HashMap<Revision, usize>,
475 children_count: &mut HashMap<Revision, usize>,
488 parent_rev: Revision,
476 parent_rev: Revision,
489 ) -> Option<InternalPathCopies> {
477 ) -> Option<InternalPathCopies> {
490 let count = children_count.get_mut(&parent_rev)?;
478 let count = children_count.get_mut(&parent_rev)?;
491 *count -= 1;
479 *count -= 1;
492 if *count == 0 {
480 if *count == 0 {
493 match all_copies.remove(&parent_rev) {
481 match all_copies.remove(&parent_rev) {
494 Some(c) => Some(c),
482 Some(c) => Some(c),
495 None => Some(InternalPathCopies::default()),
483 None => Some(InternalPathCopies::default()),
496 }
484 }
497 } else {
485 } else {
498 match all_copies.get(&parent_rev) {
486 match all_copies.get(&parent_rev) {
499 Some(c) => Some(c.clone()),
487 Some(c) => Some(c.clone()),
500 None => Some(InternalPathCopies::default()),
488 None => Some(InternalPathCopies::default()),
501 }
489 }
502 }
490 }
503 }
491 }
504
492
/// Combine ChangedFiles with some existing PathCopies information and return
/// the result
///
/// `base_copies` is the parent's copy information, `changes` the file changes
/// of `current_rev`, and `parent` selects which parent → child edge the copy
/// records should be read for.
fn add_from_changes(
    path_map: &mut TwoWayPathMap,
    base_copies: &InternalPathCopies,
    changes: &ChangedFiles,
    parent: Parent,
    current_rev: Revision,
) -> InternalPathCopies {
    let mut copies = base_copies.clone();
    for action in changes.iter_actions(parent) {
        match action {
            Action::Copied(path_dest, path_source) => {
                let dest = path_map.tokenize(path_dest);
                let source = path_map.tokenize(path_source);
                // Chain through the parent data: if the source was itself a
                // copy destination, point at its original source; a deleted
                // source (path == None) falls back to the source itself.
                let entry;
                if let Some(v) = base_copies.get(&source) {
                    entry = match &v.path {
                        Some(path) => Some((*(path)).to_owned()),
                        None => Some(source.to_owned()),
                    }
                } else {
                    entry = Some(source.to_owned());
                }
                // Each new entry is introduced by the children, we
                // record this information as we will need it to take
                // the right decision when merging conflicting copy
                // information. See merge_copies_dict for details.
                match copies.entry(dest) {
                    Entry::Vacant(slot) => {
                        let ttpc = CopySource::new(current_rev, entry);
                        slot.insert(ttpc);
                    }
                    Entry::Occupied(mut slot) => {
                        let ttpc = slot.get_mut();
                        ttpc.overwrite(current_rev, entry);
                    }
                }
            }
            Action::Removed(deleted_path) => {
                // We must drop copy information for removed file.
                //
                // We need to explicitly record them as dropped to
                // propagate this information when merging two
                // InternalPathCopies object.
                let deleted = path_map.tokenize(deleted_path);
                copies.entry(deleted).and_modify(|old| {
                    old.mark_delete(current_rev);
                });
            }
        }
    }
    copies
}
559
547
560 /// merge two copies-mapping together, minor and major
548 /// merge two copies-mapping together, minor and major
561 ///
549 ///
562 /// In case of conflict, value from "major" will be picked, unless in some
550 /// In case of conflict, value from "major" will be picked, unless in some
563 /// cases. See inline documentation for details.
551 /// cases. See inline documentation for details.
564 fn merge_copies_dict(
552 fn merge_copies_dict(
565 path_map: &TwoWayPathMap,
553 path_map: &TwoWayPathMap,
566 current_merge: Revision,
554 current_merge: Revision,
567 mut minor: InternalPathCopies,
555 mut minor: InternalPathCopies,
568 mut major: InternalPathCopies,
556 mut major: InternalPathCopies,
569 changes: &ChangedFiles,
557 changes: &ChangedFiles,
570 ) -> InternalPathCopies {
558 ) -> InternalPathCopies {
571 // This closure exist as temporary help while multiple developper are
559 // This closure exist as temporary help while multiple developper are
572 // actively working on this code. Feel free to re-inline it once this
560 // actively working on this code. Feel free to re-inline it once this
573 // code is more settled.
561 // code is more settled.
574 let cmp_value =
562 let cmp_value =
575 |dest: &PathToken, src_minor: &CopySource, src_major: &CopySource| {
563 |dest: &PathToken, src_minor: &CopySource, src_major: &CopySource| {
576 compare_value(
564 compare_value(
577 path_map,
565 path_map,
578 current_merge,
566 current_merge,
579 changes,
567 changes,
580 dest,
568 dest,
581 src_minor,
569 src_minor,
582 src_major,
570 src_major,
583 )
571 )
584 };
572 };
585 if minor.is_empty() {
573 if minor.is_empty() {
586 major
574 major
587 } else if major.is_empty() {
575 } else if major.is_empty() {
588 minor
576 minor
589 } else if minor.len() * 2 < major.len() {
577 } else if minor.len() * 2 < major.len() {
590 // Lets says we are merging two InternalPathCopies instance A and B.
578 // Lets says we are merging two InternalPathCopies instance A and B.
591 //
579 //
592 // If A contains N items, the merge result will never contains more
580 // If A contains N items, the merge result will never contains more
593 // than N values differents than the one in A
581 // than N values differents than the one in A
594 //
582 //
595 // If B contains M items, with M > N, the merge result will always
583 // If B contains M items, with M > N, the merge result will always
596 // result in a minimum of M - N value differents than the on in
584 // result in a minimum of M - N value differents than the on in
597 // A
585 // A
598 //
586 //
599 // As a result, if N < (M-N), we know that simply iterating over A will
587 // As a result, if N < (M-N), we know that simply iterating over A will
600 // yield less difference than iterating over the difference
588 // yield less difference than iterating over the difference
601 // between A and B.
589 // between A and B.
602 //
590 //
603 // This help performance a lot in case were a tiny
591 // This help performance a lot in case were a tiny
604 // InternalPathCopies is merged with a much larger one.
592 // InternalPathCopies is merged with a much larger one.
605 for (dest, src_minor) in minor {
593 for (dest, src_minor) in minor {
606 let src_major = major.get(&dest);
594 let src_major = major.get(&dest);
607 match src_major {
595 match src_major {
608 None => {
596 None => {
609 major.insert(dest, src_minor);
597 major.insert(dest, src_minor);
610 }
598 }
611 Some(src_major) => {
599 Some(src_major) => {
612 let (pick, overwrite) =
600 let (pick, overwrite) =
613 cmp_value(&dest, &src_minor, src_major);
601 cmp_value(&dest, &src_minor, src_major);
614 if overwrite {
602 if overwrite {
615 let src = match pick {
603 let src = match pick {
616 MergePick::Major => CopySource::new_from_merge(
604 MergePick::Major => CopySource::new_from_merge(
617 current_merge,
605 current_merge,
618 src_major,
606 src_major,
619 &src_minor,
607 &src_minor,
620 ),
608 ),
621 MergePick::Minor => CopySource::new_from_merge(
609 MergePick::Minor => CopySource::new_from_merge(
622 current_merge,
610 current_merge,
623 &src_minor,
611 &src_minor,
624 src_major,
612 src_major,
625 ),
613 ),
626 MergePick::Any => CopySource::new_from_merge(
614 MergePick::Any => CopySource::new_from_merge(
627 current_merge,
615 current_merge,
628 src_major,
616 src_major,
629 &src_minor,
617 &src_minor,
630 ),
618 ),
631 };
619 };
632 major.insert(dest, src);
620 major.insert(dest, src);
633 } else {
621 } else {
634 match pick {
622 match pick {
635 MergePick::Any | MergePick::Major => None,
623 MergePick::Any | MergePick::Major => None,
636 MergePick::Minor => major.insert(dest, src_minor),
624 MergePick::Minor => major.insert(dest, src_minor),
637 };
625 };
638 }
626 }
639 }
627 }
640 };
628 };
641 }
629 }
642 major
630 major
643 } else if major.len() * 2 < minor.len() {
631 } else if major.len() * 2 < minor.len() {
644 // This use the same rational than the previous block.
632 // This use the same rational than the previous block.
645 // (Check previous block documentation for details.)
633 // (Check previous block documentation for details.)
646 for (dest, src_major) in major {
634 for (dest, src_major) in major {
647 let src_minor = minor.get(&dest);
635 let src_minor = minor.get(&dest);
648 match src_minor {
636 match src_minor {
649 None => {
637 None => {
650 minor.insert(dest, src_major);
638 minor.insert(dest, src_major);
651 }
639 }
652 Some(src_minor) => {
640 Some(src_minor) => {
653 let (pick, overwrite) =
641 let (pick, overwrite) =
654 cmp_value(&dest, src_minor, &src_major);
642 cmp_value(&dest, src_minor, &src_major);
655 if overwrite {
643 if overwrite {
656 let src = match pick {
644 let src = match pick {
657 MergePick::Major => CopySource::new_from_merge(
645 MergePick::Major => CopySource::new_from_merge(
658 current_merge,
646 current_merge,
659 &src_major,
647 &src_major,
660 src_minor,
648 src_minor,
661 ),
649 ),
662 MergePick::Minor => CopySource::new_from_merge(
650 MergePick::Minor => CopySource::new_from_merge(
663 current_merge,
651 current_merge,
664 src_minor,
652 src_minor,
665 &src_major,
653 &src_major,
666 ),
654 ),
667 MergePick::Any => CopySource::new_from_merge(
655 MergePick::Any => CopySource::new_from_merge(
668 current_merge,
656 current_merge,
669 &src_major,
657 &src_major,
670 src_minor,
658 src_minor,
671 ),
659 ),
672 };
660 };
673 minor.insert(dest, src);
661 minor.insert(dest, src);
674 } else {
662 } else {
675 match pick {
663 match pick {
676 MergePick::Any | MergePick::Minor => None,
664 MergePick::Any | MergePick::Minor => None,
677 MergePick::Major => minor.insert(dest, src_major),
665 MergePick::Major => minor.insert(dest, src_major),
678 };
666 };
679 }
667 }
680 }
668 }
681 };
669 };
682 }
670 }
683 minor
671 minor
684 } else {
672 } else {
685 let mut override_minor = Vec::new();
673 let mut override_minor = Vec::new();
686 let mut override_major = Vec::new();
674 let mut override_major = Vec::new();
687
675
688 let mut to_major = |k: &PathToken, v: &CopySource| {
676 let mut to_major = |k: &PathToken, v: &CopySource| {
689 override_major.push((k.clone(), v.clone()))
677 override_major.push((k.clone(), v.clone()))
690 };
678 };
691 let mut to_minor = |k: &PathToken, v: &CopySource| {
679 let mut to_minor = |k: &PathToken, v: &CopySource| {
692 override_minor.push((k.clone(), v.clone()))
680 override_minor.push((k.clone(), v.clone()))
693 };
681 };
694
682
695 // The diff function leverage detection of the identical subpart if
683 // The diff function leverage detection of the identical subpart if
696 // minor and major has some common ancestors. This make it very
684 // minor and major has some common ancestors. This make it very
697 // fast is most case.
685 // fast is most case.
698 //
686 //
699 // In case where the two map are vastly different in size, the current
687 // In case where the two map are vastly different in size, the current
700 // approach is still slowish because the iteration will iterate over
688 // approach is still slowish because the iteration will iterate over
701 // all the "exclusive" content of the larger on. This situation can be
689 // all the "exclusive" content of the larger on. This situation can be
702 // frequent when the subgraph of revision we are processing has a lot
690 // frequent when the subgraph of revision we are processing has a lot
703 // of roots. Each roots adding they own fully new map to the mix (and
691 // of roots. Each roots adding they own fully new map to the mix (and
704 // likely a small map, if the path from the root to the "main path" is
692 // likely a small map, if the path from the root to the "main path" is
705 // small.
693 // small.
706 //
694 //
707 // We could do better by detecting such situation and processing them
695 // We could do better by detecting such situation and processing them
708 // differently.
696 // differently.
709 for d in minor.diff(&major) {
697 for d in minor.diff(&major) {
710 match d {
698 match d {
711 DiffItem::Add(k, v) => to_minor(k, v),
699 DiffItem::Add(k, v) => to_minor(k, v),
712 DiffItem::Remove(k, v) => to_major(k, v),
700 DiffItem::Remove(k, v) => to_major(k, v),
713 DiffItem::Update { old, new } => {
701 DiffItem::Update { old, new } => {
714 let (dest, src_major) = new;
702 let (dest, src_major) = new;
715 let (_, src_minor) = old;
703 let (_, src_minor) = old;
716 let (pick, overwrite) =
704 let (pick, overwrite) =
717 cmp_value(dest, src_minor, src_major);
705 cmp_value(dest, src_minor, src_major);
718 if overwrite {
706 if overwrite {
719 let src = match pick {
707 let src = match pick {
720 MergePick::Major => CopySource::new_from_merge(
708 MergePick::Major => CopySource::new_from_merge(
721 current_merge,
709 current_merge,
722 src_major,
710 src_major,
723 src_minor,
711 src_minor,
724 ),
712 ),
725 MergePick::Minor => CopySource::new_from_merge(
713 MergePick::Minor => CopySource::new_from_merge(
726 current_merge,
714 current_merge,
727 src_minor,
715 src_minor,
728 src_major,
716 src_major,
729 ),
717 ),
730 MergePick::Any => CopySource::new_from_merge(
718 MergePick::Any => CopySource::new_from_merge(
731 current_merge,
719 current_merge,
732 src_major,
720 src_major,
733 src_minor,
721 src_minor,
734 ),
722 ),
735 };
723 };
736 to_minor(dest, &src);
724 to_minor(dest, &src);
737 to_major(dest, &src);
725 to_major(dest, &src);
738 } else {
726 } else {
739 match pick {
727 match pick {
740 MergePick::Major => to_minor(dest, src_major),
728 MergePick::Major => to_minor(dest, src_major),
741 MergePick::Minor => to_major(dest, src_minor),
729 MergePick::Minor => to_major(dest, src_minor),
742 // If the two entry are identical, no need to do
730 // If the two entry are identical, no need to do
743 // anything (but diff should not have yield them)
731 // anything (but diff should not have yield them)
744 MergePick::Any => unreachable!(),
732 MergePick::Any => unreachable!(),
745 }
733 }
746 }
734 }
747 }
735 }
748 };
736 };
749 }
737 }
750
738
751 let updates;
739 let updates;
752 let mut result;
740 let mut result;
753 if override_major.is_empty() {
741 if override_major.is_empty() {
754 result = major
742 result = major
755 } else if override_minor.is_empty() {
743 } else if override_minor.is_empty() {
756 result = minor
744 result = minor
757 } else {
745 } else {
758 if override_minor.len() < override_major.len() {
746 if override_minor.len() < override_major.len() {
759 updates = override_minor;
747 updates = override_minor;
760 result = minor;
748 result = minor;
761 } else {
749 } else {
762 updates = override_major;
750 updates = override_major;
763 result = major;
751 result = major;
764 }
752 }
765 for (k, v) in updates {
753 for (k, v) in updates {
766 result.insert(k, v);
754 result.insert(k, v);
767 }
755 }
768 }
756 }
769 result
757 result
770 }
758 }
771 }
759 }
772
760
/// Represent the side that should prevail when merging two
/// `InternalPathCopies`.
enum MergePick {
    /// The "major" (p1) side prevails
    Major,
    /// The "minor" (p2) side prevails
    Minor,
    /// Any side could be used (because they are the same)
    Any,
}
783
771
784 /// decide which side prevails in case of conflicting values
772 /// decide which side prevails in case of conflicting values
785 #[allow(clippy::if_same_then_else)]
773 #[allow(clippy::if_same_then_else)]
786 fn compare_value(
774 fn compare_value(
787 path_map: &TwoWayPathMap,
775 path_map: &TwoWayPathMap,
788 current_merge: Revision,
776 current_merge: Revision,
789 changes: &ChangedFiles,
777 changes: &ChangedFiles,
790 dest: &PathToken,
778 dest: &PathToken,
791 src_minor: &CopySource,
779 src_minor: &CopySource,
792 src_major: &CopySource,
780 src_major: &CopySource,
793 ) -> (MergePick, bool) {
781 ) -> (MergePick, bool) {
794 if src_major.rev == current_merge {
782 if src_major.rev == current_merge {
795 if src_minor.rev == current_merge {
783 if src_minor.rev == current_merge {
796 if src_major.path.is_none() {
784 if src_major.path.is_none() {
797 // We cannot get different copy information for both p1 and p2
785 // We cannot get different copy information for both p1 and p2
798 // from the same revision. Unless this was a
786 // from the same revision. Unless this was a
799 // deletion.
787 // deletion.
800 //
788 //
801 // However the deletion might come over different data on each
789 // However the deletion might come over different data on each
802 // branch.
790 // branch.
803 let need_over = src_major.overwritten != src_minor.overwritten;
791 let need_over = src_major.overwritten != src_minor.overwritten;
804 (MergePick::Any, need_over)
792 (MergePick::Any, need_over)
805 } else {
793 } else {
806 unreachable!();
794 unreachable!();
807 }
795 }
808 } else {
796 } else {
809 // The last value comes the current merge, this value -will- win
797 // The last value comes the current merge, this value -will- win
810 // eventually.
798 // eventually.
811 (MergePick::Major, true)
799 (MergePick::Major, true)
812 }
800 }
813 } else if src_minor.rev == current_merge {
801 } else if src_minor.rev == current_merge {
814 // The last value comes the current merge, this value -will- win
802 // The last value comes the current merge, this value -will- win
815 // eventually.
803 // eventually.
816 (MergePick::Minor, true)
804 (MergePick::Minor, true)
817 } else if src_major.path == src_minor.path {
805 } else if src_major.path == src_minor.path {
818 // we have the same value, but from other source;
806 // we have the same value, but from other source;
819 if src_major.rev == src_minor.rev {
807 if src_major.rev == src_minor.rev {
820 // If the two entry are identical, they are both valid
808 // If the two entry are identical, they are both valid
821 debug_assert!(src_minor.overwritten == src_minor.overwritten);
809 debug_assert!(src_minor.overwritten == src_minor.overwritten);
822 (MergePick::Any, false)
810 (MergePick::Any, false)
823 } else if src_major.is_overwritten_by(src_minor) {
811 } else if src_major.is_overwritten_by(src_minor) {
824 (MergePick::Minor, false)
812 (MergePick::Minor, false)
825 } else if src_minor.is_overwritten_by(src_major) {
813 } else if src_minor.is_overwritten_by(src_major) {
826 (MergePick::Major, false)
814 (MergePick::Major, false)
827 } else {
815 } else {
828 (MergePick::Any, true)
816 (MergePick::Any, true)
829 }
817 }
830 } else if src_major.rev == src_minor.rev {
818 } else if src_major.rev == src_minor.rev {
831 // We cannot get copy information for both p1 and p2 in the
819 // We cannot get copy information for both p1 and p2 in the
832 // same rev. So this is the same value.
820 // same rev. So this is the same value.
833 unreachable!(
821 unreachable!(
834 "conflicting information from p1 and p2 in the same revision"
822 "conflicting information from p1 and p2 in the same revision"
835 );
823 );
836 } else {
824 } else {
837 let dest_path = path_map.untokenize(*dest);
825 let dest_path = path_map.untokenize(*dest);
838 let action = changes.get_merge_case(dest_path);
826 let action = changes.get_merge_case(dest_path);
839 if src_minor.path.is_some()
827 if src_minor.path.is_some()
840 && src_major.path.is_none()
828 && src_major.path.is_none()
841 && action == MergeCase::Salvaged
829 && action == MergeCase::Salvaged
842 {
830 {
843 // If the file is "deleted" in the major side but was
831 // If the file is "deleted" in the major side but was
844 // salvaged by the merge, we keep the minor side alive
832 // salvaged by the merge, we keep the minor side alive
845 (MergePick::Minor, true)
833 (MergePick::Minor, true)
846 } else if src_major.path.is_some()
834 } else if src_major.path.is_some()
847 && src_minor.path.is_none()
835 && src_minor.path.is_none()
848 && action == MergeCase::Salvaged
836 && action == MergeCase::Salvaged
849 {
837 {
850 // If the file is "deleted" in the minor side but was
838 // If the file is "deleted" in the minor side but was
851 // salvaged by the merge, unconditionnaly preserve the
839 // salvaged by the merge, unconditionnaly preserve the
852 // major side.
840 // major side.
853 (MergePick::Major, true)
841 (MergePick::Major, true)
854 } else if src_minor.is_overwritten_by(src_major) {
842 } else if src_minor.is_overwritten_by(src_major) {
855 // The information from the minor version are strictly older than
843 // The information from the minor version are strictly older than
856 // the major version
844 // the major version
857 if action == MergeCase::Merged {
845 if action == MergeCase::Merged {
858 // If the file was actively merged, its means some non-copy
846 // If the file was actively merged, its means some non-copy
859 // activity happened on the other branch. It
847 // activity happened on the other branch. It
860 // mean the older copy information are still relevant.
848 // mean the older copy information are still relevant.
861 //
849 //
862 // The major side wins such conflict.
850 // The major side wins such conflict.
863 (MergePick::Major, true)
851 (MergePick::Major, true)
864 } else {
852 } else {
865 // No activity on the minor branch, pick the newer one.
853 // No activity on the minor branch, pick the newer one.
866 (MergePick::Major, false)
854 (MergePick::Major, false)
867 }
855 }
868 } else if src_major.is_overwritten_by(src_minor) {
856 } else if src_major.is_overwritten_by(src_minor) {
869 if action == MergeCase::Merged {
857 if action == MergeCase::Merged {
870 // If the file was actively merged, its means some non-copy
858 // If the file was actively merged, its means some non-copy
871 // activity happened on the other branch. It
859 // activity happened on the other branch. It
872 // mean the older copy information are still relevant.
860 // mean the older copy information are still relevant.
873 //
861 //
874 // The major side wins such conflict.
862 // The major side wins such conflict.
875 (MergePick::Major, true)
863 (MergePick::Major, true)
876 } else {
864 } else {
877 // No activity on the minor branch, pick the newer one.
865 // No activity on the minor branch, pick the newer one.
878 (MergePick::Minor, false)
866 (MergePick::Minor, false)
879 }
867 }
880 } else if src_minor.path.is_none() {
868 } else if src_minor.path.is_none() {
881 // the minor side has no relevant information, pick the alive one
869 // the minor side has no relevant information, pick the alive one
882 (MergePick::Major, true)
870 (MergePick::Major, true)
883 } else if src_major.path.is_none() {
871 } else if src_major.path.is_none() {
884 // the major side has no relevant information, pick the alive one
872 // the major side has no relevant information, pick the alive one
885 (MergePick::Minor, true)
873 (MergePick::Minor, true)
886 } else {
874 } else {
887 // by default the major side wins
875 // by default the major side wins
888 (MergePick::Major, true)
876 (MergePick::Major, true)
889 }
877 }
890 }
878 }
891 }
879 }
General Comments 0
You need to be logged in to leave comments. Login now