copies-rust: use simpler overwrite when value on both side are identical...
Author: marmoute
Changeset: r47326:aa19d60a (branch: default)
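The change itself is confined to the file-deletion handling in `chain_changes` (see the hunk around new lines 568-573 below): when the copy-source entries inherited from the two parents are identical, the plain `mark_delete` overwrite is now enough, and the more expensive `mark_delete_with_pair`, which also folds the other entry's revision and `overwritten` set in, is only used when the two values actually differ. A sketch of the resulting logic, using the same names as in the diff:

    if cs1 == cs2 {
        // Both parents carry the same value (same `rev`, hence the same path
        // and `overwritten` set), so one overwrite record is enough.
        cs1.mark_delete(current_rev);
    } else {
        // The values differ: also record the other side's `rev` and its
        // `overwritten` set as overwritten by this deletion.
        cs1.mark_delete_with_pair(current_rev, &cs2);
    }
    e2.insert(cs1.clone());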
@@ -1,929 +1,933 @@
1 use crate::utils::hg_path::HgPath;
1 use crate::utils::hg_path::HgPath;
2 use crate::utils::hg_path::HgPathBuf;
2 use crate::utils::hg_path::HgPathBuf;
3 use crate::Revision;
3 use crate::Revision;
4 use crate::NULL_REVISION;
4 use crate::NULL_REVISION;
5
5
6 use im_rc::ordmap::DiffItem;
6 use im_rc::ordmap::DiffItem;
7 use im_rc::ordmap::Entry;
7 use im_rc::ordmap::Entry;
8 use im_rc::ordmap::OrdMap;
8 use im_rc::ordmap::OrdMap;
9
9
10 use std::cmp::Ordering;
10 use std::cmp::Ordering;
11 use std::collections::HashMap;
11 use std::collections::HashMap;
12 use std::collections::HashSet;
12 use std::collections::HashSet;
13 use std::convert::TryInto;
13 use std::convert::TryInto;
14
14
15 pub type PathCopies = HashMap<HgPathBuf, HgPathBuf>;
15 pub type PathCopies = HashMap<HgPathBuf, HgPathBuf>;
16
16
17 type PathToken = usize;
17 type PathToken = usize;
18
18
19 #[derive(Clone, Debug)]
19 #[derive(Clone, Debug)]
20 struct CopySource {
20 struct CopySource {
21 /// revision at which the copy information was added
21 /// revision at which the copy information was added
22 rev: Revision,
22 rev: Revision,
23 /// the copy source, (Set to None in case of deletion of the associated
23 /// the copy source, (Set to None in case of deletion of the associated
24 /// key)
24 /// key)
25 path: Option<PathToken>,
25 path: Option<PathToken>,
26 /// a set of previous `CopySource.rev` values directly or indirectly
26 /// a set of previous `CopySource.rev` values directly or indirectly
27 /// overwritten by this one.
27 /// overwritten by this one.
28 overwritten: HashSet<Revision>,
28 overwritten: HashSet<Revision>,
29 }
29 }
30
30
31 impl CopySource {
31 impl CopySource {
32 /// create a new CopySource
32 /// create a new CopySource
33 ///
33 ///
34 /// Use this when no previous copy source existed.
34 /// Use this when no previous copy source existed.
35 fn new(rev: Revision, path: Option<PathToken>) -> Self {
35 fn new(rev: Revision, path: Option<PathToken>) -> Self {
36 Self {
36 Self {
37 rev,
37 rev,
38 path,
38 path,
39 overwritten: HashSet::new(),
39 overwritten: HashSet::new(),
40 }
40 }
41 }
41 }
42
42
43 /// create a new CopySource from merging two others
43 /// create a new CopySource from merging two others
44 ///
44 ///
45 /// Use this when merging two InternalPathCopies requires active merging of
45 /// Use this when merging two InternalPathCopies requires active merging of
46 /// some entries.
46 /// some entries.
47 fn new_from_merge(rev: Revision, winner: &Self, loser: &Self) -> Self {
47 fn new_from_merge(rev: Revision, winner: &Self, loser: &Self) -> Self {
48 let mut overwritten = HashSet::new();
48 let mut overwritten = HashSet::new();
49 overwritten.extend(winner.overwritten.iter().copied());
49 overwritten.extend(winner.overwritten.iter().copied());
50 overwritten.extend(loser.overwritten.iter().copied());
50 overwritten.extend(loser.overwritten.iter().copied());
51 overwritten.insert(winner.rev);
51 overwritten.insert(winner.rev);
52 overwritten.insert(loser.rev);
52 overwritten.insert(loser.rev);
53 Self {
53 Self {
54 rev,
54 rev,
55 path: winner.path,
55 path: winner.path,
56 overwritten: overwritten,
56 overwritten: overwritten,
57 }
57 }
58 }
58 }
59
59
60 /// Update the value of a pre-existing CopySource
60 /// Update the value of a pre-existing CopySource
61 ///
61 ///
62 /// Use this when recording copy information from parent → child edges
62 /// Use this when recording copy information from parent → child edges
63 fn overwrite(&mut self, rev: Revision, path: Option<PathToken>) {
63 fn overwrite(&mut self, rev: Revision, path: Option<PathToken>) {
64 self.overwritten.insert(self.rev);
64 self.overwritten.insert(self.rev);
65 self.rev = rev;
65 self.rev = rev;
66 self.path = path;
66 self.path = path;
67 }
67 }
68
68
69 /// Mark pre-existing copy information as "dropped" by a file deletion
69 /// Mark pre-existing copy information as "dropped" by a file deletion
70 ///
70 ///
71 /// Use this when recording copy information from parent → child edges
71 /// Use this when recording copy information from parent → child edges
72 fn mark_delete(&mut self, rev: Revision) {
72 fn mark_delete(&mut self, rev: Revision) {
73 self.overwritten.insert(self.rev);
73 self.overwritten.insert(self.rev);
74 self.rev = rev;
74 self.rev = rev;
75 self.path = None;
75 self.path = None;
76 }
76 }
77
77
78 /// Mark pre-existing copy information as "dropped" by a file deletion
78 /// Mark pre-existing copy information as "dropped" by a file deletion
79 ///
79 ///
80 /// Use this when recording copy information from parent → child edges
80 /// Use this when recording copy information from parent → child edges
81 fn mark_delete_with_pair(&mut self, rev: Revision, other: &Self) {
81 fn mark_delete_with_pair(&mut self, rev: Revision, other: &Self) {
82 self.overwritten.insert(self.rev);
82 self.overwritten.insert(self.rev);
83 if other.rev != rev {
83 if other.rev != rev {
84 self.overwritten.insert(other.rev);
84 self.overwritten.insert(other.rev);
85 }
85 }
86 self.overwritten.extend(other.overwritten.iter().copied());
86 self.overwritten.extend(other.overwritten.iter().copied());
87 self.rev = rev;
87 self.rev = rev;
88 self.path = None;
88 self.path = None;
89 }
89 }
90
90
91 fn is_overwritten_by(&self, other: &Self) -> bool {
91 fn is_overwritten_by(&self, other: &Self) -> bool {
92 other.overwritten.contains(&self.rev)
92 other.overwritten.contains(&self.rev)
93 }
93 }
94 }
94 }
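A minimal sketch of how the overwrite bookkeeping above behaves, written as if it were a `#[test]` inside this module (the type and its fields are private); the revision numbers are arbitrary:

    #[test]
    fn copy_source_overwrite_sketch() {
        // A copy recorded at revision 1, pointing at path token 0.
        let mut a = CopySource::new(1, Some(0));
        let b = a.clone();
        // Overwriting records the previous revision in `overwritten`.
        a.overwrite(2, Some(1));
        assert!(a.overwritten.contains(&1));
        assert!(b.is_overwritten_by(&a));
        assert!(!a.is_overwritten_by(&b));
        // A deletion is just an overwrite with `path = None`.
        a.mark_delete(3);
        assert!(a.path.is_none());
        assert!(a.overwritten.contains(&2));
    }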
95
95
96 // For the same "dest", content generated for a given revision will always be
96 // For the same "dest", content generated for a given revision will always be
97 // the same.
97 // the same.
98 impl PartialEq for CopySource {
98 impl PartialEq for CopySource {
99 fn eq(&self, other: &Self) -> bool {
99 fn eq(&self, other: &Self) -> bool {
100 #[cfg(debug_assertions)]
100 #[cfg(debug_assertions)]
101 {
101 {
102 if self.rev == other.rev {
102 if self.rev == other.rev {
103 debug_assert!(self.path == other.path);
103 debug_assert!(self.path == other.path);
104 debug_assert!(self.overwritten == other.overwritten);
104 debug_assert!(self.overwritten == other.overwritten);
105 }
105 }
106 }
106 }
107 self.rev == other.rev
107 self.rev == other.rev
108 }
108 }
109 }
109 }
110
110
111 /// maps CopyDestination to Copy Source (+ a "timestamp" for the operation)
111 /// maps CopyDestination to Copy Source (+ a "timestamp" for the operation)
112 type InternalPathCopies = OrdMap<PathToken, CopySource>;
112 type InternalPathCopies = OrdMap<PathToken, CopySource>;
113
113
114 /// hold parent 1, parent 2 and relevant files actions.
114 /// hold parent 1, parent 2 and relevant files actions.
115 pub type RevInfo<'a> = (Revision, Revision, ChangedFiles<'a>);
115 pub type RevInfo<'a> = (Revision, Revision, ChangedFiles<'a>);
116
116
117 /// represent the files affected by a changeset
117 /// represent the files affected by a changeset
118 ///
118 ///
119 /// This holds a subset of mercurial.metadata.ChangingFiles as we do not need
119 /// This holds a subset of mercurial.metadata.ChangingFiles as we do not need
120 /// all the data categories tracked by it.
120 /// all the data categories tracked by it.
121 /// This holds a subset of mercurial.metadata.ChangingFiles as we do not need
121 /// This holds a subset of mercurial.metadata.ChangingFiles as we do not need
122 /// all the data categories tracked by it.
122 /// all the data categories tracked by it.
123 pub struct ChangedFiles<'a> {
123 pub struct ChangedFiles<'a> {
124 nb_items: u32,
124 nb_items: u32,
125 index: &'a [u8],
125 index: &'a [u8],
126 data: &'a [u8],
126 data: &'a [u8],
127 }
127 }
128
128
129 /// Represent active changes that affect the copy tracing.
129 /// Represent active changes that affect the copy tracing.
130 enum Action<'a> {
130 enum Action<'a> {
131 /// The parent → children edge is removing a file
131 /// The parent → children edge is removing a file
132 ///
132 ///
133 /// (actually, this could be the edge from the other parent, but it does
133 /// (actually, this could be the edge from the other parent, but it does
134 /// not matter)
134 /// not matter)
135 Removed(&'a HgPath),
135 Removed(&'a HgPath),
136 /// The parent → children edge introduces copy information between (dest,
136 /// The parent → children edge introduces copy information between (dest,
137 /// source)
137 /// source)
138 CopiedFromP1(&'a HgPath, &'a HgPath),
138 CopiedFromP1(&'a HgPath, &'a HgPath),
139 CopiedFromP2(&'a HgPath, &'a HgPath),
139 CopiedFromP2(&'a HgPath, &'a HgPath),
140 }
140 }
141
141
142 /// This expresses the possible "special" cases we can get in a merge
142 /// This expresses the possible "special" cases we can get in a merge
143 ///
143 ///
144 /// See mercurial/metadata.py for details on these values.
144 /// See mercurial/metadata.py for details on these values.
145 #[derive(PartialEq)]
145 #[derive(PartialEq)]
146 enum MergeCase {
146 enum MergeCase {
147 /// Merged: file had history on both side that needed to be merged
147 /// Merged: file had history on both side that needed to be merged
148 Merged,
148 Merged,
149 /// Salvaged: file was candidate for deletion, but survived the merge
149 /// Salvaged: file was candidate for deletion, but survived the merge
150 Salvaged,
150 Salvaged,
151 /// Normal: Not one of the two cases above
151 /// Normal: Not one of the two cases above
152 Normal,
152 Normal,
153 }
153 }
154
154
155 type FileChange<'a> = (u8, &'a HgPath, &'a HgPath);
155 type FileChange<'a> = (u8, &'a HgPath, &'a HgPath);
156
156
157 const EMPTY: &[u8] = b"";
157 const EMPTY: &[u8] = b"";
158 const COPY_MASK: u8 = 3;
158 const COPY_MASK: u8 = 3;
159 const P1_COPY: u8 = 2;
159 const P1_COPY: u8 = 2;
160 const P2_COPY: u8 = 3;
160 const P2_COPY: u8 = 3;
161 const ACTION_MASK: u8 = 28;
161 const ACTION_MASK: u8 = 28;
162 const REMOVED: u8 = 12;
162 const REMOVED: u8 = 12;
163 const MERGED: u8 = 8;
163 const MERGED: u8 = 8;
164 const SALVAGED: u8 = 16;
164 const SALVAGED: u8 = 16;
165
165
166 impl<'a> ChangedFiles<'a> {
166 impl<'a> ChangedFiles<'a> {
167 const INDEX_START: usize = 4;
167 const INDEX_START: usize = 4;
168 const ENTRY_SIZE: u32 = 9;
168 const ENTRY_SIZE: u32 = 9;
169 const FILENAME_START: u32 = 1;
169 const FILENAME_START: u32 = 1;
170 const COPY_SOURCE_START: u32 = 5;
170 const COPY_SOURCE_START: u32 = 5;
171
171
172 pub fn new(data: &'a [u8]) -> Self {
172 pub fn new(data: &'a [u8]) -> Self {
173 assert!(
173 assert!(
174 data.len() >= 4,
174 data.len() >= 4,
175 "data size ({}) is too small to contain the header (4)",
175 "data size ({}) is too small to contain the header (4)",
176 data.len()
176 data.len()
177 );
177 );
178 let nb_items_raw: [u8; 4] = (&data[0..=3])
178 let nb_items_raw: [u8; 4] = (&data[0..=3])
179 .try_into()
179 .try_into()
180 .expect("failed to turn 4 bytes into 4 bytes");
180 .expect("failed to turn 4 bytes into 4 bytes");
181 let nb_items = u32::from_be_bytes(nb_items_raw);
181 let nb_items = u32::from_be_bytes(nb_items_raw);
182
182
183 let index_size = (nb_items * Self::ENTRY_SIZE) as usize;
183 let index_size = (nb_items * Self::ENTRY_SIZE) as usize;
184 let index_end = Self::INDEX_START + index_size;
184 let index_end = Self::INDEX_START + index_size;
185
185
186 assert!(
186 assert!(
187 data.len() >= index_end,
187 data.len() >= index_end,
188 "data size ({}) is too small to fit the index_data ({})",
188 "data size ({}) is too small to fit the index_data ({})",
189 data.len(),
189 data.len(),
190 index_end
190 index_end
191 );
191 );
192
192
193 let ret = ChangedFiles {
193 let ret = ChangedFiles {
194 nb_items,
194 nb_items,
195 index: &data[Self::INDEX_START..index_end],
195 index: &data[Self::INDEX_START..index_end],
196 data: &data[index_end..],
196 data: &data[index_end..],
197 };
197 };
198 let max_data = ret.filename_end(nb_items - 1) as usize;
198 let max_data = ret.filename_end(nb_items - 1) as usize;
199 assert!(
199 assert!(
200 ret.data.len() >= max_data,
200 ret.data.len() >= max_data,
201 "data size ({}) is too small to fit all data ({})",
201 "data size ({}) is too small to fit all data ({})",
202 data.len(),
202 data.len(),
203 index_end + max_data
203 index_end + max_data
204 );
204 );
205 ret
205 ret
206 }
206 }
207
207
208 pub fn new_empty() -> Self {
208 pub fn new_empty() -> Self {
209 ChangedFiles {
209 ChangedFiles {
210 nb_items: 0,
210 nb_items: 0,
211 index: EMPTY,
211 index: EMPTY,
212 data: EMPTY,
212 data: EMPTY,
213 }
213 }
214 }
214 }
215
215
216 /// internal function to return an individual entry at a given index
216 /// internal function to return an individual entry at a given index
217 fn entry(&'a self, idx: u32) -> FileChange<'a> {
217 fn entry(&'a self, idx: u32) -> FileChange<'a> {
218 if idx >= self.nb_items {
218 if idx >= self.nb_items {
219 panic!(
219 panic!(
220 "index for entry is higher that the number of file {} >= {}",
220 "index for entry is higher that the number of file {} >= {}",
221 idx, self.nb_items
221 idx, self.nb_items
222 )
222 )
223 }
223 }
224 let flags = self.flags(idx);
224 let flags = self.flags(idx);
225 let filename = self.filename(idx);
225 let filename = self.filename(idx);
226 let copy_idx = self.copy_idx(idx);
226 let copy_idx = self.copy_idx(idx);
227 let copy_source = self.filename(copy_idx);
227 let copy_source = self.filename(copy_idx);
228 (flags, filename, copy_source)
228 (flags, filename, copy_source)
229 }
229 }
230
230
231 /// internal function to return the filename of the entry at a given index
231 /// internal function to return the filename of the entry at a given index
232 fn filename(&self, idx: u32) -> &HgPath {
232 fn filename(&self, idx: u32) -> &HgPath {
233 let filename_start;
233 let filename_start;
234 if idx == 0 {
234 if idx == 0 {
235 filename_start = 0;
235 filename_start = 0;
236 } else {
236 } else {
237 filename_start = self.filename_end(idx - 1)
237 filename_start = self.filename_end(idx - 1)
238 }
238 }
239 let filename_end = self.filename_end(idx);
239 let filename_end = self.filename_end(idx);
240 let filename_start = filename_start as usize;
240 let filename_start = filename_start as usize;
241 let filename_end = filename_end as usize;
241 let filename_end = filename_end as usize;
242 HgPath::new(&self.data[filename_start..filename_end])
242 HgPath::new(&self.data[filename_start..filename_end])
243 }
243 }
244
244
245 /// internal function to return the flag field of the entry at a given
245 /// internal function to return the flag field of the entry at a given
246 /// index
246 /// index
247 fn flags(&self, idx: u32) -> u8 {
247 fn flags(&self, idx: u32) -> u8 {
248 let idx = idx as usize;
248 let idx = idx as usize;
249 self.index[idx * (Self::ENTRY_SIZE as usize)]
249 self.index[idx * (Self::ENTRY_SIZE as usize)]
250 }
250 }
251
251
252 /// internal function to return the end of a filename part at a given index
252 /// internal function to return the end of a filename part at a given index
253 fn filename_end(&self, idx: u32) -> u32 {
253 fn filename_end(&self, idx: u32) -> u32 {
254 let start = (idx * Self::ENTRY_SIZE) + Self::FILENAME_START;
254 let start = (idx * Self::ENTRY_SIZE) + Self::FILENAME_START;
255 let end = (idx * Self::ENTRY_SIZE) + Self::COPY_SOURCE_START;
255 let end = (idx * Self::ENTRY_SIZE) + Self::COPY_SOURCE_START;
256 let start = start as usize;
256 let start = start as usize;
257 let end = end as usize;
257 let end = end as usize;
258 let raw = (&self.index[start..end])
258 let raw = (&self.index[start..end])
259 .try_into()
259 .try_into()
260 .expect("failed to turn 4 bytes into 4 bytes");
260 .expect("failed to turn 4 bytes into 4 bytes");
261 u32::from_be_bytes(raw)
261 u32::from_be_bytes(raw)
262 }
262 }
263
263
264 /// internal function to return index of the copy source of the entry at a
264 /// internal function to return index of the copy source of the entry at a
265 /// given index
265 /// given index
266 fn copy_idx(&self, idx: u32) -> u32 {
266 fn copy_idx(&self, idx: u32) -> u32 {
267 let start = (idx * Self::ENTRY_SIZE) + Self::COPY_SOURCE_START;
267 let start = (idx * Self::ENTRY_SIZE) + Self::COPY_SOURCE_START;
268 let end = (idx + 1) * Self::ENTRY_SIZE;
268 let end = (idx + 1) * Self::ENTRY_SIZE;
269 let start = start as usize;
269 let start = start as usize;
270 let end = end as usize;
270 let end = end as usize;
271 let raw = (&self.index[start..end])
271 let raw = (&self.index[start..end])
272 .try_into()
272 .try_into()
273 .expect("failed to turn 4 bytes into 4 bytes");
273 .expect("failed to turn 4 bytes into 4 bytes");
274 u32::from_be_bytes(raw)
274 u32::from_be_bytes(raw)
275 }
275 }
276
276
277 /// Return an iterator over all the `Action` in this instance.
277 /// Return an iterator over all the `Action` in this instance.
278 fn iter_actions(&self) -> ActionsIterator {
278 fn iter_actions(&self) -> ActionsIterator {
279 ActionsIterator {
279 ActionsIterator {
280 changes: &self,
280 changes: &self,
281 current: 0,
281 current: 0,
282 }
282 }
283 }
283 }
284
284
285 /// return the MergeCase value associated with a filename
285 /// return the MergeCase value associated with a filename
286 fn get_merge_case(&self, path: &HgPath) -> MergeCase {
286 fn get_merge_case(&self, path: &HgPath) -> MergeCase {
287 if self.nb_items == 0 {
287 if self.nb_items == 0 {
288 return MergeCase::Normal;
288 return MergeCase::Normal;
289 }
289 }
290 let mut low_part = 0;
290 let mut low_part = 0;
291 let mut high_part = self.nb_items;
291 let mut high_part = self.nb_items;
292
292
293 while low_part < high_part {
293 while low_part < high_part {
294 let cursor = (low_part + high_part - 1) / 2;
294 let cursor = (low_part + high_part - 1) / 2;
295 let (flags, filename, _source) = self.entry(cursor);
295 let (flags, filename, _source) = self.entry(cursor);
296 match path.cmp(filename) {
296 match path.cmp(filename) {
297 Ordering::Less => low_part = cursor + 1,
297 Ordering::Less => low_part = cursor + 1,
298 Ordering::Greater => high_part = cursor,
298 Ordering::Greater => high_part = cursor,
299 Ordering::Equal => {
299 Ordering::Equal => {
300 return match flags & ACTION_MASK {
300 return match flags & ACTION_MASK {
301 MERGED => MergeCase::Merged,
301 MERGED => MergeCase::Merged,
302 SALVAGED => MergeCase::Salvaged,
302 SALVAGED => MergeCase::Salvaged,
303 _ => MergeCase::Normal,
303 _ => MergeCase::Normal,
304 };
304 };
305 }
305 }
306 }
306 }
307 }
307 }
308 MergeCase::Normal
308 MergeCase::Normal
309 }
309 }
310 }
310 }
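The layout consumed by `ChangedFiles::new` follows directly from the constants above: a 4-byte big-endian entry count, then one 9-byte index entry per file (1 flag byte, a 4-byte big-endian end offset of the filename, a 4-byte big-endian index of the copy source), followed by the concatenated filenames. A hand-built sketch, written as if it were a `#[test]` in this module; the bytes are made up for illustration and are not data produced by Mercurial:

    #[test]
    fn changed_files_layout_sketch() {
        let mut raw: Vec<u8> = Vec::new();
        raw.extend_from_slice(&[0, 0, 0, 2]); // nb_items = 2
        // entry 0: file "a", no flags, filename ends at offset 1, copy idx 0
        raw.extend_from_slice(&[0, 0, 0, 0, 1, 0, 0, 0, 0]);
        // entry 1: file "b", copied from p1, filename ends at offset 2,
        // copy source is entry 0 ("a")
        raw.extend_from_slice(&[P1_COPY, 0, 0, 0, 2, 0, 0, 0, 0]);
        raw.extend_from_slice(b"ab"); // data section: "a" then "b"

        let changes = ChangedFiles::new(&raw);
        match changes.iter_actions().next() {
            Some(Action::CopiedFromP1(dest, src)) => {
                assert!(dest == HgPath::new(b"b"));
                assert!(src == HgPath::new(b"a"));
            }
            _ => panic!("expected a single p1 copy action"),
        }
    }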
311
311
312 struct ActionsIterator<'a> {
312 struct ActionsIterator<'a> {
313 changes: &'a ChangedFiles<'a>,
313 changes: &'a ChangedFiles<'a>,
314 current: u32,
314 current: u32,
315 }
315 }
316
316
317 impl<'a> Iterator for ActionsIterator<'a> {
317 impl<'a> Iterator for ActionsIterator<'a> {
318 type Item = Action<'a>;
318 type Item = Action<'a>;
319
319
320 fn next(&mut self) -> Option<Action<'a>> {
320 fn next(&mut self) -> Option<Action<'a>> {
321 while self.current < self.changes.nb_items {
321 while self.current < self.changes.nb_items {
322 let (flags, file, source) = self.changes.entry(self.current);
322 let (flags, file, source) = self.changes.entry(self.current);
323 self.current += 1;
323 self.current += 1;
324 if (flags & ACTION_MASK) == REMOVED {
324 if (flags & ACTION_MASK) == REMOVED {
325 return Some(Action::Removed(file));
325 return Some(Action::Removed(file));
326 }
326 }
327 let copy = flags & COPY_MASK;
327 let copy = flags & COPY_MASK;
328 if copy == P1_COPY {
328 if copy == P1_COPY {
329 return Some(Action::CopiedFromP1(file, source));
329 return Some(Action::CopiedFromP1(file, source));
330 } else if copy == P2_COPY {
330 } else if copy == P2_COPY {
331 return Some(Action::CopiedFromP2(file, source));
331 return Some(Action::CopiedFromP2(file, source));
332 }
332 }
333 }
333 }
334 return None;
334 return None;
335 }
335 }
336 }
336 }
337
337
338 /// A small struct whose purpose is to ensure lifetime of bytes referenced in
338 /// A small struct whose purpose is to ensure lifetime of bytes referenced in
339 /// ChangedFiles
339 /// ChangedFiles
340 ///
340 ///
341 /// It is passed to the RevInfoMaker callback who can assign any necessary
341 /// It is passed to the RevInfoMaker callback who can assign any necessary
342 /// content to the `data` attribute. The copy tracing code is responsible for
342 /// content to the `data` attribute. The copy tracing code is responsible for
343 /// keeping the DataHolder alive at least as long as the ChangedFiles object.
343 /// keeping the DataHolder alive at least as long as the ChangedFiles object.
344 pub struct DataHolder<D> {
344 pub struct DataHolder<D> {
345 /// RevInfoMaker callback should assign data referenced by the
345 /// RevInfoMaker callback should assign data referenced by the
346 /// ChangedFiles struct it return to this attribute. The DataHolder
346 /// ChangedFiles struct it return to this attribute. The DataHolder
347 /// lifetime will be at least as long as the ChangedFiles one.
347 /// lifetime will be at least as long as the ChangedFiles one.
348 pub data: Option<D>,
348 pub data: Option<D>,
349 }
349 }
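The intended usage pattern, sketched with a hypothetical byte source (`fetch_sidedata` is made up for illustration): the callback parks the owned bytes in the `DataHolder` and returns a `ChangedFiles` that borrows from it, so the borrowed data lives as long as the holder.

    // Hypothetical helper; in real code the bytes come from the changeset's
    // stored copy metadata.
    fn fetch_sidedata(_rev: Revision) -> Option<Vec<u8>> {
        None
    }

    let rev_info: RevInfoMaker<Vec<u8>> = Box::new(|rev, d| {
        let changes = match fetch_sidedata(rev) {
            Some(raw) => {
                // Hand ownership of the bytes to the holder, then borrow them.
                d.data = Some(raw);
                ChangedFiles::new(d.data.as_ref().unwrap())
            }
            None => ChangedFiles::new_empty(),
        };
        (NULL_REVISION, NULL_REVISION, changes)
    });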
350
350
351 pub type RevInfoMaker<'a, D> =
351 pub type RevInfoMaker<'a, D> =
352 Box<dyn for<'r> Fn(Revision, &'r mut DataHolder<D>) -> RevInfo<'r> + 'a>;
352 Box<dyn for<'r> Fn(Revision, &'r mut DataHolder<D>) -> RevInfo<'r> + 'a>;
353
353
354 /// A small "tokenizer" responsible of turning full HgPath into lighter
354 /// A small "tokenizer" responsible of turning full HgPath into lighter
355 /// PathToken
355 /// PathToken
356 ///
356 ///
357 /// Dealing with small object, like integer is much faster, so HgPath input are
357 /// Dealing with small object, like integer is much faster, so HgPath input are
358 /// turned into integer "PathToken" and converted back in the end.
358 /// turned into integer "PathToken" and converted back in the end.
359 #[derive(Clone, Debug, Default)]
359 #[derive(Clone, Debug, Default)]
360 struct TwoWayPathMap {
360 struct TwoWayPathMap {
361 token: HashMap<HgPathBuf, PathToken>,
361 token: HashMap<HgPathBuf, PathToken>,
362 path: Vec<HgPathBuf>,
362 path: Vec<HgPathBuf>,
363 }
363 }
364
364
365 impl TwoWayPathMap {
365 impl TwoWayPathMap {
366 fn tokenize(&mut self, path: &HgPath) -> PathToken {
366 fn tokenize(&mut self, path: &HgPath) -> PathToken {
367 match self.token.get(path) {
367 match self.token.get(path) {
368 Some(a) => *a,
368 Some(a) => *a,
369 None => {
369 None => {
370 let a = self.token.len();
370 let a = self.token.len();
371 let buf = path.to_owned();
371 let buf = path.to_owned();
372 self.path.push(buf.clone());
372 self.path.push(buf.clone());
373 self.token.insert(buf, a);
373 self.token.insert(buf, a);
374 a
374 a
375 }
375 }
376 }
376 }
377 }
377 }
378
378
379 fn untokenize(&self, token: PathToken) -> &HgPathBuf {
379 fn untokenize(&self, token: PathToken) -> &HgPathBuf {
380 assert!(token < self.path.len(), "Unknown token: {}", token);
380 assert!(token < self.path.len(), "Unknown token: {}", token);
381 &self.path[token]
381 &self.path[token]
382 }
382 }
383 }
383 }
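A small usage sketch of the interning behaviour, assuming it runs inside this module (the type is private): the same path always yields the same token, and `untokenize` gives the path back.

    let mut map = TwoWayPathMap::default();
    let t1 = map.tokenize(HgPath::new(b"dir/file"));
    let t2 = map.tokenize(HgPath::new(b"dir/file"));
    assert_eq!(t1, t2); // same path, same token
    assert_eq!(map.untokenize(t1).as_bytes(), &b"dir/file"[..]);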
384
384
385 /// Same as mercurial.copies._combine_changeset_copies, but in Rust.
385 /// Same as mercurial.copies._combine_changeset_copies, but in Rust.
386 ///
386 ///
387 /// Arguments are:
387 /// Arguments are:
388 ///
388 ///
389 /// revs: all revisions to be considered
389 /// revs: all revisions to be considered
390 /// children: a {parent → [children]} mapping
390 /// children: a {parent → [children]} mapping
391 /// target_rev: the final revision we are combining copies to
391 /// target_rev: the final revision we are combining copies to
392 /// rev_info(rev): callback to get revision information:
392 /// rev_info(rev): callback to get revision information:
393 /// * first parent
393 /// * first parent
394 /// * second parent
394 /// * second parent
395 /// * ChangedFiles
395 /// * ChangedFiles
396 /// isancestors(low_rev, high_rev): callback to check if a revision is an
396 /// isancestors(low_rev, high_rev): callback to check if a revision is an
397 /// ancestor of another
397 /// ancestor of another
398 pub fn combine_changeset_copies<D>(
398 pub fn combine_changeset_copies<D>(
399 revs: Vec<Revision>,
399 revs: Vec<Revision>,
400 mut children_count: HashMap<Revision, usize>,
400 mut children_count: HashMap<Revision, usize>,
401 target_rev: Revision,
401 target_rev: Revision,
402 rev_info: RevInfoMaker<D>,
402 rev_info: RevInfoMaker<D>,
403 ) -> PathCopies {
403 ) -> PathCopies {
404 let mut all_copies = HashMap::new();
404 let mut all_copies = HashMap::new();
405
405
406 let mut path_map = TwoWayPathMap::default();
406 let mut path_map = TwoWayPathMap::default();
407
407
408 for rev in revs {
408 for rev in revs {
409 let mut d: DataHolder<D> = DataHolder { data: None };
409 let mut d: DataHolder<D> = DataHolder { data: None };
410 let (p1, p2, changes) = rev_info(rev, &mut d);
410 let (p1, p2, changes) = rev_info(rev, &mut d);
411
411
412 // We will chain the copies information accumulated for the parent with
412 // We will chain the copies information accumulated for the parent with
413 // the individual copies information of the current revision, creating a
413 // the individual copies information of the current revision, creating a
414 // new TimeStampedPath for each `rev` → `children` vertex.
414 // new TimeStampedPath for each `rev` → `children` vertex.
415 // Retrieve data computed in a previous iteration
415 // Retrieve data computed in a previous iteration
416 let p1_copies = match p1 {
416 let p1_copies = match p1 {
417 NULL_REVISION => None,
417 NULL_REVISION => None,
418 _ => get_and_clean_parent_copies(
418 _ => get_and_clean_parent_copies(
419 &mut all_copies,
419 &mut all_copies,
420 &mut children_count,
420 &mut children_count,
421 p1,
421 p1,
422 ), // will be None if the vertex is not to be traversed
422 ), // will be None if the vertex is not to be traversed
423 };
423 };
424 let p2_copies = match p2 {
424 let p2_copies = match p2 {
425 NULL_REVISION => None,
425 NULL_REVISION => None,
426 _ => get_and_clean_parent_copies(
426 _ => get_and_clean_parent_copies(
427 &mut all_copies,
427 &mut all_copies,
428 &mut children_count,
428 &mut children_count,
429 p2,
429 p2,
430 ), // will be None if the vertex is not to be traversed
430 ), // will be None if the vertex is not to be traversed
431 };
431 };
432 // combine it with data for that revision
432 // combine it with data for that revision
433 let (p1_copies, p2_copies) =
433 let (p1_copies, p2_copies) =
434 chain_changes(&mut path_map, p1_copies, p2_copies, &changes, rev);
434 chain_changes(&mut path_map, p1_copies, p2_copies, &changes, rev);
435 let copies = match (p1_copies, p2_copies) {
435 let copies = match (p1_copies, p2_copies) {
436 (None, None) => None,
436 (None, None) => None,
437 (c, None) => c,
437 (c, None) => c,
438 (None, c) => c,
438 (None, c) => c,
439 (Some(p1_copies), Some(p2_copies)) => Some(merge_copies_dict(
439 (Some(p1_copies), Some(p2_copies)) => Some(merge_copies_dict(
440 &path_map, rev, p2_copies, p1_copies, &changes,
440 &path_map, rev, p2_copies, p1_copies, &changes,
441 )),
441 )),
442 };
442 };
443 if let Some(c) = copies {
443 if let Some(c) = copies {
444 all_copies.insert(rev, c);
444 all_copies.insert(rev, c);
445 }
445 }
446 }
446 }
447
447
448 // Drop internal information (like the timestamp) and return the final
448 // Drop internal information (like the timestamp) and return the final
449 // mapping.
449 // mapping.
450 let tt_result = all_copies
450 let tt_result = all_copies
451 .remove(&target_rev)
451 .remove(&target_rev)
452 .expect("target revision was not processed");
452 .expect("target revision was not processed");
453 let mut result = PathCopies::default();
453 let mut result = PathCopies::default();
454 for (dest, tt_source) in tt_result {
454 for (dest, tt_source) in tt_result {
455 if let Some(path) = tt_source.path {
455 if let Some(path) = tt_source.path {
456 let path_dest = path_map.untokenize(dest).to_owned();
456 let path_dest = path_map.untokenize(dest).to_owned();
457 let path_path = path_map.untokenize(path).to_owned();
457 let path_path = path_map.untokenize(path).to_owned();
458 result.insert(path_dest, path_path);
458 result.insert(path_dest, path_path);
459 }
459 }
460 }
460 }
461 result
461 result
462 }
462 }
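A hypothetical caller sketch (the graph and revision numbers are made up): a two-changeset linear history 0 -> 1 in which neither changeset records any copy, so the resulting map for the target revision is empty. The closure plays the role of the `rev_info` callback described above.

    let rev_info: RevInfoMaker<()> = Box::new(|rev, _d| {
        let p1 = if rev == 0 { NULL_REVISION } else { 0 };
        (p1, NULL_REVISION, ChangedFiles::new_empty())
    });
    let mut children_count = HashMap::new();
    children_count.insert(0, 1); // revision 0 feeds exactly one child (revision 1)
    let copies =
        combine_changeset_copies(vec![0, 1], children_count, 1, rev_info);
    assert!(copies.is_empty());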
463
463
464 /// fetch previously computed information
464 /// fetch previously computed information
465 ///
465 ///
466 /// If no other children are expected to need this information, we drop it from
466 /// If no other children are expected to need this information, we drop it from
467 /// the cache.
467 /// the cache.
468 ///
468 ///
469 /// If parent is not part of the set we are expected to walk, return None.
469 /// If parent is not part of the set we are expected to walk, return None.
470 fn get_and_clean_parent_copies(
470 fn get_and_clean_parent_copies(
471 all_copies: &mut HashMap<Revision, InternalPathCopies>,
471 all_copies: &mut HashMap<Revision, InternalPathCopies>,
472 children_count: &mut HashMap<Revision, usize>,
472 children_count: &mut HashMap<Revision, usize>,
473 parent_rev: Revision,
473 parent_rev: Revision,
474 ) -> Option<InternalPathCopies> {
474 ) -> Option<InternalPathCopies> {
475 let count = children_count.get_mut(&parent_rev)?;
475 let count = children_count.get_mut(&parent_rev)?;
476 *count -= 1;
476 *count -= 1;
477 if *count == 0 {
477 if *count == 0 {
478 match all_copies.remove(&parent_rev) {
478 match all_copies.remove(&parent_rev) {
479 Some(c) => Some(c),
479 Some(c) => Some(c),
480 None => Some(InternalPathCopies::default()),
480 None => Some(InternalPathCopies::default()),
481 }
481 }
482 } else {
482 } else {
483 match all_copies.get(&parent_rev) {
483 match all_copies.get(&parent_rev) {
484 Some(c) => Some(c.clone()),
484 Some(c) => Some(c.clone()),
485 None => Some(InternalPathCopies::default()),
485 None => Some(InternalPathCopies::default()),
486 }
486 }
487 }
487 }
488 }
488 }
489
489
490 /// Combine ChangedFiles with some existing PathCopies information and return
490 /// Combine ChangedFiles with some existing PathCopies information and return
491 /// the result
491 /// the result
492 fn chain_changes(
492 fn chain_changes(
493 path_map: &mut TwoWayPathMap,
493 path_map: &mut TwoWayPathMap,
494 base_p1_copies: Option<InternalPathCopies>,
494 base_p1_copies: Option<InternalPathCopies>,
495 base_p2_copies: Option<InternalPathCopies>,
495 base_p2_copies: Option<InternalPathCopies>,
496 changes: &ChangedFiles,
496 changes: &ChangedFiles,
497 current_rev: Revision,
497 current_rev: Revision,
498 ) -> (Option<InternalPathCopies>, Option<InternalPathCopies>) {
498 ) -> (Option<InternalPathCopies>, Option<InternalPathCopies>) {
499 // Fast path the "nothing to do" case.
499 // Fast path the "nothing to do" case.
500 if let (None, None) = (&base_p1_copies, &base_p2_copies) {
500 if let (None, None) = (&base_p1_copies, &base_p2_copies) {
501 return (None, None);
501 return (None, None);
502 }
502 }
503
503
504 let mut p1_copies = base_p1_copies.clone();
504 let mut p1_copies = base_p1_copies.clone();
505 let mut p2_copies = base_p2_copies.clone();
505 let mut p2_copies = base_p2_copies.clone();
506 for action in changes.iter_actions() {
506 for action in changes.iter_actions() {
507 match action {
507 match action {
508 Action::CopiedFromP1(path_dest, path_source) => {
508 Action::CopiedFromP1(path_dest, path_source) => {
509 match &mut p1_copies {
509 match &mut p1_copies {
510 None => (), // This is not a vertex we should proceed.
510 None => (), // This is not a vertex we should proceed.
511 Some(copies) => add_one_copy(
511 Some(copies) => add_one_copy(
512 current_rev,
512 current_rev,
513 path_map,
513 path_map,
514 copies,
514 copies,
515 base_p1_copies.as_ref().unwrap(),
515 base_p1_copies.as_ref().unwrap(),
516 path_dest,
516 path_dest,
517 path_source,
517 path_source,
518 ),
518 ),
519 }
519 }
520 }
520 }
521 Action::CopiedFromP2(path_dest, path_source) => {
521 Action::CopiedFromP2(path_dest, path_source) => {
522 match &mut p2_copies {
522 match &mut p2_copies {
523 None => (), // This is not a vertex we should proceed.
523 None => (), // This is not a vertex we should proceed.
524 Some(copies) => add_one_copy(
524 Some(copies) => add_one_copy(
525 current_rev,
525 current_rev,
526 path_map,
526 path_map,
527 copies,
527 copies,
528 base_p2_copies.as_ref().unwrap(),
528 base_p2_copies.as_ref().unwrap(),
529 path_dest,
529 path_dest,
530 path_source,
530 path_source,
531 ),
531 ),
532 }
532 }
533 }
533 }
534 Action::Removed(deleted_path) => {
534 Action::Removed(deleted_path) => {
535 // We must drop copy information for removed file.
535 // We must drop copy information for removed file.
536 //
536 //
537 // We need to explicitly record them as dropped to
537 // We need to explicitly record them as dropped to
538 // propagate this information when merging two
538 // propagate this information when merging two
539 // InternalPathCopies object.
539 // InternalPathCopies object.
540 let deleted = path_map.tokenize(deleted_path);
540 let deleted = path_map.tokenize(deleted_path);
541
541
542 let p1_entry = match &mut p1_copies {
542 let p1_entry = match &mut p1_copies {
543 None => None,
543 None => None,
544 Some(copies) => match copies.entry(deleted) {
544 Some(copies) => match copies.entry(deleted) {
545 Entry::Occupied(e) => Some(e),
545 Entry::Occupied(e) => Some(e),
546 Entry::Vacant(_) => None,
546 Entry::Vacant(_) => None,
547 },
547 },
548 };
548 };
549 let p2_entry = match &mut p2_copies {
549 let p2_entry = match &mut p2_copies {
550 None => None,
550 None => None,
551 Some(copies) => match copies.entry(deleted) {
551 Some(copies) => match copies.entry(deleted) {
552 Entry::Occupied(e) => Some(e),
552 Entry::Occupied(e) => Some(e),
553 Entry::Vacant(_) => None,
553 Entry::Vacant(_) => None,
554 },
554 },
555 };
555 };
556
556
557 match (p1_entry, p2_entry) {
557 match (p1_entry, p2_entry) {
558 (None, None) => (),
558 (None, None) => (),
559 (Some(mut e), None) => {
559 (Some(mut e), None) => {
560 e.get_mut().mark_delete(current_rev)
560 e.get_mut().mark_delete(current_rev)
561 }
561 }
562 (None, Some(mut e)) => {
562 (None, Some(mut e)) => {
563 e.get_mut().mark_delete(current_rev)
563 e.get_mut().mark_delete(current_rev)
564 }
564 }
565 (Some(mut e1), Some(mut e2)) => {
565 (Some(mut e1), Some(mut e2)) => {
566 let cs1 = e1.get_mut();
566 let cs1 = e1.get_mut();
567 let cs2 = e2.get();
567 let cs2 = e2.get();
568 cs1.mark_delete_with_pair(current_rev, &cs2);
568 if cs1 == cs2 {
569 cs1.mark_delete(current_rev);
570 } else {
571 cs1.mark_delete_with_pair(current_rev, &cs2);
572 }
569 e2.insert(cs1.clone());
573 e2.insert(cs1.clone());
570 }
574 }
571 }
575 }
572 }
576 }
573 }
577 }
574 }
578 }
575 (p1_copies, p2_copies)
579 (p1_copies, p2_copies)
576 }
580 }
577
581
578 // insert one new copy information in an InternalPathCopies
582 // insert one new copy information in an InternalPathCopies
579 //
583 //
580 // This deals with chaining and overwriting.
584 // This deals with chaining and overwriting.
581 fn add_one_copy(
585 fn add_one_copy(
582 current_rev: Revision,
586 current_rev: Revision,
583 path_map: &mut TwoWayPathMap,
587 path_map: &mut TwoWayPathMap,
584 copies: &mut InternalPathCopies,
588 copies: &mut InternalPathCopies,
585 base_copies: &InternalPathCopies,
589 base_copies: &InternalPathCopies,
586 path_dest: &HgPath,
590 path_dest: &HgPath,
587 path_source: &HgPath,
591 path_source: &HgPath,
588 ) {
592 ) {
589 let dest = path_map.tokenize(path_dest);
593 let dest = path_map.tokenize(path_dest);
590 let source = path_map.tokenize(path_source);
594 let source = path_map.tokenize(path_source);
591 let entry;
595 let entry;
592 if let Some(v) = base_copies.get(&source) {
596 if let Some(v) = base_copies.get(&source) {
593 entry = match &v.path {
597 entry = match &v.path {
594 Some(path) => Some((*(path)).to_owned()),
598 Some(path) => Some((*(path)).to_owned()),
595 None => Some(source.to_owned()),
599 None => Some(source.to_owned()),
596 }
600 }
597 } else {
601 } else {
598 entry = Some(source.to_owned());
602 entry = Some(source.to_owned());
599 }
603 }
600 // Each new entry is introduced by the children, we
604 // Each new entry is introduced by the children, we
601 // record this information as we will need it to take
605 // record this information as we will need it to take
602 // the right decision when merging conflicting copy
606 // the right decision when merging conflicting copy
603 // information. See merge_copies_dict for details.
607 // information. See merge_copies_dict for details.
604 match copies.entry(dest) {
608 match copies.entry(dest) {
605 Entry::Vacant(slot) => {
609 Entry::Vacant(slot) => {
606 let ttpc = CopySource::new(current_rev, entry);
610 let ttpc = CopySource::new(current_rev, entry);
607 slot.insert(ttpc);
611 slot.insert(ttpc);
608 }
612 }
609 Entry::Occupied(mut slot) => {
613 Entry::Occupied(mut slot) => {
610 let ttpc = slot.get_mut();
614 let ttpc = slot.get_mut();
611 ttpc.overwrite(current_rev, entry);
615 ttpc.overwrite(current_rev, entry);
612 }
616 }
613 }
617 }
614 }
618 }
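The chaining behaviour is easiest to see on a tiny example, sketched here as if it ran inside this module: if the parent side already records `b` as copied from `a`, a new `b` → `c` copy recorded by the current revision is stored as `c` coming from `a`.

    let mut path_map = TwoWayPathMap::default();
    let a = path_map.tokenize(HgPath::new(b"a"));
    let b = path_map.tokenize(HgPath::new(b"b"));
    let c = path_map.tokenize(HgPath::new(b"c"));
    let mut base = InternalPathCopies::default();
    base.insert(b, CopySource::new(1, Some(a))); // parent: b was copied from a
    let mut copies = base.clone();
    add_one_copy(
        2, // current revision
        &mut path_map,
        &mut copies,
        &base,
        HgPath::new(b"c"),
        HgPath::new(b"b"),
    );
    // The new entry chains through b and points at the original source a.
    assert_eq!(copies.get(&c).unwrap().path, Some(a));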
615
619
616 /// merge two copies-mapping together, minor and major
620 /// merge two copies-mapping together, minor and major
617 ///
621 ///
618 /// In case of conflict, the value from "major" will be picked, except in some
622 /// In case of conflict, the value from "major" will be picked, except in some
619 /// cases. See inline documentation for details.
623 /// cases. See inline documentation for details.
620 fn merge_copies_dict(
624 fn merge_copies_dict(
621 path_map: &TwoWayPathMap,
625 path_map: &TwoWayPathMap,
622 current_merge: Revision,
626 current_merge: Revision,
623 mut minor: InternalPathCopies,
627 mut minor: InternalPathCopies,
624 mut major: InternalPathCopies,
628 mut major: InternalPathCopies,
625 changes: &ChangedFiles,
629 changes: &ChangedFiles,
626 ) -> InternalPathCopies {
630 ) -> InternalPathCopies {
627 // This closure exists as a temporary helper while multiple developers are
631 // This closure exists as a temporary helper while multiple developers are
628 // actively working on this code. Feel free to re-inline it once this
632 // actively working on this code. Feel free to re-inline it once this
629 // code is more settled.
633 // code is more settled.
630 let cmp_value =
634 let cmp_value =
631 |dest: &PathToken, src_minor: &CopySource, src_major: &CopySource| {
635 |dest: &PathToken, src_minor: &CopySource, src_major: &CopySource| {
632 compare_value(
636 compare_value(
633 path_map,
637 path_map,
634 current_merge,
638 current_merge,
635 changes,
639 changes,
636 dest,
640 dest,
637 src_minor,
641 src_minor,
638 src_major,
642 src_major,
639 )
643 )
640 };
644 };
641 if minor.is_empty() {
645 if minor.is_empty() {
642 major
646 major
643 } else if major.is_empty() {
647 } else if major.is_empty() {
644 minor
648 minor
645 } else if minor.len() * 2 < major.len() {
649 } else if minor.len() * 2 < major.len() {
646 // Let's say we are merging two InternalPathCopies instances A and B.
650 // Let's say we are merging two InternalPathCopies instances A and B.
647 //
651 //
648 // If A contains N items, the merge result will never contain more
652 // If A contains N items, the merge result will never contain more
649 // than N values different from the ones in A.
653 // than N values different from the ones in A.
650 //
654 //
651 // If B contains M items, with M > N, the merge result will always
655 // If B contains M items, with M > N, the merge result will always
652 // contain at least M - N values different from the ones in
656 // contain at least M - N values different from the ones in
653 // A.
657 // A.
654 //
658 //
655 // As a result, if N < (M-N), we know that simply iterating over A will
659 // As a result, if N < (M-N), we know that simply iterating over A will
656 // yield fewer differences than iterating over the difference
660 // yield fewer differences than iterating over the difference
657 // between A and B.
661 // between A and B.
658 //
662 //
659 // This helps performance a lot in cases where a tiny
663 // This helps performance a lot in cases where a tiny
660 // InternalPathCopies is merged with a much larger one.
664 // InternalPathCopies is merged with a much larger one.
661 for (dest, src_minor) in minor {
665 for (dest, src_minor) in minor {
662 let src_major = major.get(&dest);
666 let src_major = major.get(&dest);
663 match src_major {
667 match src_major {
664 None => {
668 None => {
665 major.insert(dest, src_minor);
669 major.insert(dest, src_minor);
666 }
670 }
667 Some(src_major) => {
671 Some(src_major) => {
668 let (pick, overwrite) =
672 let (pick, overwrite) =
669 cmp_value(&dest, &src_minor, src_major);
673 cmp_value(&dest, &src_minor, src_major);
670 if overwrite {
674 if overwrite {
671 let src = match pick {
675 let src = match pick {
672 MergePick::Major => CopySource::new_from_merge(
676 MergePick::Major => CopySource::new_from_merge(
673 current_merge,
677 current_merge,
674 src_major,
678 src_major,
675 &src_minor,
679 &src_minor,
676 ),
680 ),
677 MergePick::Minor => CopySource::new_from_merge(
681 MergePick::Minor => CopySource::new_from_merge(
678 current_merge,
682 current_merge,
679 &src_minor,
683 &src_minor,
680 src_major,
684 src_major,
681 ),
685 ),
682 MergePick::Any => CopySource::new_from_merge(
686 MergePick::Any => CopySource::new_from_merge(
683 current_merge,
687 current_merge,
684 src_major,
688 src_major,
685 &src_minor,
689 &src_minor,
686 ),
690 ),
687 };
691 };
688 major.insert(dest, src);
692 major.insert(dest, src);
689 } else {
693 } else {
690 match pick {
694 match pick {
691 MergePick::Any | MergePick::Major => None,
695 MergePick::Any | MergePick::Major => None,
692 MergePick::Minor => major.insert(dest, src_minor),
696 MergePick::Minor => major.insert(dest, src_minor),
693 };
697 };
694 }
698 }
695 }
699 }
696 };
700 };
697 }
701 }
698 major
702 major
699 } else if major.len() * 2 < minor.len() {
703 } else if major.len() * 2 < minor.len() {
700 // This uses the same rationale as the previous block.
704 // This uses the same rationale as the previous block.
701 // (Check previous block documentation for details.)
705 // (Check previous block documentation for details.)
702 for (dest, src_major) in major {
706 for (dest, src_major) in major {
703 let src_minor = minor.get(&dest);
707 let src_minor = minor.get(&dest);
704 match src_minor {
708 match src_minor {
705 None => {
709 None => {
706 minor.insert(dest, src_major);
710 minor.insert(dest, src_major);
707 }
711 }
708 Some(src_minor) => {
712 Some(src_minor) => {
709 let (pick, overwrite) =
713 let (pick, overwrite) =
710 cmp_value(&dest, src_minor, &src_major);
714 cmp_value(&dest, src_minor, &src_major);
711 if overwrite {
715 if overwrite {
712 let src = match pick {
716 let src = match pick {
713 MergePick::Major => CopySource::new_from_merge(
717 MergePick::Major => CopySource::new_from_merge(
714 current_merge,
718 current_merge,
715 &src_major,
719 &src_major,
716 src_minor,
720 src_minor,
717 ),
721 ),
718 MergePick::Minor => CopySource::new_from_merge(
722 MergePick::Minor => CopySource::new_from_merge(
719 current_merge,
723 current_merge,
720 src_minor,
724 src_minor,
721 &src_major,
725 &src_major,
722 ),
726 ),
723 MergePick::Any => CopySource::new_from_merge(
727 MergePick::Any => CopySource::new_from_merge(
724 current_merge,
728 current_merge,
725 &src_major,
729 &src_major,
726 src_minor,
730 src_minor,
727 ),
731 ),
728 };
732 };
729 minor.insert(dest, src);
733 minor.insert(dest, src);
730 } else {
734 } else {
731 match pick {
735 match pick {
732 MergePick::Any | MergePick::Minor => None,
736 MergePick::Any | MergePick::Minor => None,
733 MergePick::Major => minor.insert(dest, src_major),
737 MergePick::Major => minor.insert(dest, src_major),
734 };
738 };
735 }
739 }
736 }
740 }
737 };
741 };
738 }
742 }
739 minor
743 minor
740 } else {
744 } else {
741 let mut override_minor = Vec::new();
745 let mut override_minor = Vec::new();
742 let mut override_major = Vec::new();
746 let mut override_major = Vec::new();
743
747
744 let mut to_major = |k: &PathToken, v: &CopySource| {
748 let mut to_major = |k: &PathToken, v: &CopySource| {
745 override_major.push((k.clone(), v.clone()))
749 override_major.push((k.clone(), v.clone()))
746 };
750 };
747 let mut to_minor = |k: &PathToken, v: &CopySource| {
751 let mut to_minor = |k: &PathToken, v: &CopySource| {
748 override_minor.push((k.clone(), v.clone()))
752 override_minor.push((k.clone(), v.clone()))
749 };
753 };
750
754
751 // The diff function leverages detection of the identical subparts when
755 // The diff function leverages detection of the identical subparts when
752 // minor and major have some common ancestors. This makes it very
756 // minor and major have some common ancestors. This makes it very
753 // fast in most cases.
757 // fast in most cases.
754 //
758 //
755 // In cases where the two maps are vastly different in size, the current
759 // In cases where the two maps are vastly different in size, the current
756 // approach is still slowish because the iteration will iterate over
760 // approach is still slowish because the iteration will iterate over
757 // all the "exclusive" content of the larger one. This situation can be
761 // all the "exclusive" content of the larger one. This situation can be
758 // frequent when the subgraph of revisions we are processing has a lot
762 // frequent when the subgraph of revisions we are processing has a lot
759 // of roots. Each root adds its own fully new map to the mix (and
763 // of roots. Each root adds its own fully new map to the mix (and
760 // likely a small map, if the path from the root to the "main path" is
764 // likely a small map, if the path from the root to the "main path" is
761 // small).
765 // small).
762 //
766 //
763 // We could do better by detecting such situation and processing them
767 // We could do better by detecting such situation and processing them
764 // differently.
768 // differently.
765 for d in minor.diff(&major) {
769 for d in minor.diff(&major) {
766 match d {
770 match d {
767 DiffItem::Add(k, v) => to_minor(k, v),
771 DiffItem::Add(k, v) => to_minor(k, v),
768 DiffItem::Remove(k, v) => to_major(k, v),
772 DiffItem::Remove(k, v) => to_major(k, v),
769 DiffItem::Update { old, new } => {
773 DiffItem::Update { old, new } => {
770 let (dest, src_major) = new;
774 let (dest, src_major) = new;
771 let (_, src_minor) = old;
775 let (_, src_minor) = old;
772 let (pick, overwrite) =
776 let (pick, overwrite) =
773 cmp_value(dest, src_minor, src_major);
777 cmp_value(dest, src_minor, src_major);
774 if overwrite {
778 if overwrite {
775 let src = match pick {
779 let src = match pick {
776 MergePick::Major => CopySource::new_from_merge(
780 MergePick::Major => CopySource::new_from_merge(
777 current_merge,
781 current_merge,
778 src_major,
782 src_major,
779 src_minor,
783 src_minor,
780 ),
784 ),
781 MergePick::Minor => CopySource::new_from_merge(
785 MergePick::Minor => CopySource::new_from_merge(
782 current_merge,
786 current_merge,
783 src_minor,
787 src_minor,
784 src_major,
788 src_major,
785 ),
789 ),
786 MergePick::Any => CopySource::new_from_merge(
790 MergePick::Any => CopySource::new_from_merge(
787 current_merge,
791 current_merge,
788 src_major,
792 src_major,
789 src_minor,
793 src_minor,
790 ),
794 ),
791 };
795 };
792 to_minor(dest, &src);
796 to_minor(dest, &src);
793 to_major(dest, &src);
797 to_major(dest, &src);
794 } else {
798 } else {
795 match pick {
799 match pick {
796 MergePick::Major => to_minor(dest, src_major),
800 MergePick::Major => to_minor(dest, src_major),
797 MergePick::Minor => to_major(dest, src_minor),
801 MergePick::Minor => to_major(dest, src_minor),
798 // If the two entries are identical, no need to do
802 // If the two entries are identical, no need to do
799 // anything (but diff should not have yielded them)
803 // anything (but diff should not have yielded them)
800 MergePick::Any => unreachable!(),
804 MergePick::Any => unreachable!(),
801 }
805 }
802 }
806 }
803 }
807 }
804 };
808 };
805 }
809 }
806
810
807 let updates;
811 let updates;
808 let mut result;
812 let mut result;
809 if override_major.is_empty() {
813 if override_major.is_empty() {
810 result = major
814 result = major
811 } else if override_minor.is_empty() {
815 } else if override_minor.is_empty() {
812 result = minor
816 result = minor
813 } else {
817 } else {
814 if override_minor.len() < override_major.len() {
818 if override_minor.len() < override_major.len() {
815 updates = override_minor;
819 updates = override_minor;
816 result = minor;
820 result = minor;
817 } else {
821 } else {
818 updates = override_major;
822 updates = override_major;
819 result = major;
823 result = major;
820 }
824 }
821 for (k, v) in updates {
825 for (k, v) in updates {
822 result.insert(k, v);
826 result.insert(k, v);
823 }
827 }
824 }
828 }
825 result
829 result
826 }
830 }
827 }
831 }
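The `minor.diff(&major)` branch above relies on im-rc's structural sharing: when the two maps descend from a common ancestor, `diff` only reports the diverging entries instead of walking both maps in full. A standalone sketch of that property (depending only on the `im-rc` crate, with plain integers standing in for `CopySource`):

    use im_rc::ordmap::DiffItem;
    use im_rc::OrdMap;

    let base: OrdMap<u32, u32> = (0..1000).map(|i| (i, i)).collect();
    let mut other = base.clone();
    other.insert(5, 42); // a single divergent entry
    let changed: Vec<_> = base.diff(&other).collect();
    assert_eq!(changed.len(), 1); // only the divergence is reported
    match &changed[0] {
        DiffItem::Update { old, new } => {
            // `old` comes from the receiver (`base`), `new` from the argument.
            assert_eq!((old.0, old.1), (&5, &5));
            assert_eq!((new.0, new.1), (&5, &42));
        }
        _ => unreachable!("a key present on both sides shows up as an Update"),
    }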
828
832
829 /// represent the side that should prevail when merging two
833 /// represent the side that should prevail when merging two
830 /// InternalPathCopies
834 /// InternalPathCopies
831 enum MergePick {
835 enum MergePick {
832 /// The "major" (p1) side prevails
836 /// The "major" (p1) side prevails
833 Major,
837 Major,
834 /// The "minor" (p2) side prevails
838 /// The "minor" (p2) side prevails
835 Minor,
839 Minor,
836 /// Any side could be used (because they are the same)
840 /// Any side could be used (because they are the same)
837 Any,
841 Any,
838 }
842 }
839
843
840 /// decide which side prevails in case of conflicting values
844 /// decide which side prevails in case of conflicting values
841 #[allow(clippy::if_same_then_else)]
845 #[allow(clippy::if_same_then_else)]
842 fn compare_value(
846 fn compare_value(
843 path_map: &TwoWayPathMap,
847 path_map: &TwoWayPathMap,
844 current_merge: Revision,
848 current_merge: Revision,
845 changes: &ChangedFiles,
849 changes: &ChangedFiles,
846 dest: &PathToken,
850 dest: &PathToken,
847 src_minor: &CopySource,
851 src_minor: &CopySource,
848 src_major: &CopySource,
852 src_major: &CopySource,
849 ) -> (MergePick, bool) {
853 ) -> (MergePick, bool) {
850 if src_major == src_minor {
854 if src_major == src_minor {
851 (MergePick::Any, false)
855 (MergePick::Any, false)
852 } else if src_major.rev == current_merge {
856 } else if src_major.rev == current_merge {
853 // minor is different, per the minor == major check earlier
857 // minor is different, per the minor == major check earlier
854 debug_assert!(src_minor.rev != current_merge);
858 debug_assert!(src_minor.rev != current_merge);
855
859
856 // The last value comes from the current merge, this value -will- win
860 // The last value comes from the current merge, this value -will- win
857 // eventually.
861 // eventually.
858 (MergePick::Major, true)
862 (MergePick::Major, true)
859 } else if src_minor.rev == current_merge {
863 } else if src_minor.rev == current_merge {
860 // The last value comes from the current merge, this value -will- win
864 // The last value comes from the current merge, this value -will- win
861 // eventually.
865 // eventually.
862 (MergePick::Minor, true)
866 (MergePick::Minor, true)
863 } else if src_major.path == src_minor.path {
867 } else if src_major.path == src_minor.path {
864 debug_assert!(src_major.rev != src_minor.rev);
868 debug_assert!(src_major.rev != src_minor.rev);
865 // we have the same value, but from other source;
869 // we have the same value, but from other source;
866 if src_major.is_overwritten_by(src_minor) {
870 if src_major.is_overwritten_by(src_minor) {
867 (MergePick::Minor, false)
871 (MergePick::Minor, false)
868 } else if src_minor.is_overwritten_by(src_major) {
872 } else if src_minor.is_overwritten_by(src_major) {
869 (MergePick::Major, false)
873 (MergePick::Major, false)
870 } else {
874 } else {
871 (MergePick::Any, true)
875 (MergePick::Any, true)
872 }
876 }
873 } else {
877 } else {
874 debug_assert!(src_major.rev != src_minor.rev);
878 debug_assert!(src_major.rev != src_minor.rev);
875 let dest_path = path_map.untokenize(*dest);
879 let dest_path = path_map.untokenize(*dest);
876 let action = changes.get_merge_case(dest_path);
880 let action = changes.get_merge_case(dest_path);
877 if src_minor.path.is_some()
881 if src_minor.path.is_some()
878 && src_major.path.is_none()
882 && src_major.path.is_none()
879 && action == MergeCase::Salvaged
883 && action == MergeCase::Salvaged
880 {
884 {
881 // If the file is "deleted" in the major side but was
885 // If the file is "deleted" in the major side but was
882 // salvaged by the merge, we keep the minor side alive
886 // salvaged by the merge, we keep the minor side alive
883 (MergePick::Minor, true)
887 (MergePick::Minor, true)
884 } else if src_major.path.is_some()
888 } else if src_major.path.is_some()
885 && src_minor.path.is_none()
889 && src_minor.path.is_none()
886 && action == MergeCase::Salvaged
890 && action == MergeCase::Salvaged
887 {
891 {
888 // If the file is "deleted" in the minor side but was
892 // If the file is "deleted" in the minor side but was
889 // salvaged by the merge, unconditionally preserve the
893 // salvaged by the merge, unconditionally preserve the
890 // major side.
894 // major side.
891 (MergePick::Major, true)
895 (MergePick::Major, true)
892 } else if src_minor.is_overwritten_by(src_major) {
896 } else if src_minor.is_overwritten_by(src_major) {
893 // The information from the minor version are strictly older than
897 // The information from the minor version are strictly older than
894 // the major version
898 // the major version
895 if action == MergeCase::Merged {
899 if action == MergeCase::Merged {
896 // If the file was actively merged, it means some non-copy
900 // If the file was actively merged, it means some non-copy
897 // activity happened on the other branch. It
901 // activity happened on the other branch. It
898 // means the older copy information is still relevant.
902 // means the older copy information is still relevant.
899 //
903 //
900 // The major side wins such conflict.
904 // The major side wins such conflict.
901 (MergePick::Major, true)
905 (MergePick::Major, true)
902 } else {
906 } else {
903 // No activity on the minor branch, pick the newer one.
907 // No activity on the minor branch, pick the newer one.
904 (MergePick::Major, false)
908 (MergePick::Major, false)
905 }
909 }
906 } else if src_major.is_overwritten_by(src_minor) {
910 } else if src_major.is_overwritten_by(src_minor) {
907 if action == MergeCase::Merged {
911 if action == MergeCase::Merged {
908 // If the file was actively merged, it means some non-copy
912 // If the file was actively merged, it means some non-copy
909 // activity happened on the other branch. It
913 // activity happened on the other branch. It
910 // means the older copy information is still relevant.
914 // means the older copy information is still relevant.
911 //
915 //
912 // The major side wins such conflict.
916 // The major side wins such conflict.
913 (MergePick::Major, true)
917 (MergePick::Major, true)
914 } else {
918 } else {
915 // No activity on the minor branch, pick the newer one.
919 // No activity on the minor branch, pick the newer one.
916 (MergePick::Minor, false)
920 (MergePick::Minor, false)
917 }
921 }
918 } else if src_minor.path.is_none() {
922 } else if src_minor.path.is_none() {
919 // the minor side has no relevant information, pick the alive one
923 // the minor side has no relevant information, pick the alive one
920 (MergePick::Major, true)
924 (MergePick::Major, true)
921 } else if src_major.path.is_none() {
925 } else if src_major.path.is_none() {
922 // the major side has no relevant information, pick the alive one
926 // the major side has no relevant information, pick the alive one
923 (MergePick::Minor, true)
927 (MergePick::Minor, true)
924 } else {
928 } else {
925 // by default the major side wins
929 // by default the major side wins
926 (MergePick::Major, true)
930 (MergePick::Major, true)
927 }
931 }
928 }
932 }
929 }
933 }