##// END OF EJS Templates
copies-rust: get the parents' copies earlier...
marmoute -
r47317:389b0328 default
parent child Browse files
Show More
@@ -1,890 +1,891
1 use crate::utils::hg_path::HgPath;
1 use crate::utils::hg_path::HgPath;
2 use crate::utils::hg_path::HgPathBuf;
2 use crate::utils::hg_path::HgPathBuf;
3 use crate::Revision;
3 use crate::Revision;
4 use crate::NULL_REVISION;
4 use crate::NULL_REVISION;
5
5
6 use im_rc::ordmap::DiffItem;
6 use im_rc::ordmap::DiffItem;
7 use im_rc::ordmap::Entry;
7 use im_rc::ordmap::Entry;
8 use im_rc::ordmap::OrdMap;
8 use im_rc::ordmap::OrdMap;
9
9
10 use std::cmp::Ordering;
10 use std::cmp::Ordering;
11 use std::collections::HashMap;
11 use std::collections::HashMap;
12 use std::collections::HashSet;
12 use std::collections::HashSet;
13 use std::convert::TryInto;
13 use std::convert::TryInto;
14
14
15 pub type PathCopies = HashMap<HgPathBuf, HgPathBuf>;
15 pub type PathCopies = HashMap<HgPathBuf, HgPathBuf>;
16
16
17 type PathToken = usize;
17 type PathToken = usize;
18
18
/// Copy information attached to one copy destination.
#[derive(Clone, Debug, PartialEq)]
struct CopySource {
    /// revision at which the copy information was added
    rev: Revision,
    /// the copy source, (Set to None in case of deletion of the associated
    /// key)
    path: Option<PathToken>,
    /// a set of previous `CopySource.rev` value directly or indirectly
    /// overwritten by this one.
    overwritten: HashSet<Revision>,
}
30
30
31 impl CopySource {
31 impl CopySource {
32 /// create a new CopySource
32 /// create a new CopySource
33 ///
33 ///
34 /// Use this when no previous copy source existed.
34 /// Use this when no previous copy source existed.
35 fn new(rev: Revision, path: Option<PathToken>) -> Self {
35 fn new(rev: Revision, path: Option<PathToken>) -> Self {
36 Self {
36 Self {
37 rev,
37 rev,
38 path,
38 path,
39 overwritten: HashSet::new(),
39 overwritten: HashSet::new(),
40 }
40 }
41 }
41 }
42
42
43 /// create a new CopySource from merging two others
43 /// create a new CopySource from merging two others
44 ///
44 ///
45 /// Use this when merging two InternalPathCopies requires active merging of
45 /// Use this when merging two InternalPathCopies requires active merging of
46 /// some entries.
46 /// some entries.
47 fn new_from_merge(rev: Revision, winner: &Self, loser: &Self) -> Self {
47 fn new_from_merge(rev: Revision, winner: &Self, loser: &Self) -> Self {
48 let mut overwritten = HashSet::new();
48 let mut overwritten = HashSet::new();
49 overwritten.extend(winner.overwritten.iter().copied());
49 overwritten.extend(winner.overwritten.iter().copied());
50 overwritten.extend(loser.overwritten.iter().copied());
50 overwritten.extend(loser.overwritten.iter().copied());
51 overwritten.insert(winner.rev);
51 overwritten.insert(winner.rev);
52 overwritten.insert(loser.rev);
52 overwritten.insert(loser.rev);
53 Self {
53 Self {
54 rev,
54 rev,
55 path: winner.path,
55 path: winner.path,
56 overwritten: overwritten,
56 overwritten: overwritten,
57 }
57 }
58 }
58 }
59
59
60 /// Update the value of a pre-existing CopySource
60 /// Update the value of a pre-existing CopySource
61 ///
61 ///
62 /// Use this when recording copy information from parent β†’ child edges
62 /// Use this when recording copy information from parent β†’ child edges
63 fn overwrite(&mut self, rev: Revision, path: Option<PathToken>) {
63 fn overwrite(&mut self, rev: Revision, path: Option<PathToken>) {
64 self.overwritten.insert(self.rev);
64 self.overwritten.insert(self.rev);
65 self.rev = rev;
65 self.rev = rev;
66 self.path = path;
66 self.path = path;
67 }
67 }
68
68
69 /// Mark pre-existing copy information as "dropped" by a file deletion
69 /// Mark pre-existing copy information as "dropped" by a file deletion
70 ///
70 ///
71 /// Use this when recording copy information from parent β†’ child edges
71 /// Use this when recording copy information from parent β†’ child edges
72 fn mark_delete(&mut self, rev: Revision) {
72 fn mark_delete(&mut self, rev: Revision) {
73 self.overwritten.insert(self.rev);
73 self.overwritten.insert(self.rev);
74 self.rev = rev;
74 self.rev = rev;
75 self.path = None;
75 self.path = None;
76 }
76 }
77
77
78 fn is_overwritten_by(&self, other: &Self) -> bool {
78 fn is_overwritten_by(&self, other: &Self) -> bool {
79 other.overwritten.contains(&self.rev)
79 other.overwritten.contains(&self.rev)
80 }
80 }
81 }
81 }
82
82
83 /// maps CopyDestination to Copy Source (+ a "timestamp" for the operation)
83 /// maps CopyDestination to Copy Source (+ a "timestamp" for the operation)
84 type InternalPathCopies = OrdMap<PathToken, CopySource>;
84 type InternalPathCopies = OrdMap<PathToken, CopySource>;
85
85
86 /// hold parent 1, parent 2 and relevant files actions.
86 /// hold parent 1, parent 2 and relevant files actions.
87 pub type RevInfo<'a> = (Revision, Revision, ChangedFiles<'a>);
87 pub type RevInfo<'a> = (Revision, Revision, ChangedFiles<'a>);
88
88
/// represent the files affected by a changesets
///
/// This hold a subset of mercurial.metadata.ChangingFiles as we do not need
/// all the data categories tracked by it.
pub struct ChangedFiles<'a> {
    // number of entries recorded in `index`
    nb_items: u32,
    // fixed-size index entries (ENTRY_SIZE bytes each), one per file
    index: &'a [u8],
    // variable-size payload (filenames) addressed by offsets in `index`
    data: &'a [u8],
}
100
100
/// Represent active changes that affect the copy tracing.
enum Action<'a> {
    /// The parent → children edge is removing a file
    ///
    /// (actually, this could be the edge from the other parent, but it does
    /// not matter)
    Removed(&'a HgPath),
    /// The parent → children edge introduces copy information between
    /// (dest, source)
    Copied(&'a HgPath, &'a HgPath),
}
112
112
/// This expresses the possible "special" cases we can get in a merge.
///
/// See mercurial/metadata.py for details on these values.
#[derive(PartialEq)]
enum MergeCase {
    /// Merged: file had history on both side that needed to be merged
    Merged,
    /// Salvaged: file was candidate for deletion, but survived the merge
    Salvaged,
    /// Normal: Not one of the two cases above
    Normal,
}
125
125
/// One decoded index entry: (flags byte, filename, copy source).
type FileChange<'a> = (u8, &'a HgPath, &'a HgPath);

const EMPTY: &[u8] = b"";
// Masks and values for the per-entry `flags` byte.
// copy-related bits: which parent the copy information applies to
const COPY_MASK: u8 = 3;
const P1_COPY: u8 = 2;
const P2_COPY: u8 = 3;
// action-related bits: what happened to the file in this changeset
const ACTION_MASK: u8 = 28;
const REMOVED: u8 = 12;
const MERGED: u8 = 8;
const SALVAGED: u8 = 16;
136
136
impl<'a> ChangedFiles<'a> {
    // Serialized layout: a 4-byte big-endian entry count, then `nb_items`
    // index entries of ENTRY_SIZE bytes each, then the filename data the
    // index offsets point into.
    const INDEX_START: usize = 4;
    const ENTRY_SIZE: u32 = 9;
    // Offsets within one index entry: 1 flags byte, then a 4-byte
    // filename-end offset, then a 4-byte copy-source entry index.
    const FILENAME_START: u32 = 1;
    const COPY_SOURCE_START: u32 = 5;

    /// Parse a serialized ChangedFiles, borrowing the index and data
    /// sections from `data`.
    ///
    /// Panics (via `assert!`) if `data` is too small for the header, the
    /// index, or the filename data the index references.
    pub fn new(data: &'a [u8]) -> Self {
        assert!(
            data.len() >= 4,
            "data size ({}) is too small to contain the header (4)",
            data.len()
        );
        let nb_items_raw: [u8; 4] = (&data[0..=3])
            .try_into()
            .expect("failed to turn 4 bytes into 4 bytes");
        let nb_items = u32::from_be_bytes(nb_items_raw);

        let index_size = (nb_items * Self::ENTRY_SIZE) as usize;
        let index_end = Self::INDEX_START + index_size;

        assert!(
            data.len() >= index_end,
            "data size ({}) is too small to fit the index_data ({})",
            data.len(),
            index_end
        );

        let ret = ChangedFiles {
            nb_items,
            index: &data[Self::INDEX_START..index_end],
            data: &data[index_end..],
        };
        // The last entry's filename-end offset is the highest data offset
        // in use, so it bounds the required data size.
        // NOTE(review): `nb_items - 1` underflows for nb_items == 0 —
        // presumably callers use `new_empty` for the empty case; confirm.
        let max_data = ret.filename_end(nb_items - 1) as usize;
        assert!(
            ret.data.len() >= max_data,
            "data size ({}) is too small to fit all data ({})",
            data.len(),
            index_end + max_data
        );
        ret
    }

    /// Build an empty ChangedFiles (no entries, no data).
    pub fn new_empty() -> Self {
        ChangedFiles {
            nb_items: 0,
            index: EMPTY,
            data: EMPTY,
        }
    }

    /// internal function to return an individual entry at a given index
    fn entry(&'a self, idx: u32) -> FileChange<'a> {
        if idx >= self.nb_items {
            panic!(
                "index for entry is higher that the number of file {} >= {}",
                idx, self.nb_items
            )
        }
        let flags = self.flags(idx);
        let filename = self.filename(idx);
        // the copy source is stored as the index of another entry, whose
        // filename is the source path
        let copy_idx = self.copy_idx(idx);
        let copy_source = self.filename(copy_idx);
        (flags, filename, copy_source)
    }

    /// internal function to return the filename of the entry at a given index
    fn filename(&self, idx: u32) -> &HgPath {
        // filenames are stored back to back: entry idx starts where entry
        // idx - 1 ends (offset 0 for the first one)
        let filename_start;
        if idx == 0 {
            filename_start = 0;
        } else {
            filename_start = self.filename_end(idx - 1)
        }
        let filename_end = self.filename_end(idx);
        let filename_start = filename_start as usize;
        let filename_end = filename_end as usize;
        HgPath::new(&self.data[filename_start..filename_end])
    }

    /// internal function to return the flag field of the entry at a given
    /// index
    fn flags(&self, idx: u32) -> u8 {
        // the flags byte is the first byte of the index entry
        let idx = idx as usize;
        self.index[idx * (Self::ENTRY_SIZE as usize)]
    }

    /// internal function to return the end of a filename part at a given index
    fn filename_end(&self, idx: u32) -> u32 {
        // read the 4-byte big-endian offset stored after the flags byte
        let start = (idx * Self::ENTRY_SIZE) + Self::FILENAME_START;
        let end = (idx * Self::ENTRY_SIZE) + Self::COPY_SOURCE_START;
        let start = start as usize;
        let end = end as usize;
        let raw = (&self.index[start..end])
            .try_into()
            .expect("failed to turn 4 bytes into 4 bytes");
        u32::from_be_bytes(raw)
    }

    /// internal function to return index of the copy source of the entry at a
    /// given index
    fn copy_idx(&self, idx: u32) -> u32 {
        // read the trailing 4-byte big-endian field of the index entry
        let start = (idx * Self::ENTRY_SIZE) + Self::COPY_SOURCE_START;
        let end = (idx + 1) * Self::ENTRY_SIZE;
        let start = start as usize;
        let end = end as usize;
        let raw = (&self.index[start..end])
            .try_into()
            .expect("failed to turn 4 bytes into 4 bytes");
        u32::from_be_bytes(raw)
    }

    /// Return an iterator over all the `Action` in this instance.
    fn iter_actions(&self, parent: Parent) -> ActionsIterator {
        ActionsIterator {
            changes: &self,
            parent: parent,
            current: 0,
        }
    }

    /// return the MergeCase value associated with a filename
    fn get_merge_case(&self, path: &HgPath) -> MergeCase {
        if self.nb_items == 0 {
            return MergeCase::Normal;
        }
        // binary search over the index entries for `path`
        let mut low_part = 0;
        let mut high_part = self.nb_items;

        while low_part < high_part {
            let cursor = (low_part + high_part - 1) / 2;
            let (flags, filename, _source) = self.entry(cursor);
            // NOTE(review): `Less` moving `low_part` up implies a specific
            // sort order of the serialized entries — confirm against the
            // format written by mercurial/metadata.py.
            match path.cmp(filename) {
                Ordering::Less => low_part = cursor + 1,
                Ordering::Greater => high_part = cursor,
                Ordering::Equal => {
                    return match flags & ACTION_MASK {
                        MERGED => MergeCase::Merged,
                        SALVAGED => MergeCase::Salvaged,
                        _ => MergeCase::Normal,
                    };
                }
            }
        }
        // not found: the file has no special merge handling
        MergeCase::Normal
    }
}
283
283
/// Iterator over the `Action`s of a ChangedFiles that are relevant to one
/// parent edge.
struct ActionsIterator<'a> {
    changes: &'a ChangedFiles<'a>,
    // which parent edge copies are reported for
    parent: Parent,
    // index of the next entry to examine
    current: u32,
}
289
289
290 impl<'a> Iterator for ActionsIterator<'a> {
290 impl<'a> Iterator for ActionsIterator<'a> {
291 type Item = Action<'a>;
291 type Item = Action<'a>;
292
292
293 fn next(&mut self) -> Option<Action<'a>> {
293 fn next(&mut self) -> Option<Action<'a>> {
294 let copy_flag = match self.parent {
294 let copy_flag = match self.parent {
295 Parent::FirstParent => P1_COPY,
295 Parent::FirstParent => P1_COPY,
296 Parent::SecondParent => P2_COPY,
296 Parent::SecondParent => P2_COPY,
297 };
297 };
298 while self.current < self.changes.nb_items {
298 while self.current < self.changes.nb_items {
299 let (flags, file, source) = self.changes.entry(self.current);
299 let (flags, file, source) = self.changes.entry(self.current);
300 self.current += 1;
300 self.current += 1;
301 if (flags & ACTION_MASK) == REMOVED {
301 if (flags & ACTION_MASK) == REMOVED {
302 return Some(Action::Removed(file));
302 return Some(Action::Removed(file));
303 }
303 }
304 let copy = flags & COPY_MASK;
304 let copy = flags & COPY_MASK;
305 if copy == copy_flag {
305 if copy == copy_flag {
306 return Some(Action::Copied(file, source));
306 return Some(Action::Copied(file, source));
307 }
307 }
308 }
308 }
309 return None;
309 return None;
310 }
310 }
311 }
311 }
312
312
/// A small struct whose purpose is to ensure lifetime of bytes referenced in
/// ChangedFiles
///
/// It is passed to the RevInfoMaker callback who can assign any necessary
/// content to the `data` attribute. The copy tracing code is responsible for
/// keeping the DataHolder alive at least as long as the ChangedFiles object.
pub struct DataHolder<D> {
    /// RevInfoMaker callback should assign data referenced by the
    /// ChangedFiles struct it return to this attribute. The DataHolder
    /// lifetime will be at least as long as the ChangedFiles one.
    pub data: Option<D>,
}
325
325
326 pub type RevInfoMaker<'a, D> =
326 pub type RevInfoMaker<'a, D> =
327 Box<dyn for<'r> Fn(Revision, &'r mut DataHolder<D>) -> RevInfo<'r> + 'a>;
327 Box<dyn for<'r> Fn(Revision, &'r mut DataHolder<D>) -> RevInfo<'r> + 'a>;
328
328
/// enum used to carry information about the parent → child edge currently
/// being processed
#[derive(Copy, Clone, Debug)]
enum Parent {
    /// The `p1(x) → x` edge
    FirstParent,
    /// The `p2(x) → x` edge
    SecondParent,
}
337
337
/// A small "tokenizer" responsible of turning full HgPath into lighter
/// PathToken
///
/// Dealing with small object, like integer is much faster, so HgPath input are
/// turned into integer "PathToken" and converted back in the end.
#[derive(Clone, Debug, Default)]
struct TwoWayPathMap {
    // path → token lookup
    token: HashMap<HgPathBuf, PathToken>,
    // token → path lookup; a token is an index into this vector
    path: Vec<HgPathBuf>,
}
348
348
349 impl TwoWayPathMap {
349 impl TwoWayPathMap {
350 fn tokenize(&mut self, path: &HgPath) -> PathToken {
350 fn tokenize(&mut self, path: &HgPath) -> PathToken {
351 match self.token.get(path) {
351 match self.token.get(path) {
352 Some(a) => *a,
352 Some(a) => *a,
353 None => {
353 None => {
354 let a = self.token.len();
354 let a = self.token.len();
355 let buf = path.to_owned();
355 let buf = path.to_owned();
356 self.path.push(buf.clone());
356 self.path.push(buf.clone());
357 self.token.insert(buf, a);
357 self.token.insert(buf, a);
358 a
358 a
359 }
359 }
360 }
360 }
361 }
361 }
362
362
363 fn untokenize(&self, token: PathToken) -> &HgPathBuf {
363 fn untokenize(&self, token: PathToken) -> &HgPathBuf {
364 assert!(token < self.path.len(), format!("Unknown token: {}", token));
364 assert!(token < self.path.len(), format!("Unknown token: {}", token));
365 &self.path[token]
365 &self.path[token]
366 }
366 }
367 }
367 }
368
368
369 /// Same as mercurial.copies._combine_changeset_copies, but in Rust.
369 /// Same as mercurial.copies._combine_changeset_copies, but in Rust.
370 ///
370 ///
371 /// Arguments are:
371 /// Arguments are:
372 ///
372 ///
373 /// revs: all revisions to be considered
373 /// revs: all revisions to be considered
374 /// children: a {parent ? [childrens]} mapping
374 /// children: a {parent ? [childrens]} mapping
375 /// target_rev: the final revision we are combining copies to
375 /// target_rev: the final revision we are combining copies to
376 /// rev_info(rev): callback to get revision information:
376 /// rev_info(rev): callback to get revision information:
377 /// * first parent
377 /// * first parent
378 /// * second parent
378 /// * second parent
379 /// * ChangedFiles
379 /// * ChangedFiles
380 /// isancestors(low_rev, high_rev): callback to check if a revision is an
380 /// isancestors(low_rev, high_rev): callback to check if a revision is an
381 /// ancestor of another
381 /// ancestor of another
382 pub fn combine_changeset_copies<D>(
382 pub fn combine_changeset_copies<D>(
383 revs: Vec<Revision>,
383 revs: Vec<Revision>,
384 mut children_count: HashMap<Revision, usize>,
384 mut children_count: HashMap<Revision, usize>,
385 target_rev: Revision,
385 target_rev: Revision,
386 rev_info: RevInfoMaker<D>,
386 rev_info: RevInfoMaker<D>,
387 ) -> PathCopies {
387 ) -> PathCopies {
388 let mut all_copies = HashMap::new();
388 let mut all_copies = HashMap::new();
389
389
390 let mut path_map = TwoWayPathMap::default();
390 let mut path_map = TwoWayPathMap::default();
391
391
392 for rev in revs {
392 for rev in revs {
393 let mut d: DataHolder<D> = DataHolder { data: None };
393 let mut d: DataHolder<D> = DataHolder { data: None };
394 let (p1, p2, changes) = rev_info(rev, &mut d);
394 let (p1, p2, changes) = rev_info(rev, &mut d);
395
395
396 // We will chain the copies information accumulated for the parent with
396 // We will chain the copies information accumulated for the parent with
397 // the individual copies information the curent revision. Creating a
397 // the individual copies information the curent revision. Creating a
398 // new TimeStampedPath for each `rev` β†’ `children` vertex.
398 // new TimeStampedPath for each `rev` β†’ `children` vertex.
399 let mut copies: Option<InternalPathCopies> = None;
399 let mut copies: Option<InternalPathCopies> = None;
400 if p1 != NULL_REVISION {
400 // Retrieve data computed in a previous iteration
401 // Retrieve data computed in a previous iteration
401 let p1_copies = match p1 {
402 let parent_copies = get_and_clean_parent_copies(
402 NULL_REVISION => None,
403 _ => get_and_clean_parent_copies(
403 &mut all_copies,
404 &mut all_copies,
404 &mut children_count,
405 &mut children_count,
405 p1,
406 p1,
406 );
407 ), // will be None if the vertex is not to be traversed
407 if let Some(parent_copies) = parent_copies {
408 };
408 // combine it with data for that revision
409 let p2_copies = match p2 {
409 let vertex_copies = add_from_changes(
410 NULL_REVISION => None,
410 &mut path_map,
411 _ => get_and_clean_parent_copies(
411 &parent_copies,
412 &changes,
413 Parent::FirstParent,
414 rev,
415 );
416 // keep that data around for potential later combination
417 copies = Some(vertex_copies);
418 }
419 }
420 if p2 != NULL_REVISION {
421 // Retrieve data computed in a previous iteration
422 let parent_copies = get_and_clean_parent_copies(
423 &mut all_copies,
412 &mut all_copies,
424 &mut children_count,
413 &mut children_count,
425 p2,
414 p2,
415 ), // will be None if the vertex is not to be traversed
416 };
417 if let Some(parent_copies) = p1_copies {
418 // combine it with data for that revision
419 let vertex_copies = add_from_changes(
420 &mut path_map,
421 &parent_copies,
422 &changes,
423 Parent::FirstParent,
424 rev,
426 );
425 );
427 if let Some(parent_copies) = parent_copies {
426 // keep that data around for potential later combination
428 // combine it with data for that revision
427 copies = Some(vertex_copies);
429 let vertex_copies = add_from_changes(
428 }
430 &mut path_map,
429 if let Some(parent_copies) = p2_copies {
431 &parent_copies,
430 // combine it with data for that revision
432 &changes,
431 let vertex_copies = add_from_changes(
433 Parent::SecondParent,
432 &mut path_map,
434 rev,
433 &parent_copies,
435 );
434 &changes,
435 Parent::SecondParent,
436 rev,
437 );
436
438
437 copies = match copies {
439 copies = match copies {
438 None => Some(vertex_copies),
440 None => Some(vertex_copies),
439 // Merge has two parents needs to combines their copy
441 // Merge has two parents needs to combines their copy
440 // information.
442 // information.
441 //
443 //
442 // If we got data from both parents, We need to combine
444 // If we got data from both parents, We need to combine
443 // them.
445 // them.
444 Some(copies) => Some(merge_copies_dict(
446 Some(copies) => Some(merge_copies_dict(
445 &path_map,
447 &path_map,
446 rev,
448 rev,
447 vertex_copies,
449 vertex_copies,
448 copies,
450 copies,
449 &changes,
451 &changes,
450 )),
452 )),
451 };
453 };
452 }
453 }
454 }
454 match copies {
455 match copies {
455 Some(copies) => {
456 Some(copies) => {
456 all_copies.insert(rev, copies);
457 all_copies.insert(rev, copies);
457 }
458 }
458 _ => {}
459 _ => {}
459 }
460 }
460 }
461 }
461
462
462 // Drop internal information (like the timestamp) and return the final
463 // Drop internal information (like the timestamp) and return the final
463 // mapping.
464 // mapping.
464 let tt_result = all_copies
465 let tt_result = all_copies
465 .remove(&target_rev)
466 .remove(&target_rev)
466 .expect("target revision was not processed");
467 .expect("target revision was not processed");
467 let mut result = PathCopies::default();
468 let mut result = PathCopies::default();
468 for (dest, tt_source) in tt_result {
469 for (dest, tt_source) in tt_result {
469 if let Some(path) = tt_source.path {
470 if let Some(path) = tt_source.path {
470 let path_dest = path_map.untokenize(dest).to_owned();
471 let path_dest = path_map.untokenize(dest).to_owned();
471 let path_path = path_map.untokenize(path).to_owned();
472 let path_path = path_map.untokenize(path).to_owned();
472 result.insert(path_dest, path_path);
473 result.insert(path_dest, path_path);
473 }
474 }
474 }
475 }
475 result
476 result
476 }
477 }
477
478
478 /// fetch previous computed information
479 /// fetch previous computed information
479 ///
480 ///
480 /// If no other children are expected to need this information, we drop it from
481 /// If no other children are expected to need this information, we drop it from
481 /// the cache.
482 /// the cache.
482 ///
483 ///
483 /// If parent is not part of the set we are expected to walk, return None.
484 /// If parent is not part of the set we are expected to walk, return None.
484 fn get_and_clean_parent_copies(
485 fn get_and_clean_parent_copies(
485 all_copies: &mut HashMap<Revision, InternalPathCopies>,
486 all_copies: &mut HashMap<Revision, InternalPathCopies>,
486 children_count: &mut HashMap<Revision, usize>,
487 children_count: &mut HashMap<Revision, usize>,
487 parent_rev: Revision,
488 parent_rev: Revision,
488 ) -> Option<InternalPathCopies> {
489 ) -> Option<InternalPathCopies> {
489 let count = children_count.get_mut(&parent_rev)?;
490 let count = children_count.get_mut(&parent_rev)?;
490 *count -= 1;
491 *count -= 1;
491 if *count == 0 {
492 if *count == 0 {
492 match all_copies.remove(&parent_rev) {
493 match all_copies.remove(&parent_rev) {
493 Some(c) => Some(c),
494 Some(c) => Some(c),
494 None => Some(InternalPathCopies::default()),
495 None => Some(InternalPathCopies::default()),
495 }
496 }
496 } else {
497 } else {
497 match all_copies.get(&parent_rev) {
498 match all_copies.get(&parent_rev) {
498 Some(c) => Some(c.clone()),
499 Some(c) => Some(c.clone()),
499 None => Some(InternalPathCopies::default()),
500 None => Some(InternalPathCopies::default()),
500 }
501 }
501 }
502 }
502 }
503 }
503
504
/// Combine ChangedFiles with some existing PathCopies information and return
/// the result
fn add_from_changes(
    path_map: &mut TwoWayPathMap,
    base_copies: &InternalPathCopies,
    changes: &ChangedFiles,
    parent: Parent,
    current_rev: Revision,
) -> InternalPathCopies {
    let mut copies = base_copies.clone();
    for action in changes.iter_actions(parent) {
        match action {
            Action::Copied(path_dest, path_source) => {
                let dest = path_map.tokenize(path_dest);
                let source = path_map.tokenize(path_source);
                // Chain through pre-existing copy information: if the
                // source was itself copied from somewhere in the parent,
                // record that original origin rather than the
                // intermediate name.
                let entry;
                if let Some(v) = base_copies.get(&source) {
                    entry = match &v.path {
                        Some(path) => Some((*(path)).to_owned()),
                        // the source was marked deleted in the parent;
                        // fall back to the source itself
                        None => Some(source.to_owned()),
                    }
                } else {
                    entry = Some(source.to_owned());
                }
                // Each new entry is introduced by the children, we
                // record this information as we will need it to take
                // the right decision when merging conflicting copy
                // information. See merge_copies_dict for details.
                match copies.entry(dest) {
                    Entry::Vacant(slot) => {
                        let ttpc = CopySource::new(current_rev, entry);
                        slot.insert(ttpc);
                    }
                    Entry::Occupied(mut slot) => {
                        // pre-existing destination: overwrite while
                        // remembering the overwritten revision
                        let ttpc = slot.get_mut();
                        ttpc.overwrite(current_rev, entry);
                    }
                }
            }
            Action::Removed(deleted_path) => {
                // We must drop copy information for removed file.
                //
                // We need to explicitly record them as dropped to
                // propagate this information when merging two
                // InternalPathCopies object.
                let deleted = path_map.tokenize(deleted_path);
                copies.entry(deleted).and_modify(|old| {
                    old.mark_delete(current_rev);
                });
            }
        }
    }
    copies
}
558
559
559 /// merge two copies-mapping together, minor and major
560 /// merge two copies-mapping together, minor and major
560 ///
561 ///
561 /// In case of conflict, value from "major" will be picked, unless in some
562 /// In case of conflict, value from "major" will be picked, unless in some
562 /// cases. See inline documentation for details.
563 /// cases. See inline documentation for details.
563 fn merge_copies_dict(
564 fn merge_copies_dict(
564 path_map: &TwoWayPathMap,
565 path_map: &TwoWayPathMap,
565 current_merge: Revision,
566 current_merge: Revision,
566 mut minor: InternalPathCopies,
567 mut minor: InternalPathCopies,
567 mut major: InternalPathCopies,
568 mut major: InternalPathCopies,
568 changes: &ChangedFiles,
569 changes: &ChangedFiles,
569 ) -> InternalPathCopies {
570 ) -> InternalPathCopies {
570 // This closure exist as temporary help while multiple developper are
571 // This closure exist as temporary help while multiple developper are
571 // actively working on this code. Feel free to re-inline it once this
572 // actively working on this code. Feel free to re-inline it once this
572 // code is more settled.
573 // code is more settled.
573 let cmp_value =
574 let cmp_value =
574 |dest: &PathToken, src_minor: &CopySource, src_major: &CopySource| {
575 |dest: &PathToken, src_minor: &CopySource, src_major: &CopySource| {
575 compare_value(
576 compare_value(
576 path_map,
577 path_map,
577 current_merge,
578 current_merge,
578 changes,
579 changes,
579 dest,
580 dest,
580 src_minor,
581 src_minor,
581 src_major,
582 src_major,
582 )
583 )
583 };
584 };
584 if minor.is_empty() {
585 if minor.is_empty() {
585 major
586 major
586 } else if major.is_empty() {
587 } else if major.is_empty() {
587 minor
588 minor
588 } else if minor.len() * 2 < major.len() {
589 } else if minor.len() * 2 < major.len() {
589 // Lets says we are merging two InternalPathCopies instance A and B.
590 // Lets says we are merging two InternalPathCopies instance A and B.
590 //
591 //
591 // If A contains N items, the merge result will never contains more
592 // If A contains N items, the merge result will never contains more
592 // than N values differents than the one in A
593 // than N values differents than the one in A
593 //
594 //
594 // If B contains M items, with M > N, the merge result will always
595 // If B contains M items, with M > N, the merge result will always
595 // result in a minimum of M - N value differents than the on in
596 // result in a minimum of M - N value differents than the on in
596 // A
597 // A
597 //
598 //
598 // As a result, if N < (M-N), we know that simply iterating over A will
599 // As a result, if N < (M-N), we know that simply iterating over A will
599 // yield less difference than iterating over the difference
600 // yield less difference than iterating over the difference
600 // between A and B.
601 // between A and B.
601 //
602 //
602 // This help performance a lot in case were a tiny
603 // This help performance a lot in case were a tiny
603 // InternalPathCopies is merged with a much larger one.
604 // InternalPathCopies is merged with a much larger one.
604 for (dest, src_minor) in minor {
605 for (dest, src_minor) in minor {
605 let src_major = major.get(&dest);
606 let src_major = major.get(&dest);
606 match src_major {
607 match src_major {
607 None => {
608 None => {
608 major.insert(dest, src_minor);
609 major.insert(dest, src_minor);
609 }
610 }
610 Some(src_major) => {
611 Some(src_major) => {
611 let (pick, overwrite) =
612 let (pick, overwrite) =
612 cmp_value(&dest, &src_minor, src_major);
613 cmp_value(&dest, &src_minor, src_major);
613 if overwrite {
614 if overwrite {
614 let src = match pick {
615 let src = match pick {
615 MergePick::Major => CopySource::new_from_merge(
616 MergePick::Major => CopySource::new_from_merge(
616 current_merge,
617 current_merge,
617 src_major,
618 src_major,
618 &src_minor,
619 &src_minor,
619 ),
620 ),
620 MergePick::Minor => CopySource::new_from_merge(
621 MergePick::Minor => CopySource::new_from_merge(
621 current_merge,
622 current_merge,
622 &src_minor,
623 &src_minor,
623 src_major,
624 src_major,
624 ),
625 ),
625 MergePick::Any => CopySource::new_from_merge(
626 MergePick::Any => CopySource::new_from_merge(
626 current_merge,
627 current_merge,
627 src_major,
628 src_major,
628 &src_minor,
629 &src_minor,
629 ),
630 ),
630 };
631 };
631 major.insert(dest, src);
632 major.insert(dest, src);
632 } else {
633 } else {
633 match pick {
634 match pick {
634 MergePick::Any | MergePick::Major => None,
635 MergePick::Any | MergePick::Major => None,
635 MergePick::Minor => major.insert(dest, src_minor),
636 MergePick::Minor => major.insert(dest, src_minor),
636 };
637 };
637 }
638 }
638 }
639 }
639 };
640 };
640 }
641 }
641 major
642 major
642 } else if major.len() * 2 < minor.len() {
643 } else if major.len() * 2 < minor.len() {
643 // This use the same rational than the previous block.
644 // This use the same rational than the previous block.
644 // (Check previous block documentation for details.)
645 // (Check previous block documentation for details.)
645 for (dest, src_major) in major {
646 for (dest, src_major) in major {
646 let src_minor = minor.get(&dest);
647 let src_minor = minor.get(&dest);
647 match src_minor {
648 match src_minor {
648 None => {
649 None => {
649 minor.insert(dest, src_major);
650 minor.insert(dest, src_major);
650 }
651 }
651 Some(src_minor) => {
652 Some(src_minor) => {
652 let (pick, overwrite) =
653 let (pick, overwrite) =
653 cmp_value(&dest, src_minor, &src_major);
654 cmp_value(&dest, src_minor, &src_major);
654 if overwrite {
655 if overwrite {
655 let src = match pick {
656 let src = match pick {
656 MergePick::Major => CopySource::new_from_merge(
657 MergePick::Major => CopySource::new_from_merge(
657 current_merge,
658 current_merge,
658 &src_major,
659 &src_major,
659 src_minor,
660 src_minor,
660 ),
661 ),
661 MergePick::Minor => CopySource::new_from_merge(
662 MergePick::Minor => CopySource::new_from_merge(
662 current_merge,
663 current_merge,
663 src_minor,
664 src_minor,
664 &src_major,
665 &src_major,
665 ),
666 ),
666 MergePick::Any => CopySource::new_from_merge(
667 MergePick::Any => CopySource::new_from_merge(
667 current_merge,
668 current_merge,
668 &src_major,
669 &src_major,
669 src_minor,
670 src_minor,
670 ),
671 ),
671 };
672 };
672 minor.insert(dest, src);
673 minor.insert(dest, src);
673 } else {
674 } else {
674 match pick {
675 match pick {
675 MergePick::Any | MergePick::Minor => None,
676 MergePick::Any | MergePick::Minor => None,
676 MergePick::Major => minor.insert(dest, src_major),
677 MergePick::Major => minor.insert(dest, src_major),
677 };
678 };
678 }
679 }
679 }
680 }
680 };
681 };
681 }
682 }
682 minor
683 minor
683 } else {
684 } else {
684 let mut override_minor = Vec::new();
685 let mut override_minor = Vec::new();
685 let mut override_major = Vec::new();
686 let mut override_major = Vec::new();
686
687
687 let mut to_major = |k: &PathToken, v: &CopySource| {
688 let mut to_major = |k: &PathToken, v: &CopySource| {
688 override_major.push((k.clone(), v.clone()))
689 override_major.push((k.clone(), v.clone()))
689 };
690 };
690 let mut to_minor = |k: &PathToken, v: &CopySource| {
691 let mut to_minor = |k: &PathToken, v: &CopySource| {
691 override_minor.push((k.clone(), v.clone()))
692 override_minor.push((k.clone(), v.clone()))
692 };
693 };
693
694
694 // The diff function leverage detection of the identical subpart if
695 // The diff function leverage detection of the identical subpart if
695 // minor and major has some common ancestors. This make it very
696 // minor and major has some common ancestors. This make it very
696 // fast is most case.
697 // fast is most case.
697 //
698 //
698 // In case where the two map are vastly different in size, the current
699 // In case where the two map are vastly different in size, the current
699 // approach is still slowish because the iteration will iterate over
700 // approach is still slowish because the iteration will iterate over
700 // all the "exclusive" content of the larger on. This situation can be
701 // all the "exclusive" content of the larger on. This situation can be
701 // frequent when the subgraph of revision we are processing has a lot
702 // frequent when the subgraph of revision we are processing has a lot
702 // of roots. Each roots adding they own fully new map to the mix (and
703 // of roots. Each roots adding they own fully new map to the mix (and
703 // likely a small map, if the path from the root to the "main path" is
704 // likely a small map, if the path from the root to the "main path" is
704 // small.
705 // small.
705 //
706 //
706 // We could do better by detecting such situation and processing them
707 // We could do better by detecting such situation and processing them
707 // differently.
708 // differently.
708 for d in minor.diff(&major) {
709 for d in minor.diff(&major) {
709 match d {
710 match d {
710 DiffItem::Add(k, v) => to_minor(k, v),
711 DiffItem::Add(k, v) => to_minor(k, v),
711 DiffItem::Remove(k, v) => to_major(k, v),
712 DiffItem::Remove(k, v) => to_major(k, v),
712 DiffItem::Update { old, new } => {
713 DiffItem::Update { old, new } => {
713 let (dest, src_major) = new;
714 let (dest, src_major) = new;
714 let (_, src_minor) = old;
715 let (_, src_minor) = old;
715 let (pick, overwrite) =
716 let (pick, overwrite) =
716 cmp_value(dest, src_minor, src_major);
717 cmp_value(dest, src_minor, src_major);
717 if overwrite {
718 if overwrite {
718 let src = match pick {
719 let src = match pick {
719 MergePick::Major => CopySource::new_from_merge(
720 MergePick::Major => CopySource::new_from_merge(
720 current_merge,
721 current_merge,
721 src_major,
722 src_major,
722 src_minor,
723 src_minor,
723 ),
724 ),
724 MergePick::Minor => CopySource::new_from_merge(
725 MergePick::Minor => CopySource::new_from_merge(
725 current_merge,
726 current_merge,
726 src_minor,
727 src_minor,
727 src_major,
728 src_major,
728 ),
729 ),
729 MergePick::Any => CopySource::new_from_merge(
730 MergePick::Any => CopySource::new_from_merge(
730 current_merge,
731 current_merge,
731 src_major,
732 src_major,
732 src_minor,
733 src_minor,
733 ),
734 ),
734 };
735 };
735 to_minor(dest, &src);
736 to_minor(dest, &src);
736 to_major(dest, &src);
737 to_major(dest, &src);
737 } else {
738 } else {
738 match pick {
739 match pick {
739 MergePick::Major => to_minor(dest, src_major),
740 MergePick::Major => to_minor(dest, src_major),
740 MergePick::Minor => to_major(dest, src_minor),
741 MergePick::Minor => to_major(dest, src_minor),
741 // If the two entry are identical, no need to do
742 // If the two entry are identical, no need to do
742 // anything (but diff should not have yield them)
743 // anything (but diff should not have yield them)
743 MergePick::Any => unreachable!(),
744 MergePick::Any => unreachable!(),
744 }
745 }
745 }
746 }
746 }
747 }
747 };
748 };
748 }
749 }
749
750
750 let updates;
751 let updates;
751 let mut result;
752 let mut result;
752 if override_major.is_empty() {
753 if override_major.is_empty() {
753 result = major
754 result = major
754 } else if override_minor.is_empty() {
755 } else if override_minor.is_empty() {
755 result = minor
756 result = minor
756 } else {
757 } else {
757 if override_minor.len() < override_major.len() {
758 if override_minor.len() < override_major.len() {
758 updates = override_minor;
759 updates = override_minor;
759 result = minor;
760 result = minor;
760 } else {
761 } else {
761 updates = override_major;
762 updates = override_major;
762 result = major;
763 result = major;
763 }
764 }
764 for (k, v) in updates {
765 for (k, v) in updates {
765 result.insert(k, v);
766 result.insert(k, v);
766 }
767 }
767 }
768 }
768 result
769 result
769 }
770 }
770 }
771 }
771
772
772 /// represent the side that should prevail when merging two
773 /// represent the side that should prevail when merging two
773 /// InternalPathCopies
774 /// InternalPathCopies
774 enum MergePick {
775 enum MergePick {
775 /// The "major" (p1) side prevails
776 /// The "major" (p1) side prevails
776 Major,
777 Major,
777 /// The "minor" (p2) side prevails
778 /// The "minor" (p2) side prevails
778 Minor,
779 Minor,
779 /// Any side could be used (because they are the same)
780 /// Any side could be used (because they are the same)
780 Any,
781 Any,
781 }
782 }
782
783
783 /// decide which side prevails in case of conflicting values
784 /// decide which side prevails in case of conflicting values
784 #[allow(clippy::if_same_then_else)]
785 #[allow(clippy::if_same_then_else)]
785 fn compare_value(
786 fn compare_value(
786 path_map: &TwoWayPathMap,
787 path_map: &TwoWayPathMap,
787 current_merge: Revision,
788 current_merge: Revision,
788 changes: &ChangedFiles,
789 changes: &ChangedFiles,
789 dest: &PathToken,
790 dest: &PathToken,
790 src_minor: &CopySource,
791 src_minor: &CopySource,
791 src_major: &CopySource,
792 src_major: &CopySource,
792 ) -> (MergePick, bool) {
793 ) -> (MergePick, bool) {
793 if src_major.rev == current_merge {
794 if src_major.rev == current_merge {
794 if src_minor.rev == current_merge {
795 if src_minor.rev == current_merge {
795 if src_major.path.is_none() {
796 if src_major.path.is_none() {
796 // We cannot get different copy information for both p1 and p2
797 // We cannot get different copy information for both p1 and p2
797 // from the same revision. Unless this was a
798 // from the same revision. Unless this was a
798 // deletion.
799 // deletion.
799 //
800 //
800 // However the deletion might come over different data on each
801 // However the deletion might come over different data on each
801 // branch.
802 // branch.
802 let need_over = src_major.overwritten != src_minor.overwritten;
803 let need_over = src_major.overwritten != src_minor.overwritten;
803 (MergePick::Any, need_over)
804 (MergePick::Any, need_over)
804 } else {
805 } else {
805 unreachable!();
806 unreachable!();
806 }
807 }
807 } else {
808 } else {
808 // The last value comes the current merge, this value -will- win
809 // The last value comes the current merge, this value -will- win
809 // eventually.
810 // eventually.
810 (MergePick::Major, true)
811 (MergePick::Major, true)
811 }
812 }
812 } else if src_minor.rev == current_merge {
813 } else if src_minor.rev == current_merge {
813 // The last value comes the current merge, this value -will- win
814 // The last value comes the current merge, this value -will- win
814 // eventually.
815 // eventually.
815 (MergePick::Minor, true)
816 (MergePick::Minor, true)
816 } else if src_major.path == src_minor.path {
817 } else if src_major.path == src_minor.path {
817 // we have the same value, but from other source;
818 // we have the same value, but from other source;
818 if src_major.rev == src_minor.rev {
819 if src_major.rev == src_minor.rev {
819 // If the two entry are identical, they are both valid
820 // If the two entry are identical, they are both valid
820 debug_assert!(src_minor.overwritten == src_minor.overwritten);
821 debug_assert!(src_minor.overwritten == src_minor.overwritten);
821 (MergePick::Any, false)
822 (MergePick::Any, false)
822 } else if src_major.is_overwritten_by(src_minor) {
823 } else if src_major.is_overwritten_by(src_minor) {
823 (MergePick::Minor, false)
824 (MergePick::Minor, false)
824 } else if src_minor.is_overwritten_by(src_major) {
825 } else if src_minor.is_overwritten_by(src_major) {
825 (MergePick::Major, false)
826 (MergePick::Major, false)
826 } else {
827 } else {
827 (MergePick::Any, true)
828 (MergePick::Any, true)
828 }
829 }
829 } else if src_major.rev == src_minor.rev {
830 } else if src_major.rev == src_minor.rev {
830 // We cannot get copy information for both p1 and p2 in the
831 // We cannot get copy information for both p1 and p2 in the
831 // same rev. So this is the same value.
832 // same rev. So this is the same value.
832 unreachable!(
833 unreachable!(
833 "conflicting information from p1 and p2 in the same revision"
834 "conflicting information from p1 and p2 in the same revision"
834 );
835 );
835 } else {
836 } else {
836 let dest_path = path_map.untokenize(*dest);
837 let dest_path = path_map.untokenize(*dest);
837 let action = changes.get_merge_case(dest_path);
838 let action = changes.get_merge_case(dest_path);
838 if src_minor.path.is_some()
839 if src_minor.path.is_some()
839 && src_major.path.is_none()
840 && src_major.path.is_none()
840 && action == MergeCase::Salvaged
841 && action == MergeCase::Salvaged
841 {
842 {
842 // If the file is "deleted" in the major side but was
843 // If the file is "deleted" in the major side but was
843 // salvaged by the merge, we keep the minor side alive
844 // salvaged by the merge, we keep the minor side alive
844 (MergePick::Minor, true)
845 (MergePick::Minor, true)
845 } else if src_major.path.is_some()
846 } else if src_major.path.is_some()
846 && src_minor.path.is_none()
847 && src_minor.path.is_none()
847 && action == MergeCase::Salvaged
848 && action == MergeCase::Salvaged
848 {
849 {
849 // If the file is "deleted" in the minor side but was
850 // If the file is "deleted" in the minor side but was
850 // salvaged by the merge, unconditionnaly preserve the
851 // salvaged by the merge, unconditionnaly preserve the
851 // major side.
852 // major side.
852 (MergePick::Major, true)
853 (MergePick::Major, true)
853 } else if src_minor.is_overwritten_by(src_major) {
854 } else if src_minor.is_overwritten_by(src_major) {
854 // The information from the minor version are strictly older than
855 // The information from the minor version are strictly older than
855 // the major version
856 // the major version
856 if action == MergeCase::Merged {
857 if action == MergeCase::Merged {
857 // If the file was actively merged, its means some non-copy
858 // If the file was actively merged, its means some non-copy
858 // activity happened on the other branch. It
859 // activity happened on the other branch. It
859 // mean the older copy information are still relevant.
860 // mean the older copy information are still relevant.
860 //
861 //
861 // The major side wins such conflict.
862 // The major side wins such conflict.
862 (MergePick::Major, true)
863 (MergePick::Major, true)
863 } else {
864 } else {
864 // No activity on the minor branch, pick the newer one.
865 // No activity on the minor branch, pick the newer one.
865 (MergePick::Major, false)
866 (MergePick::Major, false)
866 }
867 }
867 } else if src_major.is_overwritten_by(src_minor) {
868 } else if src_major.is_overwritten_by(src_minor) {
868 if action == MergeCase::Merged {
869 if action == MergeCase::Merged {
869 // If the file was actively merged, its means some non-copy
870 // If the file was actively merged, its means some non-copy
870 // activity happened on the other branch. It
871 // activity happened on the other branch. It
871 // mean the older copy information are still relevant.
872 // mean the older copy information are still relevant.
872 //
873 //
873 // The major side wins such conflict.
874 // The major side wins such conflict.
874 (MergePick::Major, true)
875 (MergePick::Major, true)
875 } else {
876 } else {
876 // No activity on the minor branch, pick the newer one.
877 // No activity on the minor branch, pick the newer one.
877 (MergePick::Minor, false)
878 (MergePick::Minor, false)
878 }
879 }
879 } else if src_minor.path.is_none() {
880 } else if src_minor.path.is_none() {
880 // the minor side has no relevant information, pick the alive one
881 // the minor side has no relevant information, pick the alive one
881 (MergePick::Major, true)
882 (MergePick::Major, true)
882 } else if src_major.path.is_none() {
883 } else if src_major.path.is_none() {
883 // the major side has no relevant information, pick the alive one
884 // the major side has no relevant information, pick the alive one
884 (MergePick::Minor, true)
885 (MergePick::Minor, true)
885 } else {
886 } else {
886 // by default the major side wins
887 // by default the major side wins
887 (MergePick::Major, true)
888 (MergePick::Major, true)
888 }
889 }
889 }
890 }
890 }
891 }
General Comments 0
You need to be logged in to leave comments. Login now