copies-rust: encapsulate internal sets on `changes`...
marmoute
r46589:23679379 default
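
To make the encapsulation concrete before reading the diff: code in this module now asks `ChangedFiles` for a `MergeCase` value instead of probing its `salvaged` and `merged` sets directly. The short sketch below is illustrative only; the `describe` helper is hypothetical and not part of the changeset, and it relies solely on the `ChangedFiles`, `MergeCase` and `HgPath` items visible in the diff.

// Hypothetical in-module helper, for illustration only.
fn describe(changes: &ChangedFiles, dest: &HgPath) -> &'static str {
    // Before this changeset, callers checked changes.salvaged / changes.merged here.
    match changes.get_merge_case(dest) {
        MergeCase::Salvaged => "deleted on one side but salvaged by the merge",
        MergeCase::Merged => "actively merged; conflicting copy info possible",
        MergeCase::Normal => "no special merge handling",
    }
}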
@@ -1,390 +1,415 @@
 use crate::utils::hg_path::HgPath;
 use crate::utils::hg_path::HgPathBuf;
 use crate::Revision;
 
 use im_rc::ordmap::DiffItem;
 use im_rc::ordmap::OrdMap;
 
 use std::collections::HashMap;
 use std::collections::HashSet;
 
 pub type PathCopies = HashMap<HgPathBuf, HgPathBuf>;
 
 #[derive(Clone, Debug, PartialEq)]
 struct TimeStampedPathCopy {
     /// revision at which the copy information was added
     rev: Revision,
     /// the copy source (set to None in case of deletion of the associated
     /// key)
     path: Option<HgPathBuf>,
 }
 
 /// maps CopyDestination to Copy Source (+ a "timestamp" for the operation)
 type TimeStampedPathCopies = OrdMap<HgPathBuf, TimeStampedPathCopy>;
 
 /// holds parent 1, parent 2 and relevant files actions.
 pub type RevInfo = (Revision, Revision, ChangedFiles);
 
 /// Represents the files affected by a changeset
 ///
 /// This holds a subset of mercurial.metadata.ChangingFiles, as we do not need
 /// all the data categories tracked by it.
 pub struct ChangedFiles {
     removed: HashSet<HgPathBuf>,
     merged: HashSet<HgPathBuf>,
     salvaged: HashSet<HgPathBuf>,
     copied_from_p1: PathCopies,
     copied_from_p2: PathCopies,
 }
 
 /// Represents active changes that affect the copy tracing.
 enum Action<'a> {
     /// The parent → children edge is removing a file
     ///
     /// (actually, this could be the edge from the other parent, but it does
     /// not matter)
     Removed(&'a HgPath),
     /// The parent → children edge introduces copy information between (dest,
     /// source)
     Copied(&'a HgPath, &'a HgPath),
 }
 
+/// This expresses the possible "special" cases we can get in a merge
+///
+/// See mercurial/metadata.py for details on these values.
+#[derive(PartialEq)]
+enum MergeCase {
+    /// Merged: file had history on both sides that needed to be merged
+    Merged,
+    /// Salvaged: file was a candidate for deletion, but survived the merge
+    Salvaged,
+    /// Normal: Not one of the two cases above
+    Normal,
+}
+
 impl ChangedFiles {
     pub fn new(
         removed: HashSet<HgPathBuf>,
         merged: HashSet<HgPathBuf>,
         salvaged: HashSet<HgPathBuf>,
         copied_from_p1: PathCopies,
         copied_from_p2: PathCopies,
     ) -> Self {
         ChangedFiles {
             removed,
             merged,
             salvaged,
             copied_from_p1,
             copied_from_p2,
         }
     }
 
     pub fn new_empty() -> Self {
         ChangedFiles {
             removed: HashSet::new(),
             merged: HashSet::new(),
             salvaged: HashSet::new(),
             copied_from_p1: PathCopies::new(),
             copied_from_p2: PathCopies::new(),
         }
     }
 
     /// Return an iterator over all the `Action` in this instance.
     fn iter_actions(&self, parent: usize) -> impl Iterator<Item = Action> {
         let copies_iter = match parent {
             1 => self.copied_from_p1.iter(),
             2 => self.copied_from_p2.iter(),
             _ => unreachable!(),
         };
         let remove_iter = self.removed.iter();
         let copies_iter = copies_iter.map(|(x, y)| Action::Copied(x, y));
         let remove_iter = remove_iter.map(|x| Action::Removed(x));
         copies_iter.chain(remove_iter)
     }
+
+    /// return the MergeCase value associated with a filename
+    fn get_merge_case(&self, path: &HgPath) -> MergeCase {
+        if self.salvaged.contains(path) {
+            return MergeCase::Salvaged;
+        } else if self.merged.contains(path) {
+            return MergeCase::Merged;
+        } else {
+            return MergeCase::Normal;
+        }
+    }
 }
 
 /// A struct responsible for answering "is X an ancestor of Y" quickly
 ///
 /// The structure will delegate ancestor calls to a callback, and cache the
 /// result.
 #[derive(Debug)]
 struct AncestorOracle<'a, A: Fn(Revision, Revision) -> bool> {
     inner: &'a A,
     pairs: HashMap<(Revision, Revision), bool>,
 }
 
 impl<'a, A: Fn(Revision, Revision) -> bool> AncestorOracle<'a, A> {
     fn new(func: &'a A) -> Self {
         Self {
             inner: func,
             pairs: HashMap::default(),
         }
     }
 
     /// returns `true` if `anc` is an ancestor of `desc`, `false` otherwise
     fn is_ancestor(&mut self, anc: Revision, desc: Revision) -> bool {
         if anc > desc {
             false
         } else if anc == desc {
             true
         } else {
             if let Some(b) = self.pairs.get(&(anc, desc)) {
                 *b
             } else {
                 let b = (self.inner)(anc, desc);
                 self.pairs.insert((anc, desc), b);
                 b
             }
         }
     }
 }
 
 /// Same as mercurial.copies._combine_changeset_copies, but in Rust.
 ///
 /// Arguments are:
 ///
 /// revs: all revisions to be considered
 /// children: a {parent → [children]} mapping
 /// target_rev: the final revision we are combining copies to
 /// rev_info(rev): callback to get revision information:
 ///   * first parent
 ///   * second parent
 ///   * ChangedFiles
 /// isancestors(low_rev, high_rev): callback to check if a revision is an
 ///   ancestor of another
 pub fn combine_changeset_copies<A: Fn(Revision, Revision) -> bool>(
     revs: Vec<Revision>,
     children: HashMap<Revision, Vec<Revision>>,
     target_rev: Revision,
     rev_info: &impl Fn(Revision) -> RevInfo,
     is_ancestor: &A,
 ) -> PathCopies {
     let mut all_copies = HashMap::new();
     let mut oracle = AncestorOracle::new(is_ancestor);
 
     for rev in revs {
         // Retrieve data computed in a previous iteration
         let copies = all_copies.remove(&rev);
         let copies = match copies {
             Some(c) => c,
             None => TimeStampedPathCopies::default(), // root of the walked set
         };
 
         let current_children = match children.get(&rev) {
             Some(c) => c,
             None => panic!("inconsistent `revs` and `children`"),
         };
 
         for child in current_children {
             // We will chain the copies information accumulated for `rev` with
             // the individual copies information for each of its children,
             // creating a new PathCopies for each `rev` → `children` vertex.
             let (p1, p2, changes) = rev_info(*child);
 
             let parent = if rev == p1 {
                 1
             } else {
                 assert_eq!(rev, p2);
                 2
             };
             let mut new_copies = copies.clone();
 
             for action in changes.iter_actions(parent) {
                 match action {
                     Action::Copied(dest, source) => {
                         let entry;
                         if let Some(v) = copies.get(source) {
                             entry = match &v.path {
                                 Some(path) => Some((*(path)).to_owned()),
                                 None => Some(source.to_owned()),
                             }
                         } else {
                             entry = Some(source.to_owned());
                         }
                         // Each new entry is introduced by the child; we
                         // record this information as we will need it to take
                         // the right decision when merging conflicting copy
                         // information. See merge_copies_dict for details.
                         let ttpc = TimeStampedPathCopy {
                             rev: *child,
                             path: entry,
                         };
                         new_copies.insert(dest.to_owned(), ttpc);
                     }
                     Action::Removed(f) => {
                         // We must drop copy information for removed files.
                         //
                         // We need to explicitly record them as dropped to
                         // propagate this information when merging two
                         // TimeStampedPathCopies objects.
                         if new_copies.contains_key(f.as_ref()) {
                             let ttpc = TimeStampedPathCopy {
                                 rev: *child,
                                 path: None,
                             };
                             new_copies.insert(f.to_owned(), ttpc);
                         }
                     }
                 }
             }
 
             // A merge has two parents and needs to combine their copy
             // information.
             //
             // If the vertex from the other parent was already processed, we
             // will have a value for the child ready to be used. We need to
             // grab it and combine it with the one we already computed. If
             // not, we can simply store the newly computed data. The
             // processing done at the time of the second parent will take
             // care of combining the two TimeStampedPathCopies instances.
             match all_copies.remove(child) {
                 None => {
                     all_copies.insert(child, new_copies);
                 }
                 Some(other_copies) => {
                     let (minor, major) = match parent {
                         1 => (other_copies, new_copies),
                         2 => (new_copies, other_copies),
                         _ => unreachable!(),
                     };
                     let merged_copies =
                         merge_copies_dict(minor, major, &changes, &mut oracle);
                     all_copies.insert(child, merged_copies);
                 }
             };
         }
     }
 
     // Drop internal information (like the timestamp) and return the final
     // mapping.
     let tt_result = all_copies
         .remove(&target_rev)
         .expect("target revision was not processed");
     let mut result = PathCopies::default();
     for (dest, tt_source) in tt_result {
         if let Some(path) = tt_source.path {
             result.insert(dest, path);
         }
     }
     result
 }
 
 /// merge two copies-mappings together, minor and major
 ///
 /// In case of conflict, the value from "major" will be picked, except in
 /// some cases. See inline documentation for details.
 #[allow(clippy::if_same_then_else)]
 fn merge_copies_dict<A: Fn(Revision, Revision) -> bool>(
     minor: TimeStampedPathCopies,
     major: TimeStampedPathCopies,
     changes: &ChangedFiles,
     oracle: &mut AncestorOracle<A>,
 ) -> TimeStampedPathCopies {
     if minor.is_empty() {
         return major;
     } else if major.is_empty() {
         return minor;
     }
     let mut override_minor = Vec::new();
     let mut override_major = Vec::new();
 
     let mut to_major = |k: &HgPathBuf, v: &TimeStampedPathCopy| {
         override_major.push((k.clone(), v.clone()))
     };
     let mut to_minor = |k: &HgPathBuf, v: &TimeStampedPathCopy| {
         override_minor.push((k.clone(), v.clone()))
     };
 
     // The diff function leverages detection of identical subparts when minor
     // and major have some common ancestors. This makes it very fast in most
     // cases.
     //
     // In cases where the two maps are vastly different in size, the current
     // approach is still slowish because the iteration will go over
     // all the "exclusive" content of the larger one. This situation can be
     // frequent when the subgraph of revisions we are processing has a lot
     // of roots, each root adding its own fully new map to the mix (and
     // likely a small map, if the path from the root to the "main path" is
     // short).
     //
     // We could do better by detecting such situations and processing them
     // differently.
     for d in minor.diff(&major) {
         match d {
             DiffItem::Add(k, v) => to_minor(k, v),
             DiffItem::Remove(k, v) => to_major(k, v),
             DiffItem::Update { old, new } => {
                 let (dest, src_major) = new;
                 let (_, src_minor) = old;
                 let mut pick_minor = || (to_major(dest, src_minor));
                 let mut pick_major = || (to_minor(dest, src_major));
                 if src_major.path == src_minor.path {
                     // we have the same value, but from another source;
                     if src_major.rev == src_minor.rev {
                         // If the two entries are identical, no need to do
                         // anything (but diff should not have yielded them)
                         unreachable!();
                     } else if oracle.is_ancestor(src_major.rev, src_minor.rev)
                     {
                         pick_minor();
                     } else {
                         pick_major();
                     }
                 } else if src_major.rev == src_minor.rev {
                     // We cannot get copy information for both p1 and p2 in
                     // the same rev. So this is the same value.
                     unreachable!();
                 } else {
+                    let action = changes.get_merge_case(&dest);
                     if src_major.path.is_none()
-                        && changes.salvaged.contains(dest)
+                        && action == MergeCase::Salvaged
                     {
                         // If the file is "deleted" in the major side but was
                         // salvaged by the merge, we keep the minor side alive
                         pick_minor();
                     } else if src_minor.path.is_none()
-                        && changes.salvaged.contains(dest)
+                        && action == MergeCase::Salvaged
                     {
                         // If the file is "deleted" in the minor side but was
                         // salvaged by the merge, unconditionally preserve the
                         // major side.
                         pick_major();
-                    } else if changes.merged.contains(dest) {
+                    } else if action == MergeCase::Merged {
                         // If the file was actively merged, copy information
                         // from each side might conflict. The major side will
                         // win such conflicts.
                         pick_major();
                     } else if oracle.is_ancestor(src_major.rev, src_minor.rev)
                     {
                         // If the minor side is strictly newer than the major
                         // side, it should be kept.
                         pick_minor();
                     } else if src_major.path.is_some() {
                         // without any special case, the "major" value wins
                         // over the "minor" one.
                         pick_major();
                     } else if oracle.is_ancestor(src_minor.rev, src_major.rev)
                     {
                         // the "major" rev is a direct ancestor of "minor",
                         // so any different value should
                         // overwrite it.
                         pick_major();
                     } else {
                         // major version is None (so the file was deleted on
                         // that branch) and that branch is independent (neither
                         // minor nor major is an ancestor of the other one).
                         // We preserve the new
                         // information about the new file.
                         pick_minor();
                     }
                 }
             }
         };
     }
 
     let updates;
     let mut result;
     if override_major.is_empty() {
         result = major
     } else if override_minor.is_empty() {
         result = minor
     } else {
         if override_minor.len() < override_major.len() {
             updates = override_minor;
             result = minor;
         } else {
             updates = override_major;
             result = major;
         }
         for (k, v) in updates {
             result.insert(k, v);
         }
     }
     result
 }
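
For readers wondering how the exported entry point is driven, here is a rough caller-side sketch, not part of this changeset: it assumes `Revision` is hg-core's signed integer alias, reuses the module's existing imports, and wires `combine_changeset_copies` to made-up closures over a tiny linear graph 0 → 1 → 2.

// Hypothetical driver, for illustration only; it would live alongside the
// code above so the module's `HashMap` import and private types are in scope.
fn copies_for_target_sketch() -> PathCopies {
    // Revisions that have children; the target is only reachable as a child,
    // mirroring how the Python caller builds its `children` mapping.
    let revs: Vec<Revision> = vec![0, 1];
    let target_rev: Revision = 2;

    let mut children: HashMap<Revision, Vec<Revision>> = HashMap::new();
    children.insert(0, vec![1]);
    children.insert(1, vec![2]);

    // rev_info(rev) -> (p1, p2, ChangedFiles); -1 stands in for the null
    // revision since this sketch has single-parent revisions and no changes.
    let rev_info = |rev: Revision| -> RevInfo {
        (rev - 1, -1, ChangedFiles::new_empty())
    };

    // Only consulted when two parents' copy data must be merged; a linear
    // graph can answer with plain ordering.
    let is_ancestor = |anc: Revision, desc: Revision| anc <= desc;

    combine_changeset_copies(revs, children, target_rev, &rev_info, &is_ancestor)
}

On this empty-history sketch the result is an empty `PathCopies`; real callers feed per-revision `ChangedFiles` built from changeset metadata, and the `is_ancestor` callback is what `AncestorOracle` caches.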