copies-rust: pass closures and iterators instead of `&ChangedFiles`...
Simon Sapin
r47356:80f7567a default
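The change below threads the contents of `ChangedFiles` through the algorithm as a plain iterator of `Action`s plus a `MergeCase` lookup closure: `add_revision` becomes a thin wrapper around a new `add_revision_inner`, and `chain_changes`, `merge_copies_dict` and `compare_value` no longer take `&ChangedFiles` directly. A minimal, self-contained sketch of that pattern in plain std Rust follows; the `Event`, `apply`, `is_salvaged` and `ParsedChanges` names are invented for illustration and are not part of hg-core.

// Sketch only: decouple an algorithm from the binary container it used to
// borrow, by accepting an iterator of events and a lookup closure instead.
enum Event<'a> {
    Copied(&'a str, &'a str), // (destination, source)
    Removed(&'a str),
}

// Before: something like `fn apply(changes: &ParsedChanges)` (hypothetical).
// After: the pieces are passed directly, so a unit test can supply a `Vec`
// and a closure without building any binary encoding first.
fn apply<'a>(
    events: impl Iterator<Item = Event<'a>>,
    is_salvaged: impl Fn(&str) -> bool,
) -> Vec<String> {
    let mut log = Vec::new();
    for event in events {
        match event {
            Event::Copied(dest, source) => {
                log.push(format!("{} <- {}", dest, source))
            }
            Event::Removed(path) if is_salvaged(path) => {
                log.push(format!("{} salvaged", path))
            }
            Event::Removed(path) => log.push(format!("{} removed", path)),
        }
    }
    log
}

fn main() {
    let events = vec![Event::Copied("b", "a"), Event::Removed("c")];
    let log = apply(events.into_iter(), |path| path == "d");
    assert_eq!(log, vec!["b <- a".to_string(), "c removed".to_string()]);
}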
@@ -1,750 +1,761 @@
 use crate::utils::hg_path::HgPath;
 use crate::utils::hg_path::HgPathBuf;
 use crate::Revision;
 use crate::NULL_REVISION;

 use im_rc::ordmap::Entry;
 use im_rc::ordmap::OrdMap;
 use im_rc::OrdSet;

 use std::cmp::Ordering;
 use std::collections::HashMap;
 use std::convert::TryInto;

 pub type PathCopies = HashMap<HgPathBuf, HgPathBuf>;

 type PathToken = usize;

 #[derive(Clone, Debug)]
 struct CopySource {
     /// revision at which the copy information was added
     rev: Revision,
     /// the copy source, (Set to None in case of deletion of the associated
     /// key)
     path: Option<PathToken>,
     /// a set of previous `CopySource.rev` value directly or indirectly
     /// overwritten by this one.
     overwritten: OrdSet<Revision>,
 }

 impl CopySource {
     /// create a new CopySource
     ///
     /// Use this when no previous copy source existed.
     fn new(rev: Revision, path: Option<PathToken>) -> Self {
         Self {
             rev,
             path,
             overwritten: OrdSet::new(),
         }
     }

     /// create a new CopySource from merging two others
     ///
     /// Use this when merging two InternalPathCopies requires active merging of
     /// some entries.
     fn new_from_merge(rev: Revision, winner: &Self, loser: &Self) -> Self {
         let mut overwritten = OrdSet::new();
         overwritten.extend(winner.overwritten.iter().copied());
         overwritten.extend(loser.overwritten.iter().copied());
         overwritten.insert(winner.rev);
         overwritten.insert(loser.rev);
         Self {
             rev,
             path: winner.path,
             overwritten: overwritten,
         }
     }

     /// Update the value of a pre-existing CopySource
     ///
     /// Use this when recording copy information from parent → child edges
     fn overwrite(&mut self, rev: Revision, path: Option<PathToken>) {
         self.overwritten.insert(self.rev);
         self.rev = rev;
         self.path = path;
     }

     /// Mark pre-existing copy information as "dropped" by a file deletion
     ///
     /// Use this when recording copy information from parent → child edges
     fn mark_delete(&mut self, rev: Revision) {
         self.overwritten.insert(self.rev);
         self.rev = rev;
         self.path = None;
     }

     /// Mark pre-existing copy information as "dropped" by a file deletion
     ///
     /// Use this when recording copy information from parent → child edges
     fn mark_delete_with_pair(&mut self, rev: Revision, other: &Self) {
         self.overwritten.insert(self.rev);
         if other.rev != rev {
             self.overwritten.insert(other.rev);
         }
         self.overwritten.extend(other.overwritten.iter().copied());
         self.rev = rev;
         self.path = None;
     }

     fn is_overwritten_by(&self, other: &Self) -> bool {
         other.overwritten.contains(&self.rev)
     }
 }

 // For the same "dest", content generated for a given revision will always be
 // the same.
 impl PartialEq for CopySource {
     fn eq(&self, other: &Self) -> bool {
         #[cfg(debug_assertions)]
         {
             if self.rev == other.rev {
                 debug_assert!(self.path == other.path);
                 debug_assert!(self.overwritten == other.overwritten);
             }
         }
         self.rev == other.rev
     }
 }

 /// maps CopyDestination to Copy Source (+ a "timestamp" for the operation)
 type InternalPathCopies = OrdMap<PathToken, CopySource>;

 /// represent the files affected by a changesets
 ///
 /// This hold a subset of mercurial.metadata.ChangingFiles as we do not need
 /// all the data categories tracked by it.
 /// This hold a subset of mercurial.metadata.ChangingFiles as we do not need
 /// all the data categories tracked by it.
 pub struct ChangedFiles<'a> {
     nb_items: u32,
     index: &'a [u8],
     data: &'a [u8],
 }

 /// Represent active changes that affect the copy tracing.
 enum Action<'a> {
     /// The parent ? children edge is removing a file
     ///
     /// (actually, this could be the edge from the other parent, but it does
     /// not matters)
     Removed(&'a HgPath),
     /// The parent ? children edge introduce copy information between (dest,
     /// source)
     CopiedFromP1(&'a HgPath, &'a HgPath),
     CopiedFromP2(&'a HgPath, &'a HgPath),
 }

 /// This express the possible "special" case we can get in a merge
 ///
 /// See mercurial/metadata.py for details on these values.
 #[derive(PartialEq)]
 enum MergeCase {
     /// Merged: file had history on both side that needed to be merged
     Merged,
     /// Salvaged: file was candidate for deletion, but survived the merge
     Salvaged,
     /// Normal: Not one of the two cases above
     Normal,
 }

 type FileChange<'a> = (u8, &'a HgPath, &'a HgPath);

 const EMPTY: &[u8] = b"";
 const COPY_MASK: u8 = 3;
 const P1_COPY: u8 = 2;
 const P2_COPY: u8 = 3;
 const ACTION_MASK: u8 = 28;
 const REMOVED: u8 = 12;
 const MERGED: u8 = 8;
 const SALVAGED: u8 = 16;

 impl<'a> ChangedFiles<'a> {
     const INDEX_START: usize = 4;
     const ENTRY_SIZE: u32 = 9;
     const FILENAME_START: u32 = 1;
     const COPY_SOURCE_START: u32 = 5;

     pub fn new(data: &'a [u8]) -> Self {
         assert!(
             data.len() >= 4,
             "data size ({}) is too small to contain the header (4)",
             data.len()
         );
         let nb_items_raw: [u8; 4] = (&data[0..=3])
             .try_into()
             .expect("failed to turn 4 bytes into 4 bytes");
         let nb_items = u32::from_be_bytes(nb_items_raw);

         let index_size = (nb_items * Self::ENTRY_SIZE) as usize;
         let index_end = Self::INDEX_START + index_size;

         assert!(
             data.len() >= index_end,
             "data size ({}) is too small to fit the index_data ({})",
             data.len(),
             index_end
         );

         let ret = ChangedFiles {
             nb_items,
             index: &data[Self::INDEX_START..index_end],
             data: &data[index_end..],
         };
         let max_data = ret.filename_end(nb_items - 1) as usize;
         assert!(
             ret.data.len() >= max_data,
             "data size ({}) is too small to fit all data ({})",
             data.len(),
             index_end + max_data
         );
         ret
     }

     pub fn new_empty() -> Self {
         ChangedFiles {
             nb_items: 0,
             index: EMPTY,
             data: EMPTY,
         }
     }

     /// internal function to return an individual entry at a given index
     fn entry(&'a self, idx: u32) -> FileChange<'a> {
         if idx >= self.nb_items {
             panic!(
                 "index for entry is higher that the number of file {} >= {}",
                 idx, self.nb_items
             )
         }
         let flags = self.flags(idx);
         let filename = self.filename(idx);
         let copy_idx = self.copy_idx(idx);
         let copy_source = self.filename(copy_idx);
         (flags, filename, copy_source)
     }

     /// internal function to return the filename of the entry at a given index
     fn filename(&self, idx: u32) -> &HgPath {
         let filename_start;
         if idx == 0 {
             filename_start = 0;
         } else {
             filename_start = self.filename_end(idx - 1)
         }
         let filename_end = self.filename_end(idx);
         let filename_start = filename_start as usize;
         let filename_end = filename_end as usize;
         HgPath::new(&self.data[filename_start..filename_end])
     }

     /// internal function to return the flag field of the entry at a given
     /// index
     fn flags(&self, idx: u32) -> u8 {
         let idx = idx as usize;
         self.index[idx * (Self::ENTRY_SIZE as usize)]
     }

     /// internal function to return the end of a filename part at a given index
     fn filename_end(&self, idx: u32) -> u32 {
         let start = (idx * Self::ENTRY_SIZE) + Self::FILENAME_START;
         let end = (idx * Self::ENTRY_SIZE) + Self::COPY_SOURCE_START;
         let start = start as usize;
         let end = end as usize;
         let raw = (&self.index[start..end])
             .try_into()
             .expect("failed to turn 4 bytes into 4 bytes");
         u32::from_be_bytes(raw)
     }

     /// internal function to return index of the copy source of the entry at a
     /// given index
     fn copy_idx(&self, idx: u32) -> u32 {
         let start = (idx * Self::ENTRY_SIZE) + Self::COPY_SOURCE_START;
         let end = (idx + 1) * Self::ENTRY_SIZE;
         let start = start as usize;
         let end = end as usize;
         let raw = (&self.index[start..end])
             .try_into()
             .expect("failed to turn 4 bytes into 4 bytes");
         u32::from_be_bytes(raw)
     }

     /// Return an iterator over all the `Action` in this instance.
     fn iter_actions(&self) -> ActionsIterator {
         ActionsIterator {
             changes: &self,
             current: 0,
         }
     }

     /// return the MergeCase value associated with a filename
     fn get_merge_case(&self, path: &HgPath) -> MergeCase {
         if self.nb_items == 0 {
             return MergeCase::Normal;
         }
         let mut low_part = 0;
         let mut high_part = self.nb_items;

         while low_part < high_part {
             let cursor = (low_part + high_part - 1) / 2;
             let (flags, filename, _source) = self.entry(cursor);
             match path.cmp(filename) {
                 Ordering::Less => low_part = cursor + 1,
                 Ordering::Greater => high_part = cursor,
                 Ordering::Equal => {
                     return match flags & ACTION_MASK {
                         MERGED => MergeCase::Merged,
                         SALVAGED => MergeCase::Salvaged,
                         _ => MergeCase::Normal,
                     };
                 }
             }
         }
         MergeCase::Normal
     }
 }

 struct ActionsIterator<'a> {
     changes: &'a ChangedFiles<'a>,
     current: u32,
 }

 impl<'a> Iterator for ActionsIterator<'a> {
     type Item = Action<'a>;

     fn next(&mut self) -> Option<Action<'a>> {
         while self.current < self.changes.nb_items {
             let (flags, file, source) = self.changes.entry(self.current);
             self.current += 1;
             if (flags & ACTION_MASK) == REMOVED {
                 return Some(Action::Removed(file));
             }
             let copy = flags & COPY_MASK;
             if copy == P1_COPY {
                 return Some(Action::CopiedFromP1(file, source));
             } else if copy == P2_COPY {
                 return Some(Action::CopiedFromP2(file, source));
             }
         }
         return None;
     }
 }

 /// A small "tokenizer" responsible of turning full HgPath into lighter
 /// PathToken
 ///
 /// Dealing with small object, like integer is much faster, so HgPath input are
 /// turned into integer "PathToken" and converted back in the end.
 #[derive(Clone, Debug, Default)]
 struct TwoWayPathMap {
     token: HashMap<HgPathBuf, PathToken>,
     path: Vec<HgPathBuf>,
 }

 impl TwoWayPathMap {
     fn tokenize(&mut self, path: &HgPath) -> PathToken {
         match self.token.get(path) {
             Some(a) => *a,
             None => {
                 let a = self.token.len();
                 let buf = path.to_owned();
                 self.path.push(buf.clone());
                 self.token.insert(buf, a);
                 a
             }
         }
     }

     fn untokenize(&self, token: PathToken) -> &HgPathBuf {
         assert!(token < self.path.len(), format!("Unknown token: {}", token));
         &self.path[token]
     }
 }

 /// Same as mercurial.copies._combine_changeset_copies, but in Rust.
 pub struct CombineChangesetCopies {
     all_copies: HashMap<Revision, InternalPathCopies>,
     path_map: TwoWayPathMap,
     children_count: HashMap<Revision, usize>,
 }

 impl CombineChangesetCopies {
     pub fn new(children_count: HashMap<Revision, usize>) -> Self {
         Self {
             all_copies: HashMap::new(),
             path_map: TwoWayPathMap::default(),
             children_count,
         }
     }

     /// Combined the given `changes` data specific to `rev` with the data
     /// previously given for its parents (and transitively, its ancestors).
     pub fn add_revision(
         &mut self,
         rev: Revision,
         p1: Revision,
         p2: Revision,
         changes: ChangedFiles<'_>,
     ) {
+        self.add_revision_inner(rev, p1, p2, changes.iter_actions(), |path| {
+            changes.get_merge_case(path)
+        })
+    }
+
+    /// Separated out from `add_revsion` so that unit tests can call this
+    /// without synthetizing a `ChangedFiles` in binary format.
+    fn add_revision_inner<'a>(
+        &mut self,
+        rev: Revision,
+        p1: Revision,
+        p2: Revision,
+        copy_actions: impl Iterator<Item = Action<'a>>,
+        get_merge_case: impl Fn(&HgPath) -> MergeCase + Copy,
+    ) {
         // Retrieve data computed in a previous iteration
         let p1_copies = match p1 {
             NULL_REVISION => None,
             _ => get_and_clean_parent_copies(
                 &mut self.all_copies,
                 &mut self.children_count,
                 p1,
             ), // will be None if the vertex is not to be traversed
         };
         let p2_copies = match p2 {
             NULL_REVISION => None,
             _ => get_and_clean_parent_copies(
                 &mut self.all_copies,
                 &mut self.children_count,
                 p2,
             ), // will be None if the vertex is not to be traversed
         };
         // combine it with data for that revision
         let (p1_copies, p2_copies) = chain_changes(
             &mut self.path_map,
             p1_copies,
             p2_copies,
-            &changes,
+            copy_actions,
             rev,
         );
         let copies = match (p1_copies, p2_copies) {
             (None, None) => None,
             (c, None) => c,
             (None, c) => c,
             (Some(p1_copies), Some(p2_copies)) => Some(merge_copies_dict(
                 &self.path_map,
                 rev,
                 p2_copies,
                 p1_copies,
-                &changes,
+                get_merge_case,
             )),
         };
         if let Some(c) = copies {
             self.all_copies.insert(rev, c);
         }
     }

     /// Drop intermediate data (such as which revision a copy was from) and
     /// return the final mapping.
     pub fn finish(mut self, target_rev: Revision) -> PathCopies {
         let tt_result = self
             .all_copies
             .remove(&target_rev)
             .expect("target revision was not processed");
         let mut result = PathCopies::default();
         for (dest, tt_source) in tt_result {
             if let Some(path) = tt_source.path {
                 let path_dest = self.path_map.untokenize(dest).to_owned();
                 let path_path = self.path_map.untokenize(path).to_owned();
                 result.insert(path_dest, path_path);
             }
         }
         result
     }
 }
464 }
450
465
451 /// fetch previous computed information
466 /// fetch previous computed information
452 ///
467 ///
453 /// If no other children are expected to need this information, we drop it from
468 /// If no other children are expected to need this information, we drop it from
454 /// the cache.
469 /// the cache.
455 ///
470 ///
456 /// If parent is not part of the set we are expected to walk, return None.
471 /// If parent is not part of the set we are expected to walk, return None.
457 fn get_and_clean_parent_copies(
472 fn get_and_clean_parent_copies(
458 all_copies: &mut HashMap<Revision, InternalPathCopies>,
473 all_copies: &mut HashMap<Revision, InternalPathCopies>,
459 children_count: &mut HashMap<Revision, usize>,
474 children_count: &mut HashMap<Revision, usize>,
460 parent_rev: Revision,
475 parent_rev: Revision,
461 ) -> Option<InternalPathCopies> {
476 ) -> Option<InternalPathCopies> {
462 let count = children_count.get_mut(&parent_rev)?;
477 let count = children_count.get_mut(&parent_rev)?;
463 *count -= 1;
478 *count -= 1;
464 if *count == 0 {
479 if *count == 0 {
465 match all_copies.remove(&parent_rev) {
480 match all_copies.remove(&parent_rev) {
466 Some(c) => Some(c),
481 Some(c) => Some(c),
467 None => Some(InternalPathCopies::default()),
482 None => Some(InternalPathCopies::default()),
468 }
483 }
469 } else {
484 } else {
470 match all_copies.get(&parent_rev) {
485 match all_copies.get(&parent_rev) {
471 Some(c) => Some(c.clone()),
486 Some(c) => Some(c.clone()),
472 None => Some(InternalPathCopies::default()),
487 None => Some(InternalPathCopies::default()),
473 }
488 }
474 }
489 }
475 }
490 }
476
491
477 /// Combine ChangedFiles with some existing PathCopies information and return
492 /// Combine ChangedFiles with some existing PathCopies information and return
478 /// the result
493 /// the result
479 fn chain_changes(
494 fn chain_changes<'a>(
480 path_map: &mut TwoWayPathMap,
495 path_map: &mut TwoWayPathMap,
481 base_p1_copies: Option<InternalPathCopies>,
496 base_p1_copies: Option<InternalPathCopies>,
482 base_p2_copies: Option<InternalPathCopies>,
497 base_p2_copies: Option<InternalPathCopies>,
483 changes: &ChangedFiles,
498 copy_actions: impl Iterator<Item = Action<'a>>,
484 current_rev: Revision,
499 current_rev: Revision,
485 ) -> (Option<InternalPathCopies>, Option<InternalPathCopies>) {
500 ) -> (Option<InternalPathCopies>, Option<InternalPathCopies>) {
486 // Fast path the "nothing to do" case.
501 // Fast path the "nothing to do" case.
487 if let (None, None) = (&base_p1_copies, &base_p2_copies) {
502 if let (None, None) = (&base_p1_copies, &base_p2_copies) {
488 return (None, None);
503 return (None, None);
489 }
504 }
490
505
491 let mut p1_copies = base_p1_copies.clone();
506 let mut p1_copies = base_p1_copies.clone();
492 let mut p2_copies = base_p2_copies.clone();
507 let mut p2_copies = base_p2_copies.clone();
493 for action in changes.iter_actions() {
508 for action in copy_actions {
494 match action {
509 match action {
495 Action::CopiedFromP1(path_dest, path_source) => {
510 Action::CopiedFromP1(path_dest, path_source) => {
496 match &mut p1_copies {
511 match &mut p1_copies {
497 None => (), // This is not a vertex we should proceed.
512 None => (), // This is not a vertex we should proceed.
498 Some(copies) => add_one_copy(
513 Some(copies) => add_one_copy(
499 current_rev,
514 current_rev,
500 path_map,
515 path_map,
501 copies,
516 copies,
502 base_p1_copies.as_ref().unwrap(),
517 base_p1_copies.as_ref().unwrap(),
503 path_dest,
518 path_dest,
504 path_source,
519 path_source,
505 ),
520 ),
506 }
521 }
507 }
522 }
508 Action::CopiedFromP2(path_dest, path_source) => {
523 Action::CopiedFromP2(path_dest, path_source) => {
509 match &mut p2_copies {
524 match &mut p2_copies {
510 None => (), // This is not a vertex we should proceed.
525 None => (), // This is not a vertex we should proceed.
511 Some(copies) => add_one_copy(
526 Some(copies) => add_one_copy(
512 current_rev,
527 current_rev,
513 path_map,
528 path_map,
514 copies,
529 copies,
515 base_p2_copies.as_ref().unwrap(),
530 base_p2_copies.as_ref().unwrap(),
516 path_dest,
531 path_dest,
517 path_source,
532 path_source,
518 ),
533 ),
519 }
534 }
520 }
535 }
521 Action::Removed(deleted_path) => {
536 Action::Removed(deleted_path) => {
522 // We must drop copy information for removed file.
537 // We must drop copy information for removed file.
523 //
538 //
524 // We need to explicitly record them as dropped to
539 // We need to explicitly record them as dropped to
525 // propagate this information when merging two
540 // propagate this information when merging two
526 // InternalPathCopies object.
541 // InternalPathCopies object.
527 let deleted = path_map.tokenize(deleted_path);
542 let deleted = path_map.tokenize(deleted_path);
528
543
529 let p1_entry = match &mut p1_copies {
544 let p1_entry = match &mut p1_copies {
530 None => None,
545 None => None,
531 Some(copies) => match copies.entry(deleted) {
546 Some(copies) => match copies.entry(deleted) {
532 Entry::Occupied(e) => Some(e),
547 Entry::Occupied(e) => Some(e),
533 Entry::Vacant(_) => None,
548 Entry::Vacant(_) => None,
534 },
549 },
535 };
550 };
536 let p2_entry = match &mut p2_copies {
551 let p2_entry = match &mut p2_copies {
537 None => None,
552 None => None,
538 Some(copies) => match copies.entry(deleted) {
553 Some(copies) => match copies.entry(deleted) {
539 Entry::Occupied(e) => Some(e),
554 Entry::Occupied(e) => Some(e),
540 Entry::Vacant(_) => None,
555 Entry::Vacant(_) => None,
541 },
556 },
542 };
557 };
543
558
544 match (p1_entry, p2_entry) {
559 match (p1_entry, p2_entry) {
545 (None, None) => (),
560 (None, None) => (),
546 (Some(mut e), None) => {
561 (Some(mut e), None) => {
547 e.get_mut().mark_delete(current_rev)
562 e.get_mut().mark_delete(current_rev)
548 }
563 }
549 (None, Some(mut e)) => {
564 (None, Some(mut e)) => {
550 e.get_mut().mark_delete(current_rev)
565 e.get_mut().mark_delete(current_rev)
551 }
566 }
552 (Some(mut e1), Some(mut e2)) => {
567 (Some(mut e1), Some(mut e2)) => {
553 let cs1 = e1.get_mut();
568 let cs1 = e1.get_mut();
554 let cs2 = e2.get();
569 let cs2 = e2.get();
555 if cs1 == cs2 {
570 if cs1 == cs2 {
556 cs1.mark_delete(current_rev);
571 cs1.mark_delete(current_rev);
557 } else {
572 } else {
558 cs1.mark_delete_with_pair(current_rev, &cs2);
573 cs1.mark_delete_with_pair(current_rev, &cs2);
559 }
574 }
560 e2.insert(cs1.clone());
575 e2.insert(cs1.clone());
561 }
576 }
562 }
577 }
563 }
578 }
564 }
579 }
565 }
580 }
566 (p1_copies, p2_copies)
581 (p1_copies, p2_copies)
567 }
582 }

 // insert one new copy information in an InternalPathCopies
 //
 // This deal with chaining and overwrite.
 fn add_one_copy(
     current_rev: Revision,
     path_map: &mut TwoWayPathMap,
     copies: &mut InternalPathCopies,
     base_copies: &InternalPathCopies,
     path_dest: &HgPath,
     path_source: &HgPath,
 ) {
     let dest = path_map.tokenize(path_dest);
     let source = path_map.tokenize(path_source);
     let entry;
     if let Some(v) = base_copies.get(&source) {
         entry = match &v.path {
             Some(path) => Some((*(path)).to_owned()),
             None => Some(source.to_owned()),
         }
     } else {
         entry = Some(source.to_owned());
     }
     // Each new entry is introduced by the children, we
     // record this information as we will need it to take
     // the right decision when merging conflicting copy
     // information. See merge_copies_dict for details.
     match copies.entry(dest) {
         Entry::Vacant(slot) => {
             let ttpc = CopySource::new(current_rev, entry);
             slot.insert(ttpc);
         }
         Entry::Occupied(mut slot) => {
             let ttpc = slot.get_mut();
             ttpc.overwrite(current_rev, entry);
         }
     }
 }

 /// merge two copies-mapping together, minor and major
 ///
 /// In case of conflict, value from "major" will be picked, unless in some
 /// cases. See inline documentation for details.
 fn merge_copies_dict(
     path_map: &TwoWayPathMap,
     current_merge: Revision,
     minor: InternalPathCopies,
     major: InternalPathCopies,
-    changes: &ChangedFiles,
+    get_merge_case: impl Fn(&HgPath) -> MergeCase + Copy,
 ) -> InternalPathCopies {
     use crate::utils::{ordmap_union_with_merge, MergeResult};

     ordmap_union_with_merge(minor, major, |&dest, src_minor, src_major| {
         let (pick, overwrite) = compare_value(
-            path_map,
             current_merge,
-            changes,
-            dest,
+            || get_merge_case(path_map.untokenize(dest)),
             src_minor,
             src_major,
         );
         if overwrite {
             let (winner, loser) = match pick {
                 MergePick::Major | MergePick::Any => (src_major, src_minor),
                 MergePick::Minor => (src_minor, src_major),
             };
             MergeResult::UseNewValue(CopySource::new_from_merge(
                 current_merge,
                 winner,
                 loser,
             ))
         } else {
             match pick {
                 MergePick::Any | MergePick::Major => {
                     MergeResult::UseRightValue
                 }
                 MergePick::Minor => MergeResult::UseLeftValue,
             }
         }
     })
 }

 /// represent the side that should prevail when merging two
 /// InternalPathCopies
+#[derive(Debug, PartialEq)]
 enum MergePick {
     /// The "major" (p1) side prevails
     Major,
     /// The "minor" (p2) side prevails
     Minor,
     /// Any side could be used (because they are the same)
     Any,
 }

 /// decide which side prevails in case of conflicting values
 #[allow(clippy::if_same_then_else)]
 fn compare_value(
-    path_map: &TwoWayPathMap,
     current_merge: Revision,
-    changes: &ChangedFiles,
-    dest: PathToken,
+    merge_case_for_dest: impl Fn() -> MergeCase,
     src_minor: &CopySource,
     src_major: &CopySource,
 ) -> (MergePick, bool) {
     if src_major == src_minor {
         (MergePick::Any, false)
     } else if src_major.rev == current_merge {
         // minor is different according to per minor == major check earlier
         debug_assert!(src_minor.rev != current_merge);

         // The last value comes the current merge, this value -will- win
         // eventually.
         (MergePick::Major, true)
     } else if src_minor.rev == current_merge {
         // The last value comes the current merge, this value -will- win
         // eventually.
         (MergePick::Minor, true)
     } else if src_major.path == src_minor.path {
         debug_assert!(src_major.rev != src_major.rev);
         // we have the same value, but from other source;
         if src_major.is_overwritten_by(src_minor) {
             (MergePick::Minor, false)
         } else if src_minor.is_overwritten_by(src_major) {
             (MergePick::Major, false)
         } else {
             (MergePick::Any, true)
         }
     } else {
         debug_assert!(src_major.rev != src_major.rev);
-        let dest_path = path_map.untokenize(dest);
-        let action = changes.get_merge_case(dest_path);
+        let action = merge_case_for_dest();
         if src_minor.path.is_some()
             && src_major.path.is_none()
             && action == MergeCase::Salvaged
         {
             // If the file is "deleted" in the major side but was
             // salvaged by the merge, we keep the minor side alive
             (MergePick::Minor, true)
         } else if src_major.path.is_some()
             && src_minor.path.is_none()
             && action == MergeCase::Salvaged
         {
             // If the file is "deleted" in the minor side but was
             // salvaged by the merge, unconditionnaly preserve the
             // major side.
             (MergePick::Major, true)
         } else if src_minor.is_overwritten_by(src_major) {
             // The information from the minor version are strictly older than
             // the major version
             if action == MergeCase::Merged {
                 // If the file was actively merged, its means some non-copy
                 // activity happened on the other branch. It
                 // mean the older copy information are still relevant.
                 //
                 // The major side wins such conflict.
                 (MergePick::Major, true)
             } else {
                 // No activity on the minor branch, pick the newer one.
                 (MergePick::Major, false)
             }
         } else if src_major.is_overwritten_by(src_minor) {
             if action == MergeCase::Merged {
                 // If the file was actively merged, its means some non-copy
                 // activity happened on the other branch. It
                 // mean the older copy information are still relevant.
                 //
                 // The major side wins such conflict.
                 (MergePick::Major, true)
             } else {
                 // No activity on the minor branch, pick the newer one.
                 (MergePick::Minor, false)
             }
         } else if src_minor.path.is_none() {
             // the minor side has no relevant information, pick the alive one
             (MergePick::Major, true)
         } else if src_major.path.is_none() {
             // the major side has no relevant information, pick the alive one
             (MergePick::Minor, true)
         } else {
             // by default the major side wins
             (MergePick::Major, true)
         }
     }
 }
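For reference when reading `ActionsIterator::next` and `get_merge_case` above: each index entry carries one flag byte whose low bits select the copy-source parent and whose next bits select the merge action. Below is a small, self-contained sketch of decoding such a byte, reusing the constant values from the file; the `describe` helper is illustrative and not part of hg-core.

// Sketch only: decode the per-file flag byte with the same constants as
// ChangedFiles (COPY_MASK/P1_COPY/P2_COPY select the copy parent,
// ACTION_MASK/REMOVED/MERGED/SALVAGED select the merge action).
const COPY_MASK: u8 = 3;
const P1_COPY: u8 = 2;
const P2_COPY: u8 = 3;
const ACTION_MASK: u8 = 28;
const REMOVED: u8 = 12;
const MERGED: u8 = 8;
const SALVAGED: u8 = 16;

fn describe(flags: u8) -> (&'static str, &'static str) {
    let action = match flags & ACTION_MASK {
        REMOVED => "removed",
        MERGED => "merged",
        SALVAGED => "salvaged",
        _ => "normal",
    };
    let copy = match flags & COPY_MASK {
        P1_COPY => "copied from p1",
        P2_COPY => "copied from p2",
        _ => "not a copy",
    };
    (action, copy)
}

fn main() {
    // A file recorded as both "merged" and "copied from p1": 8 | 2 == 10.
    assert_eq!(describe(MERGED | P1_COPY), ("merged", "copied from p1"));
    // A plain removal: the copy bits are zero.
    assert_eq!(describe(REMOVED), ("removed", "not a copy"));
}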