copies: use the rust code for `combine_changeset_copies`

Changeset-centric copy tracing now uses the Rust code. The Rust code focuses on
simplicity and will be optimised later, so the performance is not great yet.
Now that all the pieces are in place, we can start working on performance in
the coming changesets.

Below is a table that summarizes how much slower we got:

Repo Cases Source-Rev Dest-Rev Py-time Rust-time Difference Factor
------------------------------------------------------------------------------------------------------------------------------------
mercurial x_revs_x_added_0_copies ad6b123de1c7 39cfcef4f463 : 0.000049 s, 0.000046 s, -0.000003 s, × 0.9388
mercurial x_revs_x_added_x_copies 2b1c78674230 0c1d10351869 : 0.000112 s, 0.000173 s, +0.000061 s, × 1.5446
mercurial x000_revs_x000_added_x_copies 81f8ff2a9bf2 dd3267698d84 : 0.004216 s, 0.006303 s, +0.002087 s, × 1.4950
pypy x_revs_x_added_0_copies aed021ee8ae8 099ed31b181b : 0.000204 s, 0.000229 s, +0.000025 s, × 1.1225
pypy x_revs_x000_added_0_copies 4aa4e1f8e19a 359343b9ac0e : 0.000058 s, 0.000056 s, -0.000002 s, × 0.9655
pypy x_revs_x_added_x_copies ac52eb7bbbb0 72e022663155 : 0.000112 s, 0.000143 s, +0.000031 s, × 1.2768
pypy x_revs_x00_added_x_copies c3b14617fbd7 ace7255d9a26 : 0.000339 s, 0.001166 s, +0.000827 s, × 3.4395
pypy x_revs_x000_added_x000_copies df6f7a526b60 a83dc6a2d56f : 0.010214 s, 0.022931 s, +0.012717 s, × 2.2451
pypy x000_revs_xx00_added_0_copies 89a76aede314 2f22446ff07e : 0.047497 s, 0.852446 s, +0.804949 s, × 17.9474
pypy x000_revs_x000_added_x_copies 8a3b5bfd266e 2c68e87c3efe : 0.075297 s, 2.221824 s, +2.146527 s, × 29.5075
pypy x000_revs_x000_added_x000_copies 89a76aede314 7b3dda341c84 : 0.057322 s, 1.194162 s, +1.136840 s, × 20.8325
pypy x0000_revs_x_added_0_copies d1defd0dc478 c9cb1334cc78 : 0.796264 s, 62.468362 s, +61.672098 s, × 78.4518
pypy x0000_revs_xx000_added_0_copies bf2c629d0071 4ffed77c095c : 0.020491 s, 0.022116 s, +0.001625 s, × 1.0793
pypy x0000_revs_xx000_added_x000_copies 08ea3258278e d9fa043f30c0 : 0.121612 s, 2.972788 s, +2.851176 s, × 24.4449
netbeans x_revs_x_added_0_copies fb0955ffcbcd a01e9239f9e7 : 0.000143 s, 0.000180 s, +0.000037 s, × 1.2587
netbeans x_revs_x000_added_0_copies 6f360122949f 20eb231cc7d0 : 0.000112 s, 0.000123 s, +0.000011 s, × 1.0982
netbeans x_revs_x_added_x_copies 1ada3faf6fb6 5a39d12eecf4 : 0.000232 s, 0.000315 s, +0.000083 s, × 1.3578
netbeans x_revs_x00_added_x_copies 35be93ba1e2c 9eec5e90c05f : 0.000721 s, 0.001297 s, +0.000576 s, × 1.7989
netbeans x000_revs_xx00_added_0_copies eac3045b4fdd 51d4ae7f1290 : 0.010115 s, 0.024884 s, +0.014769 s, × 2.4601
netbeans x000_revs_x000_added_x_copies e2063d266acd 6081d72689dc : 0.015461 s, 0.032653 s, +0.017192 s, × 2.1120
netbeans x000_revs_x000_added_x000_copies ff453e9fee32 411350406ec2 : 0.060756 s, 4.230118 s, +4.169362 s, × 69.6247
netbeans x0000_revs_xx000_added_x000_copies 588c2d1ced70 1aad62e59ddd : 0.605842 s, killed
mozilla-central x_revs_x_added_0_copies 3697f962bb7b 7015fcdd43a2 : 0.000164 s, 0.000197 s, +0.000033 s, × 1.2012
mozilla-central x_revs_x000_added_0_copies dd390860c6c9 40d0c5bed75d : 0.000331 s, 0.000622 s, +0.000291 s, × 1.8792
mozilla-central x_revs_x_added_x_copies 8d198483ae3b 14207ffc2b2f : 0.000249 s, 0.000296 s, +0.000047 s, × 1.1888
mozilla-central x_revs_x00_added_x_copies 98cbc58cc6bc 446a150332c3 : 0.000711 s, 0.001626 s, +0.000915 s, × 2.2869
mozilla-central x_revs_x000_added_x000_copies 3c684b4b8f68 0a5e72d1b479 : 0.003438 s, 0.006218 s, +0.002780 s, × 1.8086
mozilla-central x_revs_x0000_added_x0000_copies effb563bb7e5 c07a39dc4e80 : 0.069869 s, 0.132760 s, +0.062891 s, × 1.9001
mozilla-central x000_revs_xx00_added_0_copies 6100d773079a 04a55431795e : 0.005701 s, 0.029001 s, +0.023300 s, × 5.0870
mozilla-central x000_revs_x000_added_x_copies 9f17a6fc04f9 2d37b966abed : 0.005757 s, 0.005886 s, +0.000129 s, × 1.0224
mozilla-central x000_revs_x000_added_x000_copies 7c97034feb78 4407bd0c6330 : 0.061826 s, 3.619850 s, +3.558024 s, × 58.5490
mozilla-central x0000_revs_xx000_added_0_copies 9eec5917337d 67118cc6dcad : 0.043354 s, 0.058678 s, +0.015324 s, × 1.3535
mozilla-central x0000_revs_xx000_added_x000_copies f78c615a656c 96a38b690156 : 0.198979 s, 11.926587 s, +11.727608 s, × 59.9389
mozilla-central x00000_revs_x0000_added_x0000_copies 6832ae71433c 4c222a1d9a00 : 2.067096 s, killed
mozilla-central x00000_revs_x00000_added_x000_copies 76caed42cf7c 1daa622bbe42 : 3.102616 s, killed
mozilla-try x_revs_x_added_0_copies aaf6dde0deb8 9790f499805a : 0.001212 s, 0.001204 s, -0.000008 s, × 0.9934
mozilla-try x_revs_x000_added_0_copies d8d0222927b4 5bb8ce8c7450 : 0.001237 s, 0.001217 s, -0.000020 s, × 0.9838
mozilla-try x_revs_x_added_x_copies 092fcca11bdb 936255a0384a : 0.000557 s, 0.000605 s, +0.000048 s, × 1.0862
mozilla-try x_revs_x00_added_x_copies b53d2fadbdb5 017afae788ec : 0.001532 s, 0.001876 s, +0.000344 s, × 1.2245
mozilla-try x_revs_x000_added_x000_copies 20408ad61ce5 6f0ee96e21ad : 0.035166 s, 0.078190 s, +0.043024 s, × 2.2235
mozilla-try x_revs_x0000_added_x0000_copies effb563bb7e5 c07a39dc4e80 : 0.070336 s, 0.135428 s, +0.065092 s, × 1.9254
mozilla-try x000_revs_xx00_added_0_copies 6100d773079a 04a55431795e : 0.006080 s, 0.029123 s, +0.023043 s, × 4.7900
mozilla-try x000_revs_x000_added_x_copies 9f17a6fc04f9 2d37b966abed : 0.006099 s, 0.006141 s, +0.000042 s, × 1.0069
mozilla-try x000_revs_x000_added_x000_copies 1346fd0130e4 4c65cbdabc1f : 0.064317 s, 4.857827 s, +4.793510 s, × 75.5294
mozilla-try x0000_revs_x_added_0_copies 63519bfd42ee a36a2a865d92 : 0.303263 s, 10.674920 s, +10.371657 s, × 35.2002
mozilla-try x0000_revs_x_added_x_copies 9fe69ff0762d bcabf2a78927 : 0.292804 s, 9.789462 s, +9.496658 s, × 33.4335
mozilla-try x0000_revs_xx000_added_x_copies 156f6e2674f2 4d0f2c178e66 : 0.107594 s, 1.087890 s, +0.980296 s, × 10.1111
mozilla-try x0000_revs_xx000_added_0_copies 9eec5917337d 67118cc6dcad : 0.045202 s, 0.060556 s, +0.015354 s, × 1.3397
mozilla-try x0000_revs_xx000_added_x000_copies 89294cd501d9 7ccb2fc7ccb5 : 1.926277 s, killed
mozilla-try x0000_revs_x0000_added_x0000_copies e928c65095ed e951f4ad123a : 0.794492 s, killed
mozilla-try x00000_revs_x_added_0_copies 6a320851d377 1ebb79acd503 : 84.521497 s, killed
mozilla-try x00000_revs_x00000_added_0_copies dc8a3ca7010e d16fde900c9c : 0.965937 s, 19.647038 s, +18.681101 s, × 20.3399
mozilla-try x00000_revs_x_added_x_copies 5173c4b6f97c 95d83ee7242d : 83.367146 s, killed
mozilla-try x00000_revs_x000_added_x_copies 9126823d0e9c ca82787bb23c : 84.260895 s, killed
mozilla-try x00000_revs_x0000_added_x0000_copies 8d3fafa80d4b eb884023b810 : 3.274537 s, killed
mozilla-try x00000_revs_x00000_added_x0000_copies 1b661134e2ca 1ae03d022d6d : 42.235843 s, killed
mozilla-try x00000_revs_x00000_added_x000_copies 9b2a99adc05e 8e29777b48e6 : 49.872829 s, killed

Differential Revision: https://phab.mercurial-scm.org/D9299
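
For reference, the two derived columns follow directly from the measured
timings: Difference = Rust-time - Py-time and Factor = Rust-time / Py-time (a
factor below 1 means the Rust code was faster). A minimal Rust sketch, not part
of the change itself, recomputing them for the first table row:

// Sketch only: recomputing the derived columns from the first table row.
fn main() {
    let py_time = 0.000049_f64; // mercurial, x_revs_x_added_0_copies, Py-time
    let rust_time = 0.000046_f64; // same case, Rust-time
    let difference = rust_time - py_time; // -0.000003 s
    let factor = rust_time / py_time; // ≈ 0.9388
    println!("{:+.6} s, × {:.4}", difference, factor);
}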

copy_tracing.rs
use crate::utils::hg_path::HgPathBuf;
use crate::Revision;
use std::collections::HashMap;
use std::collections::HashSet;
pub type PathCopies = HashMap<HgPathBuf, HgPathBuf>;
#[derive(Clone, Debug)]
struct TimeStampedPathCopy {
/// revision at which the copy information was added
rev: Revision,
/// the copy source, (Set to None in case of deletion of the associated
/// key)
path: Option<HgPathBuf>,
}
/// Maps copy destination to copy source (+ a "timestamp" for the operation)
type TimeStampedPathCopies = HashMap<HgPathBuf, TimeStampedPathCopy>;
/// Holds parent 1, parent 2 and the relevant file actions.
pub type RevInfo = (Revision, Revision, ChangedFiles);
/// Represents the files affected by a changeset
///
/// This holds a subset of mercurial.metadata.ChangingFiles as we do not need
/// all the data categories tracked by it.
pub struct ChangedFiles {
removed: HashSet<HgPathBuf>,
merged: HashSet<HgPathBuf>,
salvaged: HashSet<HgPathBuf>,
copied_from_p1: PathCopies,
copied_from_p2: PathCopies,
}
impl ChangedFiles {
pub fn new(
removed: HashSet<HgPathBuf>,
merged: HashSet<HgPathBuf>,
salvaged: HashSet<HgPathBuf>,
copied_from_p1: PathCopies,
copied_from_p2: PathCopies,
) -> Self {
ChangedFiles {
removed,
merged,
salvaged,
copied_from_p1,
copied_from_p2,
}
}
pub fn new_empty() -> Self {
ChangedFiles {
removed: HashSet::new(),
merged: HashSet::new(),
salvaged: HashSet::new(),
copied_from_p1: PathCopies::new(),
copied_from_p2: PathCopies::new(),
}
}
}
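
// Illustrative sketch, not part of the upstream file: how a caller could fill
// in a `ChangedFiles` for a hypothetical changeset that copies "b" from "a"
// relative to its second parent and removes "old". `HgPath::new(..).to_owned()`
// is assumed to be the usual hg-core way to build an owned `HgPathBuf`.
#[test]
fn example_changed_files() {
    use crate::utils::hg_path::HgPath;
    let mut removed = HashSet::new();
    removed.insert(HgPath::new(b"old").to_owned());
    let mut copied_from_p2 = PathCopies::new();
    copied_from_p2
        .insert(HgPath::new(b"b").to_owned(), HgPath::new(b"a").to_owned());
    let changes = ChangedFiles::new(
        removed,
        HashSet::new(), // merged: files actively merged from both parents
        HashSet::new(), // salvaged: deletions reverted by the merge
        PathCopies::new(),
        copied_from_p2,
    );
    assert!(changes.removed.contains(&HgPath::new(b"old").to_owned()));
    assert_eq!(changes.copied_from_p2.len(), 1);
}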
/// Same as mercurial.copies._combine_changeset_copies, but in Rust.
///
/// Arguments are:
///
/// revs: all revisions to be considered
/// children: a {parent → [children]} mapping
/// target_rev: the final revision we are combining copies to
/// rev_info(rev): callback to get revision information:
/// * first parent
/// * second parent
/// * ChangedFiles
/// is_ancestor(low_rev, high_rev): callback to check whether `low_rev` is an
/// ancestor of `high_rev`
pub fn combine_changeset_copies(
revs: Vec<Revision>,
children: HashMap<Revision, Vec<Revision>>,
target_rev: Revision,
rev_info: &impl Fn(Revision) -> RevInfo,
is_ancestor: &impl Fn(Revision, Revision) -> bool,
) -> PathCopies {
let mut all_copies = HashMap::new();
for rev in revs {
// Retrieve data computed in a previous iteration
let copies = all_copies.remove(&rev);
let copies = match copies {
Some(c) => c,
None => TimeStampedPathCopies::default(), // root of the walked set
};
let current_children = match children.get(&rev) {
Some(c) => c,
None => panic!("inconsistent `revs` and `children`"),
};
for child in current_children {
// We will chain the copy information accumulated for `rev` with
// the individual copy information of each of its children,
// creating a new PathCopies for each `rev` → `children` vertex.
let (p1, p2, changes) = rev_info(*child);
let (parent, child_copies) = if rev == p1 {
(1, &changes.copied_from_p1)
} else {
assert_eq!(rev, p2);
(2, &changes.copied_from_p2)
};
let mut new_copies = copies.clone();
for (dest, source) in child_copies {
let entry;
if let Some(v) = copies.get(source) {
entry = match &v.path {
Some(path) => Some((*(path)).to_owned()),
None => Some(source.to_owned()),
}
} else {
entry = Some(source.to_owned());
}
// Each new entry is introduced by the child; we record this
// information, as we will need it to take the right decision
// when merging conflicting copy information. See
// merge_copies_dict for details.
let ttpc = TimeStampedPathCopy {
rev: *child,
path: entry,
};
new_copies.insert(dest.to_owned(), ttpc);
}
// We must drop copy information for removed files.
//
// We need to explicitly record them as dropped to propagate this
// information when merging two TimeStampedPathCopies objects.
for f in changes.removed.iter() {
if new_copies.contains_key(f.as_ref()) {
let ttpc = TimeStampedPathCopy {
rev: *child,
path: None,
};
new_copies.insert(f.to_owned(), ttpc);
}
}
// A merge has two parents, so we need to combine their copy
// information.
//
// If the vertex from the other parent was already processed, we
// will have a value for the child ready to be used. We need to
// grab it and combine it with the one we just computed. If not,
// we can simply store the newly computed data. The processing
// happening at the time of the second parent will take care of
// combining the two TimeStampedPathCopies instances.
match all_copies.remove(child) {
None => {
all_copies.insert(child, new_copies);
}
Some(other_copies) => {
let (minor, major) = match parent {
1 => (other_copies, new_copies),
2 => (new_copies, other_copies),
_ => unreachable!(),
};
let merged_copies =
merge_copies_dict(minor, major, &changes, is_ancestor);
all_copies.insert(child, merged_copies);
}
};
}
}
// Drop internal information (like the timestamp) and return the final
// mapping.
let tt_result = all_copies
.remove(&target_rev)
.expect("target revision was not processed");
let mut result = PathCopies::default();
for (dest, tt_source) in tt_result {
if let Some(path) = tt_source.path {
result.insert(dest, path);
}
}
result
}
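
// Illustrative sketch, not part of the upstream file: driving
// `combine_changeset_copies` on a minimal graph. Revision 0 is the root of the
// walked set, revision 1 is its only child and records a copy of "b" from "a"
// against its first parent. The two callbacks are trivial stubs standing in
// for real changelog queries; -1 stands for the null revision.
#[test]
fn example_combine_changeset_copies() {
    use crate::utils::hg_path::HgPath;

    let mut children = HashMap::new();
    children.insert(0, vec![1]);

    let rev_info = |rev: Revision| {
        assert_eq!(rev, 1);
        let mut copied_from_p1 = PathCopies::new();
        copied_from_p1
            .insert(HgPath::new(b"b").to_owned(), HgPath::new(b"a").to_owned());
        let changes = ChangedFiles::new(
            HashSet::new(),
            HashSet::new(),
            HashSet::new(),
            copied_from_p1,
            PathCopies::new(),
        );
        let p1: Revision = 0; // first parent is the root revision
        let p2: Revision = -1; // no second parent
        (p1, p2, changes)
    };
    // Stub: in this tiny linear graph a lower revision is always an ancestor.
    let is_ancestor = |low: Revision, high: Revision| low < high;

    let copies =
        combine_changeset_copies(vec![0], children, 1, &rev_info, &is_ancestor);
    assert_eq!(
        copies.get(&HgPath::new(b"b").to_owned()),
        Some(&HgPath::new(b"a").to_owned())
    );
}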
/// Merge two copies mappings together, "minor" and "major".
///
/// In case of conflict, the value from "major" will be picked, except in some
/// cases. See the inline documentation for details.
#[allow(clippy::if_same_then_else)]
fn merge_copies_dict(
minor: TimeStampedPathCopies,
major: TimeStampedPathCopies,
changes: &ChangedFiles,
is_ancestor: &impl Fn(Revision, Revision) -> bool,
) -> TimeStampedPathCopies {
let mut result = minor.clone();
for (dest, src_major) in major {
let overwrite;
if let Some(src_minor) = minor.get(&dest) {
if src_major.path == src_minor.path {
// we have the same value, but from another source;
if src_major.rev == src_minor.rev {
// If the two entries are identical, no need to do anything
overwrite = false;
} else if is_ancestor(src_major.rev, src_minor.rev) {
overwrite = false;
} else {
overwrite = true;
}
} else if src_major.rev == src_minor.rev {
// We cannot get copy information for both p1 and p2 in the
// same rev. So this is the same value.
overwrite = false;
} else if src_major.path.is_none()
&& changes.salvaged.contains(&dest)
{
// If the file is "deleted" on the major side but was salvaged
// by the merge, we keep the minor side alive
overwrite = false;
} else if src_minor.path.is_none()
&& changes.salvaged.contains(&dest)
{
// If the file is "deleted" on the minor side but was salvaged
// by the merge, unconditionally preserve the major side.
overwrite = true;
} else if changes.merged.contains(&dest) {
// If the file was actively merged, copy information from each
// side might conflict. The major side wins such conflicts.
overwrite = true;
} else if is_ancestor(src_major.rev, src_minor.rev) {
// If the minor side is strictly newer than the major side, it
// should be kept.
overwrite = false;
} else if src_major.path.is_some() {
// Without any special case, the "major" value wins over the
// "minor" one.
overwrite = true;
} else if is_ancestor(src_minor.rev, src_major.rev) {
// the "major" rev is a direct ancestors of "minor", any
// different value should overwrite
overwrite = true;
} else {
// major version is None (so the file was deleted on that
// branch) and that branch is independent (neither minor nor
// major is an ancestor of the other one). We preserve the
// minor side, which still carries copy information for the file.
overwrite = false;
}
} else {
// minor had no value
overwrite = true;
}
if overwrite {
result.insert(dest, src_major);
}
}
result
}
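
// Illustrative sketch, not part of the upstream file: the "salvaged" rule in
// `merge_copies_dict`. The major side records "b" as deleted (path: None) at
// revision 3, while the minor side still knows "b" was copied from "a" at
// revision 2. Because the changeset salvaged "b", the minor entry survives.
#[test]
fn example_merge_copies_salvaged() {
    use crate::utils::hg_path::HgPath;

    let dest = HgPath::new(b"b").to_owned();
    let mut minor = TimeStampedPathCopies::default();
    minor.insert(
        dest.clone(),
        TimeStampedPathCopy {
            rev: 2,
            path: Some(HgPath::new(b"a").to_owned()),
        },
    );
    let mut major = TimeStampedPathCopies::default();
    major.insert(dest.clone(), TimeStampedPathCopy { rev: 3, path: None });

    let mut salvaged = HashSet::new();
    salvaged.insert(dest.clone());
    let changes = ChangedFiles::new(
        HashSet::new(), // removed
        HashSet::new(), // merged
        salvaged,
        PathCopies::new(),
        PathCopies::new(),
    );
    // Stub ancestry check; it is not consulted for the salvaged rule.
    let is_ancestor = |low: Revision, high: Revision| low < high;

    let merged = merge_copies_dict(minor, major, &changes, &is_ancestor);
    assert_eq!(merged[&dest].path, Some(HgPath::new(b"a").to_owned()));
}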