repo.rs
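
//! A Mercurial repository on disk: finding the `.hg` directory, loading the
//! repository requirements, resolving shared repositories, and a small `Vfs`
//! abstraction for reading files under `.hg` and `.hg/store`.
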
use crate::errors::{HgError, IoResultExt};
use crate::requirements;
use crate::utils::files::get_path_from_bytes;
use memmap::{Mmap, MmapOptions};
use std::collections::HashSet;
use std::path::{Path, PathBuf};

/// A repository on disk
pub struct Repo {
    working_directory: PathBuf,
    dot_hg: PathBuf,
    store: PathBuf,
    requirements: HashSet<String>,
}

#[derive(Debug, derive_more::From)]
pub enum RepoFindError {
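    /// No `.hg` directory was found in the current working directory or any
    /// of its ancestors.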
    NotFoundInCurrentDirectoryOrAncestors {
        current_directory: PathBuf,
    },
    #[from]
    Other(HgError),
}

/// Filesystem access abstraction for the contents of a given "base" directory
#[derive(Clone, Copy)]
pub(crate) struct Vfs<'a> {
    base: &'a Path,
}

impl Repo {
    /// Search the current directory and its ancestors for a repository:
    /// a working directory that contains a `.hg` sub-directory.
    pub fn find() -> Result<Self, RepoFindError> {
        let current_directory = crate::utils::current_dir()?;
        // ancestors() is inclusive: it first yields `current_directory` as-is.
        for ancestor in current_directory.ancestors() {
            if ancestor.join(".hg").is_dir() {
                return Ok(Self::new_at_path(ancestor.to_owned())?);
            }
        }
        Err(RepoFindError::NotFoundInCurrentDirectoryOrAncestors {
            current_directory,
        })
    }

    /// To be called after checking that `.hg` is a sub-directory
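    /// of `working_directory`. Loads the requirements from `.hg/requires`
    /// and resolves any share (`.hg/sharedpath`) indirection to locate the
    /// store.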
    fn new_at_path(working_directory: PathBuf) -> Result<Self, HgError> {
        let dot_hg = working_directory.join(".hg");

        let hg_vfs = Vfs { base: &dot_hg };
        let mut reqs = requirements::load_if_exists(hg_vfs)?;
        let relative =
            reqs.contains(requirements::RELATIVE_SHARED_REQUIREMENT);
        let shared =
            reqs.contains(requirements::SHARED_REQUIREMENT) || relative;

        // From `mercurial/localrepo.py`:
        //
        // if .hg/requires contains the sharesafe requirement, it means
        // there exists a `.hg/store/requires` too and we should read it
        // NOTE: presence of SHARESAFE_REQUIREMENT imply that store requirement
        // is present. We never write SHARESAFE_REQUIREMENT for a repo if store
        // is not present, refer checkrequirementscompat() for that
        //
        // However, if SHARESAFE_REQUIREMENT is not present, it means that the
        // repository was shared the old way. We check the share source
        // .hg/requires for SHARESAFE_REQUIREMENT to detect whether the
        // current repository needs to be reshared
        let share_safe = reqs.contains(requirements::SHARESAFE_REQUIREMENT);

        let store_path;
        if !shared {
            store_path = dot_hg.join("store");
            if share_safe {
                reqs.extend(requirements::load(Vfs { base: &store_path })?);
            }
        } else {
            let bytes = hg_vfs.read("sharedpath")?;
            let mut shared_path = get_path_from_bytes(&bytes).to_owned();
            if relative {
                shared_path = dot_hg.join(shared_path)
            }
            if !shared_path.is_dir() {
                return Err(HgError::corrupted(format!(
                    ".hg/sharedpath points to nonexistent directory {}",
                    shared_path.display()
                )));
            }

            store_path = shared_path.join("store");

            let source_is_share_safe =
                requirements::load(Vfs { base: &shared_path })?
                    .contains(requirements::SHARESAFE_REQUIREMENT);

            // TODO: support for `share.safe-mismatch.*` config
            if share_safe && !source_is_share_safe {
                return Err(HgError::unsupported("share-safe downgrade"));
            } else if source_is_share_safe && !share_safe {
                return Err(HgError::unsupported("share-safe upgrade"));
            }
        }

        let repo = Self {
            requirements: reqs,
            working_directory,
            store: store_path,
            dot_hg,
        };

        requirements::check(&repo)?;

        Ok(repo)
    }

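    /// Returns the path of the working directory (the repository root).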
    pub fn working_directory_path(&self) -> &Path {
        &self.working_directory
    }

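    /// Returns the set of requirements loaded when this repository was
    /// opened.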
    pub fn requirements(&self) -> &HashSet<String> {
        &self.requirements
    }

    /// For accessing repository files (in `.hg`), except for the store
    /// (`.hg/store`).
    pub(crate) fn hg_vfs(&self) -> Vfs<'_> {
        Vfs { base: &self.dot_hg }
    }

    /// For accessing repository store files (in `.hg/store`)
    pub(crate) fn store_vfs(&self) -> Vfs<'_> {
        Vfs { base: &self.store }
    }

    /// For accessing the working copy
    // The underscore prefix silences the "never used" warning. Remove before
    // using.
    pub(crate) fn _working_directory_vfs(&self) -> Vfs<'_> {
        Vfs {
            base: &self.working_directory,
        }
    }
}

impl Vfs<'_> {
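    /// Returns `relative_path` joined onto this VFS's base directory.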
    pub(crate) fn join(&self, relative_path: impl AsRef<Path>) -> PathBuf {
        self.base.join(relative_path)
    }

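    /// Reads the entire contents of the file at `relative_path`, attaching
    /// the file path to any I/O error.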
    pub(crate) fn read(
        &self,
        relative_path: impl AsRef<Path>,
    ) -> Result<Vec<u8>, HgError> {
        let path = self.join(relative_path);
        std::fs::read(&path).for_file(&path)
    }

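    /// Opens the file at `relative_path` and memory-maps it read-only.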
    pub(crate) fn mmap_open(
        &self,
        relative_path: impl AsRef<Path>,
    ) -> Result<Mmap, HgError> {
        let path = self.base.join(relative_path);
        let file = std::fs::File::open(&path).for_file(&path)?;
        // TODO: what are the safety requirements here?
        let mmap = unsafe { MmapOptions::new().map(&file) }.for_file(&path)?;
        Ok(mmap)
    }
}
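
A minimal usage sketch (hypothetical, not part of repo.rs): assuming a caller inside the same crate where `Repo` and `RepoFindError` are in scope, locating the repository and listing its requirements could look like this.

// Hypothetical caller, for illustration only; not part of the original file.
fn print_requirements() -> Result<(), RepoFindError> {
    // Walk up from the current directory until a `.hg` directory is found.
    let repo = Repo::find()?;
    println!("repository at {}", repo.working_directory_path().display());
    // Requirements loaded from `.hg/requires` when the repository was opened.
    for req in repo.requirements() {
        println!("requirement: {}", req);
    }
    Ok(())
}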