##// END OF EJS Templates
rhg: initial support for shared repositories...
Simon Sapin -
r47190:d03b0601 default
parent child Browse files
Show More
@@ -1,114 +1,118 b''
1 use std::fmt;
1 use std::fmt;
2
2
3 /// Common error cases that can happen in many different APIs
3 /// Common error cases that can happen in many different APIs
4 #[derive(Debug)]
4 #[derive(Debug)]
5 pub enum HgError {
5 pub enum HgError {
6 IoError {
6 IoError {
7 error: std::io::Error,
7 error: std::io::Error,
8 context: IoErrorContext,
8 context: IoErrorContext,
9 },
9 },
10
10
11 /// A file under `.hg/` normally only written by Mercurial
11 /// A file under `.hg/` normally only written by Mercurial
12 ///
12 ///
13 /// The given string is a short explanation for users, not intended to be
13 /// The given string is a short explanation for users, not intended to be
14 /// machine-readable.
14 /// machine-readable.
15 CorruptedRepository(String),
15 CorruptedRepository(String),
16
16
17 /// The repository or requested operation involves a feature not
17 /// The repository or requested operation involves a feature not
18 /// supported by the Rust implementation. Falling back to the Python
18 /// supported by the Rust implementation. Falling back to the Python
19 /// implementation may or may not work.
19 /// implementation may or may not work.
20 ///
20 ///
21 /// The given string is a short explanation for users, not intended to be
21 /// The given string is a short explanation for users, not intended to be
22 /// machine-readable.
22 /// machine-readable.
23 UnsupportedFeature(String),
23 UnsupportedFeature(String),
24 }
24 }
25
25
26 /// Details about where an I/O error happened
26 /// Details about where an I/O error happened
27 #[derive(Debug, derive_more::From)]
27 #[derive(Debug, derive_more::From)]
28 pub enum IoErrorContext {
28 pub enum IoErrorContext {
29 /// A filesystem operation returned `std::io::Error`
29 /// A filesystem operation returned `std::io::Error`
30 #[from]
30 #[from]
31 File(std::path::PathBuf),
31 File(std::path::PathBuf),
32 /// `std::env::current_dir` returned `std::io::Error`
32 /// `std::env::current_dir` returned `std::io::Error`
33 CurrentDir,
33 CurrentDir,
34 }
34 }
35
35
36 impl HgError {
36 impl HgError {
37 pub fn corrupted(explanation: impl Into<String>) -> Self {
37 pub fn corrupted(explanation: impl Into<String>) -> Self {
38 // TODO: capture a backtrace here and keep it in the error value
38 // TODO: capture a backtrace here and keep it in the error value
39 // to aid debugging?
39 // to aid debugging?
40 // https://doc.rust-lang.org/std/backtrace/struct.Backtrace.html
40 // https://doc.rust-lang.org/std/backtrace/struct.Backtrace.html
41 HgError::CorruptedRepository(explanation.into())
41 HgError::CorruptedRepository(explanation.into())
42 }
42 }
43
44 pub fn unsupported(explanation: impl Into<String>) -> Self {
45 HgError::UnsupportedFeature(explanation.into())
46 }
43 }
47 }
44
48
45 // TODO: use `DisplayBytes` instead to show non-Unicode filenames losslessly?
49 // TODO: use `DisplayBytes` instead to show non-Unicode filenames losslessly?
46 impl fmt::Display for HgError {
50 impl fmt::Display for HgError {
47 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
51 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
48 match self {
52 match self {
49 HgError::IoError { error, context } => {
53 HgError::IoError { error, context } => {
50 write!(f, "{}: {}", error, context)
54 write!(f, "{}: {}", error, context)
51 }
55 }
52 HgError::CorruptedRepository(explanation) => {
56 HgError::CorruptedRepository(explanation) => {
53 write!(f, "corrupted repository: {}", explanation)
57 write!(f, "corrupted repository: {}", explanation)
54 }
58 }
55 HgError::UnsupportedFeature(explanation) => {
59 HgError::UnsupportedFeature(explanation) => {
56 write!(f, "unsupported feature: {}", explanation)
60 write!(f, "unsupported feature: {}", explanation)
57 }
61 }
58 }
62 }
59 }
63 }
60 }
64 }
61
65
62 // TODO: use `DisplayBytes` instead to show non-Unicode filenames losslessly?
66 // TODO: use `DisplayBytes` instead to show non-Unicode filenames losslessly?
63 impl fmt::Display for IoErrorContext {
67 impl fmt::Display for IoErrorContext {
64 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
68 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
65 match self {
69 match self {
66 IoErrorContext::File(path) => path.display().fmt(f),
70 IoErrorContext::File(path) => path.display().fmt(f),
67 IoErrorContext::CurrentDir => f.write_str("current directory"),
71 IoErrorContext::CurrentDir => f.write_str("current directory"),
68 }
72 }
69 }
73 }
70 }
74 }
71
75
72 pub trait IoResultExt<T> {
76 pub trait IoResultExt<T> {
73 /// Annotate a possible I/O error as related to a file at the given path.
77 /// Annotate a possible I/O error as related to a file at the given path.
74 ///
78 ///
75 /// This allows printing something like β€œFile not found: example.txt”
79 /// This allows printing something like β€œFile not found: example.txt”
76 /// instead of just β€œFile not found”.
80 /// instead of just β€œFile not found”.
77 ///
81 ///
78 /// Converts a `Result` with `std::io::Error` into one with `HgError`.
82 /// Converts a `Result` with `std::io::Error` into one with `HgError`.
79 fn for_file(self, path: &std::path::Path) -> Result<T, HgError>;
83 fn for_file(self, path: &std::path::Path) -> Result<T, HgError>;
80 }
84 }
81
85
82 impl<T> IoResultExt<T> for std::io::Result<T> {
86 impl<T> IoResultExt<T> for std::io::Result<T> {
83 fn for_file(self, path: &std::path::Path) -> Result<T, HgError> {
87 fn for_file(self, path: &std::path::Path) -> Result<T, HgError> {
84 self.map_err(|error| HgError::IoError {
88 self.map_err(|error| HgError::IoError {
85 error,
89 error,
86 context: IoErrorContext::File(path.to_owned()),
90 context: IoErrorContext::File(path.to_owned()),
87 })
91 })
88 }
92 }
89 }
93 }
90
94
91 pub trait HgResultExt<T> {
95 pub trait HgResultExt<T> {
92 /// Handle missing files separately from other I/O error cases.
96 /// Handle missing files separately from other I/O error cases.
93 ///
97 ///
94 /// Wraps the `Ok` type in an `Option`:
98 /// Wraps the `Ok` type in an `Option`:
95 ///
99 ///
96 /// * `Ok(x)` becomes `Ok(Some(x))`
100 /// * `Ok(x)` becomes `Ok(Some(x))`
97 /// * An I/O "not found" error becomes `Ok(None)`
101 /// * An I/O "not found" error becomes `Ok(None)`
98 /// * Other errors are unchanged
102 /// * Other errors are unchanged
99 fn io_not_found_as_none(self) -> Result<Option<T>, HgError>;
103 fn io_not_found_as_none(self) -> Result<Option<T>, HgError>;
100 }
104 }
101
105
102 impl<T> HgResultExt<T> for Result<T, HgError> {
106 impl<T> HgResultExt<T> for Result<T, HgError> {
103 fn io_not_found_as_none(self) -> Result<Option<T>, HgError> {
107 fn io_not_found_as_none(self) -> Result<Option<T>, HgError> {
104 match self {
108 match self {
105 Ok(x) => Ok(Some(x)),
109 Ok(x) => Ok(Some(x)),
106 Err(HgError::IoError { error, .. })
110 Err(HgError::IoError { error, .. })
107 if error.kind() == std::io::ErrorKind::NotFound =>
111 if error.kind() == std::io::ErrorKind::NotFound =>
108 {
112 {
109 Ok(None)
113 Ok(None)
110 }
114 }
111 Err(other_error) => Err(other_error),
115 Err(other_error) => Err(other_error),
112 }
116 }
113 }
117 }
114 }
118 }
@@ -1,100 +1,140 b''
1 use crate::errors::{HgError, IoResultExt};
1 use crate::errors::{HgError, IoResultExt};
2 use crate::requirements;
2 use crate::requirements;
3 use crate::utils::files::get_path_from_bytes;
3 use memmap::{Mmap, MmapOptions};
4 use memmap::{Mmap, MmapOptions};
5 use std::collections::HashSet;
4 use std::path::{Path, PathBuf};
6 use std::path::{Path, PathBuf};
5
7
6 /// A repository on disk
8 /// A repository on disk
7 pub struct Repo {
9 pub struct Repo {
8 working_directory: PathBuf,
10 working_directory: PathBuf,
9 dot_hg: PathBuf,
11 dot_hg: PathBuf,
10 store: PathBuf,
12 store: PathBuf,
13 requirements: HashSet<String>,
11 }
14 }
12
15
13 #[derive(Debug, derive_more::From)]
16 #[derive(Debug, derive_more::From)]
14 pub enum RepoFindError {
17 pub enum RepoFindError {
15 NotFoundInCurrentDirectoryOrAncestors {
18 NotFoundInCurrentDirectoryOrAncestors {
16 current_directory: PathBuf,
19 current_directory: PathBuf,
17 },
20 },
18 #[from]
21 #[from]
19 Other(HgError),
22 Other(HgError),
20 }
23 }
21
24
22 /// Filesystem access abstraction for the contents of a given "base" directory
25 /// Filesystem access abstraction for the contents of a given "base" directory
23 #[derive(Clone, Copy)]
26 #[derive(Clone, Copy)]
24 pub(crate) struct Vfs<'a> {
27 pub(crate) struct Vfs<'a> {
25 base: &'a Path,
28 base: &'a Path,
26 }
29 }
27
30
28 impl Repo {
31 impl Repo {
29 /// Search the current directory and its ancestors for a repository:
32 /// Search the current directory and its ancestors for a repository:
30 /// a working directory that contains a `.hg` sub-directory.
33 /// a working directory that contains a `.hg` sub-directory.
31 pub fn find() -> Result<Self, RepoFindError> {
34 pub fn find() -> Result<Self, RepoFindError> {
32 let current_directory = crate::utils::current_dir()?;
35 let current_directory = crate::utils::current_dir()?;
33 // ancestors() is inclusive: it first yields `current_directory` as-is.
36 // ancestors() is inclusive: it first yields `current_directory` as-is.
34 for ancestor in current_directory.ancestors() {
37 for ancestor in current_directory.ancestors() {
35 let dot_hg = ancestor.join(".hg");
38 if ancestor.join(".hg").is_dir() {
36 if dot_hg.is_dir() {
39 return Ok(Self::new_at_path(ancestor.to_owned())?);
37 let repo = Self {
38 store: dot_hg.join("store"),
39 dot_hg,
40 working_directory: ancestor.to_owned(),
41 };
42 requirements::check(&repo)?;
43 return Ok(repo);
44 }
40 }
45 }
41 }
46 Err(RepoFindError::NotFoundInCurrentDirectoryOrAncestors {
42 Err(RepoFindError::NotFoundInCurrentDirectoryOrAncestors {
47 current_directory,
43 current_directory,
48 })
44 })
49 }
45 }
50
46
47 /// To be called after checking that `.hg` is a sub-directory
48 fn new_at_path(working_directory: PathBuf) -> Result<Self, HgError> {
49 let dot_hg = working_directory.join(".hg");
50 let hg_vfs = Vfs { base: &dot_hg };
51 let reqs = requirements::load_if_exists(hg_vfs)?;
52 let relative =
53 reqs.contains(requirements::RELATIVE_SHARED_REQUIREMENT);
54 let shared =
55 reqs.contains(requirements::SHARED_REQUIREMENT) || relative;
56 let store_path;
57 if !shared {
58 store_path = dot_hg.join("store");
59 } else {
60 let bytes = hg_vfs.read("sharedpath")?;
61 let mut shared_path = get_path_from_bytes(&bytes).to_owned();
62 if relative {
63 shared_path = dot_hg.join(shared_path)
64 }
65 if !shared_path.is_dir() {
66 return Err(HgError::corrupted(format!(
67 ".hg/sharedpath points to nonexistent directory {}",
68 shared_path.display()
69 )));
70 }
71
72 store_path = shared_path.join("store");
73 }
74
75 let repo = Self {
76 requirements: reqs,
77 working_directory,
78 store: store_path,
79 dot_hg,
80 };
81
82 requirements::check(&repo)?;
83
84 Ok(repo)
85 }
86
51 pub fn working_directory_path(&self) -> &Path {
87 pub fn working_directory_path(&self) -> &Path {
52 &self.working_directory
88 &self.working_directory
53 }
89 }
54
90
91 pub fn requirements(&self) -> &HashSet<String> {
92 &self.requirements
93 }
94
55 /// For accessing repository files (in `.hg`), except for the store
95 /// For accessing repository files (in `.hg`), except for the store
56 /// (`.hg/store`).
96 /// (`.hg/store`).
57 pub(crate) fn hg_vfs(&self) -> Vfs<'_> {
97 pub(crate) fn hg_vfs(&self) -> Vfs<'_> {
58 Vfs { base: &self.dot_hg }
98 Vfs { base: &self.dot_hg }
59 }
99 }
60
100
61 /// For accessing repository store files (in `.hg/store`)
101 /// For accessing repository store files (in `.hg/store`)
62 pub(crate) fn store_vfs(&self) -> Vfs<'_> {
102 pub(crate) fn store_vfs(&self) -> Vfs<'_> {
63 Vfs { base: &self.store }
103 Vfs { base: &self.store }
64 }
104 }
65
105
66 /// For accessing the working copy
106 /// For accessing the working copy
67
107
68 // The underscore prefix silences the "never used" warning. Remove before
108 // The underscore prefix silences the "never used" warning. Remove before
69 // using.
109 // using.
70 pub(crate) fn _working_directory_vfs(&self) -> Vfs<'_> {
110 pub(crate) fn _working_directory_vfs(&self) -> Vfs<'_> {
71 Vfs {
111 Vfs {
72 base: &self.working_directory,
112 base: &self.working_directory,
73 }
113 }
74 }
114 }
75 }
115 }
76
116
77 impl Vfs<'_> {
117 impl Vfs<'_> {
78 pub(crate) fn join(&self, relative_path: impl AsRef<Path>) -> PathBuf {
118 pub(crate) fn join(&self, relative_path: impl AsRef<Path>) -> PathBuf {
79 self.base.join(relative_path)
119 self.base.join(relative_path)
80 }
120 }
81
121
82 pub(crate) fn read(
122 pub(crate) fn read(
83 &self,
123 &self,
84 relative_path: impl AsRef<Path>,
124 relative_path: impl AsRef<Path>,
85 ) -> Result<Vec<u8>, HgError> {
125 ) -> Result<Vec<u8>, HgError> {
86 let path = self.join(relative_path);
126 let path = self.join(relative_path);
87 std::fs::read(&path).for_file(&path)
127 std::fs::read(&path).for_file(&path)
88 }
128 }
89
129
90 pub(crate) fn mmap_open(
130 pub(crate) fn mmap_open(
91 &self,
131 &self,
92 relative_path: impl AsRef<Path>,
132 relative_path: impl AsRef<Path>,
93 ) -> Result<Mmap, HgError> {
133 ) -> Result<Mmap, HgError> {
94 let path = self.base.join(relative_path);
134 let path = self.base.join(relative_path);
95 let file = std::fs::File::open(&path).for_file(&path)?;
135 let file = std::fs::File::open(&path).for_file(&path)?;
96 // TODO: what are the safety requirements here?
136 // TODO: what are the safety requirements here?
97 let mmap = unsafe { MmapOptions::new().map(&file) }.for_file(&path)?;
137 let mmap = unsafe { MmapOptions::new().map(&file) }.for_file(&path)?;
98 Ok(mmap)
138 Ok(mmap)
99 }
139 }
100 }
140 }
@@ -1,67 +1,133 b''
1 use crate::errors::{HgError, HgResultExt};
1 use crate::errors::{HgError, HgResultExt};
2 use crate::repo::Repo;
2 use crate::repo::{Repo, Vfs};
3 use std::collections::HashSet;
3
4
4 fn parse(bytes: &[u8]) -> Result<Vec<String>, HgError> {
5 fn parse(bytes: &[u8]) -> Result<HashSet<String>, HgError> {
5 // The Python code reading this file uses `str.splitlines`
6 // The Python code reading this file uses `str.splitlines`
6 // which looks for a number of line separators (even including a couple of
7 // which looks for a number of line separators (even including a couple of
7 // non-ASCII ones), but Python code writing it always uses `\n`.
8 // non-ASCII ones), but Python code writing it always uses `\n`.
8 let lines = bytes.split(|&byte| byte == b'\n');
9 let lines = bytes.split(|&byte| byte == b'\n');
9
10
10 lines
11 lines
11 .filter(|line| !line.is_empty())
12 .filter(|line| !line.is_empty())
12 .map(|line| {
13 .map(|line| {
13 // Python uses Unicode `str.isalnum` but feature names are all
14 // Python uses Unicode `str.isalnum` but feature names are all
14 // ASCII
15 // ASCII
15 if line[0].is_ascii_alphanumeric() && line.is_ascii() {
16 if line[0].is_ascii_alphanumeric() && line.is_ascii() {
16 Ok(String::from_utf8(line.into()).unwrap())
17 Ok(String::from_utf8(line.into()).unwrap())
17 } else {
18 } else {
18 Err(HgError::corrupted("parse error in 'requires' file"))
19 Err(HgError::corrupted("parse error in 'requires' file"))
19 }
20 }
20 })
21 })
21 .collect()
22 .collect()
22 }
23 }
23
24
24 pub fn load(repo: &Repo) -> Result<Vec<String>, HgError> {
25 pub(crate) fn load_if_exists(hg_vfs: Vfs) -> Result<HashSet<String>, HgError> {
25 if let Some(bytes) =
26 if let Some(bytes) = hg_vfs.read("requires").io_not_found_as_none()? {
26 repo.hg_vfs().read("requires").io_not_found_as_none()?
27 {
28 parse(&bytes)
27 parse(&bytes)
29 } else {
28 } else {
30 // Treat a missing file the same as an empty file.
29 // Treat a missing file the same as an empty file.
31 // From `mercurial/localrepo.py`:
30 // From `mercurial/localrepo.py`:
32 // > requires file contains a newline-delimited list of
31 // > requires file contains a newline-delimited list of
33 // > features/capabilities the opener (us) must have in order to use
32 // > features/capabilities the opener (us) must have in order to use
34 // > the repository. This file was introduced in Mercurial 0.9.2,
33 // > the repository. This file was introduced in Mercurial 0.9.2,
35 // > which means very old repositories may not have one. We assume
34 // > which means very old repositories may not have one. We assume
36 // > a missing file translates to no requirements.
35 // > a missing file translates to no requirements.
37 Ok(Vec::new())
36 Ok(HashSet::new())
38 }
37 }
39 }
38 }
40
39
41 pub fn check(repo: &Repo) -> Result<(), HgError> {
40 pub(crate) fn check(repo: &Repo) -> Result<(), HgError> {
42 for feature in load(repo)? {
41 for feature in repo.requirements() {
43 if !SUPPORTED.contains(&&*feature) {
42 if !SUPPORTED.contains(&feature.as_str()) {
44 // TODO: collect all unknown features and include them in the
43 // TODO: collect all unknown features and include them in the
45 // error message?
44 // error message?
46 return Err(HgError::UnsupportedFeature(format!(
45 return Err(HgError::UnsupportedFeature(format!(
47 "repository requires feature unknown to this Mercurial: {}",
46 "repository requires feature unknown to this Mercurial: {}",
48 feature
47 feature
49 )));
48 )));
50 }
49 }
51 }
50 }
52 Ok(())
51 Ok(())
53 }
52 }
54
53
55 // TODO: set this to actually-supported features
54 // TODO: set this to actually-supported features
56 const SUPPORTED: &[&str] = &[
55 const SUPPORTED: &[&str] = &[
57 "dotencode",
56 "dotencode",
58 "fncache",
57 "fncache",
59 "generaldelta",
58 "generaldelta",
60 "revlogv1",
59 "revlogv1",
61 "sparserevlog",
60 SHARED_REQUIREMENT,
61 SPARSEREVLOG_REQUIREMENT,
62 RELATIVE_SHARED_REQUIREMENT,
62 "store",
63 "store",
63 // As of this writing everything rhg does is read-only.
64 // As of this writing everything rhg does is read-only.
64 // When it starts writing to the repository, it’ll need to either keep the
65 // When it starts writing to the repository, it’ll need to either keep the
65 // persistent nodemap up to date or remove this entry:
66 // persistent nodemap up to date or remove this entry:
66 "persistent-nodemap",
67 "persistent-nodemap",
67 ];
68 ];
69
70 // Copied from mercurial/requirements.py:
71
72 /// When narrowing is finalized and no longer subject to format changes,
73 /// we should move this to just "narrow" or similar.
74 #[allow(unused)]
75 pub(crate) const NARROW_REQUIREMENT: &str = "narrowhg-experimental";
76
77 /// Enables sparse working directory usage
78 #[allow(unused)]
79 pub(crate) const SPARSE_REQUIREMENT: &str = "exp-sparse";
80
81 /// Enables the internal phase which is used to hide changesets instead
82 /// of stripping them
83 #[allow(unused)]
84 pub(crate) const INTERNAL_PHASE_REQUIREMENT: &str = "internal-phase";
85
86 /// Stores manifest in Tree structure
87 #[allow(unused)]
88 pub(crate) const TREEMANIFEST_REQUIREMENT: &str = "treemanifest";
89
90 /// Increment the sub-version when the revlog v2 format changes to lock out old
91 /// clients.
92 #[allow(unused)]
93 pub(crate) const REVLOGV2_REQUIREMENT: &str = "exp-revlogv2.1";
94
95 /// A repository with the sparserevlog feature will have delta chains that
96 /// can spread over a larger span. Sparse reading cuts these large spans into
97 /// pieces, so that each piece isn't too big.
98 /// Without the sparserevlog capability, reading from the repository could use
99 /// huge amounts of memory, because the whole span would be read at once,
100 /// including all the intermediate revisions that aren't pertinent for the
101 /// chain. This is why once a repository has enabled sparse-read, it becomes
102 /// required.
103 #[allow(unused)]
104 pub(crate) const SPARSEREVLOG_REQUIREMENT: &str = "sparserevlog";
105
106 /// A repository with the sidedataflag requirement will allow to store extra
107 /// information for revision without altering their original hashes.
108 #[allow(unused)]
109 pub(crate) const SIDEDATA_REQUIREMENT: &str = "exp-sidedata-flag";
110
111 /// A repository with the copies-sidedata-changeset requirement will store
112 /// copies related information in changeset's sidedata.
113 #[allow(unused)]
114 pub(crate) const COPIESSDC_REQUIREMENT: &str = "exp-copies-sidedata-changeset";
115
116 /// The repository use persistent nodemap for the changelog and the manifest.
117 #[allow(unused)]
118 pub(crate) const NODEMAP_REQUIREMENT: &str = "persistent-nodemap";
119
120 /// Denotes that the current repository is a share
121 #[allow(unused)]
122 pub(crate) const SHARED_REQUIREMENT: &str = "shared";
123
124 /// Denotes that the current repository is a share and the shared source path is
125 /// relative to the current repository root path
126 #[allow(unused)]
127 pub(crate) const RELATIVE_SHARED_REQUIREMENT: &str = "relshared";
128
129 /// A repository with share implemented safely. The repository has different
130 /// store and working copy requirements i.e. both `.hg/requires` and
131 /// `.hg/store/requires` are present.
132 #[allow(unused)]
133 pub(crate) const SHARESAFE_REQUIREMENT: &str = "exp-sharesafe";
@@ -1,30 +1,31 b''
1 use crate::commands::Command;
1 use crate::commands::Command;
2 use crate::error::CommandError;
2 use crate::error::CommandError;
3 use crate::ui::Ui;
3 use crate::ui::Ui;
4 use hg::repo::Repo;
4 use hg::repo::Repo;
5 use hg::requirements;
6
5
7 pub const HELP_TEXT: &str = "
6 pub const HELP_TEXT: &str = "
8 Print the current repo requirements.
7 Print the current repo requirements.
9 ";
8 ";
10
9
11 pub struct DebugRequirementsCommand {}
10 pub struct DebugRequirementsCommand {}
12
11
13 impl DebugRequirementsCommand {
12 impl DebugRequirementsCommand {
14 pub fn new() -> Self {
13 pub fn new() -> Self {
15 DebugRequirementsCommand {}
14 DebugRequirementsCommand {}
16 }
15 }
17 }
16 }
18
17
19 impl Command for DebugRequirementsCommand {
18 impl Command for DebugRequirementsCommand {
20 fn run(&self, ui: &Ui) -> Result<(), CommandError> {
19 fn run(&self, ui: &Ui) -> Result<(), CommandError> {
21 let repo = Repo::find()?;
20 let repo = Repo::find()?;
22 let mut output = String::new();
21 let mut output = String::new();
23 for req in requirements::load(&repo)? {
22 let mut requirements: Vec<_> = repo.requirements().iter().collect();
24 output.push_str(&req);
23 requirements.sort();
24 for req in requirements {
25 output.push_str(req);
25 output.push('\n');
26 output.push('\n');
26 }
27 }
27 ui.write_stdout(output.as_bytes())?;
28 ui.write_stdout(output.as_bytes())?;
28 Ok(())
29 Ok(())
29 }
30 }
30 }
31 }
@@ -1,262 +1,262 b''
1 #require rust
1 #require rust
2
2
3 Define an rhg function that will only run if rhg exists
3 Define an rhg function that will only run if rhg exists
4 $ rhg() {
4 $ rhg() {
5 > if [ -f "$RUNTESTDIR/../rust/target/release/rhg" ]; then
5 > if [ -f "$RUNTESTDIR/../rust/target/release/rhg" ]; then
6 > "$RUNTESTDIR/../rust/target/release/rhg" "$@"
6 > "$RUNTESTDIR/../rust/target/release/rhg" "$@"
7 > else
7 > else
8 > echo "skipped: Cannot find rhg. Try to run cargo build in rust/rhg."
8 > echo "skipped: Cannot find rhg. Try to run cargo build in rust/rhg."
9 > exit 80
9 > exit 80
10 > fi
10 > fi
11 > }
11 > }
12
12
13 Unimplemented command
13 Unimplemented command
14 $ rhg unimplemented-command
14 $ rhg unimplemented-command
15 error: Found argument 'unimplemented-command' which wasn't expected, or isn't valid in this context
15 error: Found argument 'unimplemented-command' which wasn't expected, or isn't valid in this context
16
16
17 USAGE:
17 USAGE:
18 rhg <SUBCOMMAND>
18 rhg <SUBCOMMAND>
19
19
20 For more information try --help
20 For more information try --help
21 [252]
21 [252]
22
22
23 Finding root
23 Finding root
24 $ rhg root
24 $ rhg root
25 abort: no repository found in '$TESTTMP' (.hg not found)!
25 abort: no repository found in '$TESTTMP' (.hg not found)!
26 [255]
26 [255]
27
27
28 $ hg init repository
28 $ hg init repository
29 $ cd repository
29 $ cd repository
30 $ rhg root
30 $ rhg root
31 $TESTTMP/repository
31 $TESTTMP/repository
32
32
33 Unwritable file descriptor
33 Unwritable file descriptor
34 $ rhg root > /dev/full
34 $ rhg root > /dev/full
35 abort: No space left on device (os error 28)
35 abort: No space left on device (os error 28)
36 [255]
36 [255]
37
37
38 Deleted repository
38 Deleted repository
39 $ rm -rf `pwd`
39 $ rm -rf `pwd`
40 $ rhg root
40 $ rhg root
41 abort: $ENOENT$: current directory
41 abort: $ENOENT$: current directory
42 [255]
42 [255]
43
43
44 Listing tracked files
44 Listing tracked files
45 $ cd $TESTTMP
45 $ cd $TESTTMP
46 $ hg init repository
46 $ hg init repository
47 $ cd repository
47 $ cd repository
48 $ for i in 1 2 3; do
48 $ for i in 1 2 3; do
49 > echo $i >> file$i
49 > echo $i >> file$i
50 > hg add file$i
50 > hg add file$i
51 > done
51 > done
52 > hg commit -m "commit $i" -q
52 > hg commit -m "commit $i" -q
53
53
54 Listing tracked files from root
54 Listing tracked files from root
55 $ rhg files
55 $ rhg files
56 file1
56 file1
57 file2
57 file2
58 file3
58 file3
59
59
60 Listing tracked files from subdirectory
60 Listing tracked files from subdirectory
61 $ mkdir -p path/to/directory
61 $ mkdir -p path/to/directory
62 $ cd path/to/directory
62 $ cd path/to/directory
63 $ rhg files
63 $ rhg files
64 ../../../file1
64 ../../../file1
65 ../../../file2
65 ../../../file2
66 ../../../file3
66 ../../../file3
67
67
68 Listing tracked files through broken pipe
68 Listing tracked files through broken pipe
69 $ rhg files | head -n 1
69 $ rhg files | head -n 1
70 ../../../file1
70 ../../../file1
71
71
72 Debugging data in inline index
72 Debugging data in inline index
73 $ cd $TESTTMP
73 $ cd $TESTTMP
74 $ rm -rf repository
74 $ rm -rf repository
75 $ hg init repository
75 $ hg init repository
76 $ cd repository
76 $ cd repository
77 $ for i in 1 2 3 4 5 6; do
77 $ for i in 1 2 3 4 5 6; do
78 > echo $i >> file-$i
78 > echo $i >> file-$i
79 > hg add file-$i
79 > hg add file-$i
80 > hg commit -m "Commit $i" -q
80 > hg commit -m "Commit $i" -q
81 > done
81 > done
82 $ rhg debugdata -c 2
82 $ rhg debugdata -c 2
83 8d0267cb034247ebfa5ee58ce59e22e57a492297
83 8d0267cb034247ebfa5ee58ce59e22e57a492297
84 test
84 test
85 0 0
85 0 0
86 file-3
86 file-3
87
87
88 Commit 3 (no-eol)
88 Commit 3 (no-eol)
89 $ rhg debugdata -m 2
89 $ rhg debugdata -m 2
90 file-1\x00b8e02f6433738021a065f94175c7cd23db5f05be (esc)
90 file-1\x00b8e02f6433738021a065f94175c7cd23db5f05be (esc)
91 file-2\x005d9299349fc01ddd25d0070d149b124d8f10411e (esc)
91 file-2\x005d9299349fc01ddd25d0070d149b124d8f10411e (esc)
92 file-3\x002661d26c649684b482d10f91960cc3db683c38b4 (esc)
92 file-3\x002661d26c649684b482d10f91960cc3db683c38b4 (esc)
93
93
94 Debugging with full node id
94 Debugging with full node id
95 $ rhg debugdata -c `hg log -r 0 -T '{node}'`
95 $ rhg debugdata -c `hg log -r 0 -T '{node}'`
96 d1d1c679d3053e8926061b6f45ca52009f011e3f
96 d1d1c679d3053e8926061b6f45ca52009f011e3f
97 test
97 test
98 0 0
98 0 0
99 file-1
99 file-1
100
100
101 Commit 1 (no-eol)
101 Commit 1 (no-eol)
102
102
103 Specifying revisions by changeset ID
103 Specifying revisions by changeset ID
104 $ hg log -T '{node}\n'
104 $ hg log -T '{node}\n'
105 c6ad58c44207b6ff8a4fbbca7045a5edaa7e908b
105 c6ad58c44207b6ff8a4fbbca7045a5edaa7e908b
106 d654274993d0149eecc3cc03214f598320211900
106 d654274993d0149eecc3cc03214f598320211900
107 f646af7e96481d3a5470b695cf30ad8e3ab6c575
107 f646af7e96481d3a5470b695cf30ad8e3ab6c575
108 cf8b83f14ead62b374b6e91a0e9303b85dfd9ed7
108 cf8b83f14ead62b374b6e91a0e9303b85dfd9ed7
109 91c6f6e73e39318534dc415ea4e8a09c99cd74d6
109 91c6f6e73e39318534dc415ea4e8a09c99cd74d6
110 6ae9681c6d30389694d8701faf24b583cf3ccafe
110 6ae9681c6d30389694d8701faf24b583cf3ccafe
111 $ rhg files -r cf8b83
111 $ rhg files -r cf8b83
112 file-1
112 file-1
113 file-2
113 file-2
114 file-3
114 file-3
115 $ rhg cat -r cf8b83 file-2
115 $ rhg cat -r cf8b83 file-2
116 2
116 2
117 $ rhg cat -r c file-2
117 $ rhg cat -r c file-2
118 abort: ambiguous revision identifier c
118 abort: ambiguous revision identifier c
119 [255]
119 [255]
120 $ rhg cat -r d file-2
120 $ rhg cat -r d file-2
121 2
121 2
122
122
123 Cat files
123 Cat files
124 $ cd $TESTTMP
124 $ cd $TESTTMP
125 $ rm -rf repository
125 $ rm -rf repository
126 $ hg init repository
126 $ hg init repository
127 $ cd repository
127 $ cd repository
128 $ echo "original content" > original
128 $ echo "original content" > original
129 $ hg add original
129 $ hg add original
130 $ hg commit -m "add original" original
130 $ hg commit -m "add original" original
131 $ rhg cat -r 0 original
131 $ rhg cat -r 0 original
132 original content
132 original content
133 Cat copied file should not display copy metadata
133 Cat copied file should not display copy metadata
134 $ hg copy original copy_of_original
134 $ hg copy original copy_of_original
135 $ hg commit -m "add copy of original"
135 $ hg commit -m "add copy of original"
136 $ rhg cat -r 1 copy_of_original
136 $ rhg cat -r 1 copy_of_original
137 original content
137 original content
138
138
139 Requirements
139 Requirements
140 $ rhg debugrequirements
140 $ rhg debugrequirements
141 dotencode
141 dotencode
142 fncache
142 fncache
143 generaldelta
143 generaldelta
144 revlogv1
144 revlogv1
145 sparserevlog
145 sparserevlog
146 store
146 store
147
147
148 $ echo indoor-pool >> .hg/requires
148 $ echo indoor-pool >> .hg/requires
149 $ rhg files
149 $ rhg files
150 [252]
150 [252]
151
151
152 $ rhg cat -r 1 copy_of_original
152 $ rhg cat -r 1 copy_of_original
153 [252]
153 [252]
154
154
155 $ rhg debugrequirements
155 $ rhg debugrequirements
156 [252]
156 [252]
157
157
158 $ echo -e '\xFF' >> .hg/requires
158 $ echo -e '\xFF' >> .hg/requires
159 $ rhg debugrequirements
159 $ rhg debugrequirements
160 abort: corrupted repository: parse error in 'requires' file
160 abort: corrupted repository: parse error in 'requires' file
161 [255]
161 [255]
162
162
163 Persistent nodemap
163 Persistent nodemap
164 $ cd $TESTTMP
164 $ cd $TESTTMP
165 $ rm -rf repository
165 $ rm -rf repository
166 $ hg init repository
166 $ hg init repository
167 $ cd repository
167 $ cd repository
168 $ rhg debugrequirements | grep nodemap
168 $ rhg debugrequirements | grep nodemap
169 [1]
169 [1]
170 $ hg debugbuilddag .+5000 --overwritten-file --config "storage.revlog.nodemap.mode=warn"
170 $ hg debugbuilddag .+5000 --overwritten-file --config "storage.revlog.nodemap.mode=warn"
171 $ hg id -r tip
171 $ hg id -r tip
172 c3ae8dec9fad tip
172 c3ae8dec9fad tip
173 $ ls .hg/store/00changelog*
173 $ ls .hg/store/00changelog*
174 .hg/store/00changelog.d
174 .hg/store/00changelog.d
175 .hg/store/00changelog.i
175 .hg/store/00changelog.i
176 $ rhg files -r c3ae8dec9fad
176 $ rhg files -r c3ae8dec9fad
177 of
177 of
178
178
179 $ cd $TESTTMP
179 $ cd $TESTTMP
180 $ rm -rf repository
180 $ rm -rf repository
181 $ hg --config format.use-persistent-nodemap=True init repository
181 $ hg --config format.use-persistent-nodemap=True init repository
182 $ cd repository
182 $ cd repository
183 $ rhg debugrequirements | grep nodemap
183 $ rhg debugrequirements | grep nodemap
184 persistent-nodemap
184 persistent-nodemap
185 $ hg debugbuilddag .+5000 --overwritten-file --config "storage.revlog.nodemap.mode=warn"
185 $ hg debugbuilddag .+5000 --overwritten-file --config "storage.revlog.nodemap.mode=warn"
186 $ hg id -r tip
186 $ hg id -r tip
187 c3ae8dec9fad tip
187 c3ae8dec9fad tip
188 $ ls .hg/store/00changelog*
188 $ ls .hg/store/00changelog*
189 .hg/store/00changelog-*.nd (glob)
189 .hg/store/00changelog-*.nd (glob)
190 .hg/store/00changelog.d
190 .hg/store/00changelog.d
191 .hg/store/00changelog.i
191 .hg/store/00changelog.i
192 .hg/store/00changelog.n
192 .hg/store/00changelog.n
193
193
194 Specifying revisions by changeset ID
194 Specifying revisions by changeset ID
195 $ rhg files -r c3ae8dec9fad
195 $ rhg files -r c3ae8dec9fad
196 of
196 of
197 $ rhg cat -r c3ae8dec9fad of
197 $ rhg cat -r c3ae8dec9fad of
198 r5000
198 r5000
199
199
200 Create a shared repository
200 Create a shared repository
201
201
202 $ echo "[extensions]" >> $HGRCPATH
202 $ echo "[extensions]" >> $HGRCPATH
203 $ echo "share = " >> $HGRCPATH
203 $ echo "share = " >> $HGRCPATH
204
204
205 $ cd $TESTTMP
205 $ cd $TESTTMP
206 $ hg init repo1
206 $ hg init repo1
207 $ cd repo1
207 $ cd repo1
208 $ echo a > a
208 $ echo a > a
209 $ hg commit -A -m'init'
209 $ hg commit -A -m'init'
210 adding a
210 adding a
211
211
212 $ cd ..
212 $ cd ..
213 $ hg share repo1 repo2
213 $ hg share repo1 repo2
214 updating working directory
214 updating working directory
215 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
215 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
216
216
217 And check that basic rhg commands work with sharing
217 And check that basic rhg commands work with sharing
218
218
219 $ cd repo2
219 $ cd repo2
220 $ rhg files
220 $ rhg files
221 [252]
221 a
222 $ rhg cat -r 0 a
222 $ rhg cat -r 0 a
223 [252]
223 a
224
224
225 Same with relative sharing
225 Same with relative sharing
226
226
227 $ cd ..
227 $ cd ..
228 $ hg share repo2 repo3 --relative
228 $ hg share repo2 repo3 --relative
229 updating working directory
229 updating working directory
230 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
230 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
231
231
232 $ cd repo3
232 $ cd repo3
233 $ rhg files
233 $ rhg files
234 [252]
234 a
235 $ rhg cat -r 0 a
235 $ rhg cat -r 0 a
236 [252]
236 a
237
237
238 Same with share-safe
238 Same with share-safe
239
239
240 $ echo "[format]" >> $HGRCPATH
240 $ echo "[format]" >> $HGRCPATH
241 $ echo "use-share-safe = True" >> $HGRCPATH
241 $ echo "use-share-safe = True" >> $HGRCPATH
242
242
243 $ cd $TESTTMP
243 $ cd $TESTTMP
244 $ hg init repo4
244 $ hg init repo4
245 $ cd repo4
245 $ cd repo4
246 $ echo a > a
246 $ echo a > a
247 $ hg commit -A -m'init'
247 $ hg commit -A -m'init'
248 adding a
248 adding a
249
249
250 $ cd ..
250 $ cd ..
251 $ hg share repo4 repo5
251 $ hg share repo4 repo5
252 updating working directory
252 updating working directory
253 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
253 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
254
254
255 And check that basic rhg commands work with sharing
255 And check that basic rhg commands work with sharing
256
256
257 $ cd repo5
257 $ cd repo5
258 $ rhg files
258 $ rhg files
259 [252]
259 [252]
260 $ rhg cat -r 0 a
260 $ rhg cat -r 0 a
261 [252]
261 [252]
262
262
General Comments 0
You need to be logged in to leave comments. Login now