@@ -1,140 +1,170 @@
 use crate::errors::{HgError, IoResultExt};
 use crate::requirements;
 use crate::utils::files::get_path_from_bytes;
 use memmap::{Mmap, MmapOptions};
 use std::collections::HashSet;
 use std::path::{Path, PathBuf};

 /// A repository on disk
 pub struct Repo {
     working_directory: PathBuf,
     dot_hg: PathBuf,
     store: PathBuf,
     requirements: HashSet<String>,
 }

 #[derive(Debug, derive_more::From)]
 pub enum RepoFindError {
     NotFoundInCurrentDirectoryOrAncestors {
         current_directory: PathBuf,
     },
     #[from]
     Other(HgError),
 }

 /// Filesystem access abstraction for the contents of a given "base" directory
 #[derive(Clone, Copy)]
 pub(crate) struct Vfs<'a> {
     base: &'a Path,
 }

 impl Repo {
     /// Search the current directory and its ancestors for a repository:
     /// a working directory that contains a `.hg` sub-directory.
     pub fn find() -> Result<Self, RepoFindError> {
         let current_directory = crate::utils::current_dir()?;
         // ancestors() is inclusive: it first yields `current_directory` as-is.
         for ancestor in current_directory.ancestors() {
             if ancestor.join(".hg").is_dir() {
                 return Ok(Self::new_at_path(ancestor.to_owned())?);
             }
         }
         Err(RepoFindError::NotFoundInCurrentDirectoryOrAncestors {
             current_directory,
         })
     }

     /// To be called after checking that `.hg` is a sub-directory
     fn new_at_path(working_directory: PathBuf) -> Result<Self, HgError> {
         let dot_hg = working_directory.join(".hg");
+
         let hg_vfs = Vfs { base: &dot_hg };
-        let reqs = requirements::load_if_exists(hg_vfs)?;
+        let mut reqs = requirements::load_if_exists(hg_vfs)?;
         let relative =
             reqs.contains(requirements::RELATIVE_SHARED_REQUIREMENT);
         let shared =
             reqs.contains(requirements::SHARED_REQUIREMENT) || relative;
+
+        // From `mercurial/localrepo.py`:
+        //
+        // if .hg/requires contains the sharesafe requirement, it means
+        // there exists a `.hg/store/requires` too and we should read it
+        // NOTE: presence of SHARESAFE_REQUIREMENT imply that store requirement
+        // is present. We never write SHARESAFE_REQUIREMENT for a repo if store
+        // is not present, refer checkrequirementscompat() for that
+        //
+        // However, if SHARESAFE_REQUIREMENT is not present, it means that the
+        // repository was shared the old way. We check the share source
+        // .hg/requires for SHARESAFE_REQUIREMENT to detect whether the
+        // current repository needs to be reshared
+        let share_safe = reqs.contains(requirements::SHARESAFE_REQUIREMENT);
+
         let store_path;
         if !shared {
             store_path = dot_hg.join("store");
+            if share_safe {
+                reqs.extend(requirements::load(Vfs { base: &store_path })?);
+            }
         } else {
             let bytes = hg_vfs.read("sharedpath")?;
             let mut shared_path = get_path_from_bytes(&bytes).to_owned();
             if relative {
                 shared_path = dot_hg.join(shared_path)
             }
             if !shared_path.is_dir() {
                 return Err(HgError::corrupted(format!(
                     ".hg/sharedpath points to nonexistent directory {}",
                     shared_path.display()
                 )));
             }

             store_path = shared_path.join("store");
+
+            let source_is_share_safe =
+                requirements::load(Vfs { base: &shared_path })?
+                    .contains(requirements::SHARESAFE_REQUIREMENT);
+
+            // TODO: support for `share.safe-mismatch.*` config
+            if share_safe && !source_is_share_safe {
+                return Err(HgError::unsupported("share-safe downgrade"));
+            } else if source_is_share_safe && !share_safe {
+                return Err(HgError::unsupported("share-safe upgrade"));
+            }
         }

         let repo = Self {
             requirements: reqs,
             working_directory,
             store: store_path,
             dot_hg,
         };

         requirements::check(&repo)?;

         Ok(repo)
     }

     pub fn working_directory_path(&self) -> &Path {
         &self.working_directory
     }

     pub fn requirements(&self) -> &HashSet<String> {
         &self.requirements
     }

     /// For accessing repository files (in `.hg`), except for the store
     /// (`.hg/store`).
     pub(crate) fn hg_vfs(&self) -> Vfs<'_> {
         Vfs { base: &self.dot_hg }
     }

     /// For accessing repository store files (in `.hg/store`)
     pub(crate) fn store_vfs(&self) -> Vfs<'_> {
         Vfs { base: &self.store }
     }

     /// For accessing the working copy

     // The underscore prefix silences the "never used" warning. Remove before
     // using.
     pub(crate) fn _working_directory_vfs(&self) -> Vfs<'_> {
         Vfs {
             base: &self.working_directory,
         }
     }
 }

 impl Vfs<'_> {
     pub(crate) fn join(&self, relative_path: impl AsRef<Path>) -> PathBuf {
         self.base.join(relative_path)
     }

     pub(crate) fn read(
         &self,
         relative_path: impl AsRef<Path>,
     ) -> Result<Vec<u8>, HgError> {
         let path = self.join(relative_path);
         std::fs::read(&path).for_file(&path)
     }

     pub(crate) fn mmap_open(
         &self,
         relative_path: impl AsRef<Path>,
     ) -> Result<Mmap, HgError> {
         let path = self.base.join(relative_path);
         let file = std::fs::File::open(&path).for_file(&path)?;
         // TODO: what are the safety requirements here?
         let mmap = unsafe { MmapOptions::new().map(&file) }.for_file(&path)?;
         Ok(mmap)
     }
 }
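
The hunk above combines three pieces of state in `new_at_path`: whether the repository is a share (`shared`/`relshared`), whether its own `.hg/requires` advertises `share-safe`, and whether the share source's does. The following is a minimal, self-contained sketch of that decision restated over plain string sets; `resolve_share_safe`, `Outcome`, and the closure parameter are hypothetical names used only for illustration and are not part of hg-core.

    // Sketch only: the share-safe decision from the patch, without hg-core types.
    use std::collections::HashSet;

    const SHARED: &str = "shared";
    const RELATIVE_SHARED: &str = "relshared";
    const SHARE_SAFE: &str = "share-safe";

    #[derive(Debug, PartialEq)]
    enum Outcome {
        /// Plain repository: read `.hg/store/requires` only when the local
        /// `requires` file advertises share-safe.
        ReadStoreRequires { read_store: bool },
        /// Share whose source agrees about share-safe: point the store at the
        /// share source's `.hg/store`.
        UseShareSource,
        /// The patch errors out here; its TODO is to honor the
        /// `share.safe-mismatch.*` config instead.
        Mismatch(&'static str),
    }

    fn resolve_share_safe(
        local: &HashSet<String>,
        // Passed as a closure because, as in the patch, the source's
        // requirements are only read in the shared case.
        source_is_share_safe: impl Fn() -> bool,
    ) -> Outcome {
        let relative = local.contains(RELATIVE_SHARED);
        let shared = local.contains(SHARED) || relative;
        let share_safe = local.contains(SHARE_SAFE);
        if !shared {
            return Outcome::ReadStoreRequires { read_store: share_safe };
        }
        match (share_safe, source_is_share_safe()) {
            (true, false) => Outcome::Mismatch("share-safe downgrade"),
            (false, true) => Outcome::Mismatch("share-safe upgrade"),
            _ => Outcome::UseShareSource,
        }
    }

    fn main() {
        let local: HashSet<String> =
            ["shared", "share-safe"].iter().map(|s| s.to_string()).collect();
        // A share source that was not upgraded to share-safe is a mismatch.
        assert_eq!(
            resolve_share_safe(&local, || false),
            Outcome::Mismatch("share-safe downgrade")
        );
    }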
@@ -1,133 +1,138 @@
 use crate::errors::{HgError, HgResultExt};
 use crate::repo::{Repo, Vfs};
 use std::collections::HashSet;

 fn parse(bytes: &[u8]) -> Result<HashSet<String>, HgError> {
     // The Python code reading this file uses `str.splitlines`
     // which looks for a number of line separators (even including a couple of
     // non-ASCII ones), but Python code writing it always uses `\n`.
     let lines = bytes.split(|&byte| byte == b'\n');

     lines
         .filter(|line| !line.is_empty())
         .map(|line| {
             // Python uses Unicode `str.isalnum` but feature names are all
             // ASCII
             if line[0].is_ascii_alphanumeric() && line.is_ascii() {
                 Ok(String::from_utf8(line.into()).unwrap())
             } else {
                 Err(HgError::corrupted("parse error in 'requires' file"))
             }
         })
         .collect()
 }

+pub(crate) fn load(hg_vfs: Vfs) -> Result<HashSet<String>, HgError> {
+    parse(&hg_vfs.read("requires")?)
+}
+
 pub(crate) fn load_if_exists(hg_vfs: Vfs) -> Result<HashSet<String>, HgError> {
     if let Some(bytes) = hg_vfs.read("requires").io_not_found_as_none()? {
         parse(&bytes)
     } else {
         // Treat a missing file the same as an empty file.
         // From `mercurial/localrepo.py`:
         // > requires file contains a newline-delimited list of
         // > features/capabilities the opener (us) must have in order to use
         // > the repository. This file was introduced in Mercurial 0.9.2,
         // > which means very old repositories may not have one. We assume
         // > a missing file translates to no requirements.
         Ok(HashSet::new())
     }
 }

 pub(crate) fn check(repo: &Repo) -> Result<(), HgError> {
     for feature in repo.requirements() {
         if !SUPPORTED.contains(&feature.as_str()) {
             // TODO: collect all unknown features and include them in the
             // error message?
             return Err(HgError::UnsupportedFeature(format!(
                 "repository requires feature unknown to this Mercurial: {}",
                 feature
             )));
         }
     }
     Ok(())
 }

 // TODO: set this to actually-supported features
 const SUPPORTED: &[&str] = &[
     "dotencode",
     "fncache",
     "generaldelta",
     "revlogv1",
     SHARED_REQUIREMENT,
+    SHARESAFE_REQUIREMENT,
     SPARSEREVLOG_REQUIREMENT,
     RELATIVE_SHARED_REQUIREMENT,
     "store",
     // As of this writing everything rhg does is read-only.
     // When it starts writing to the repository, it'll need to either keep the
     // persistent nodemap up to date or remove this entry:
     "persistent-nodemap",
 ];

 // Copied from mercurial/requirements.py:

 /// When narrowing is finalized and no longer subject to format changes,
 /// we should move this to just "narrow" or similar.
 #[allow(unused)]
 pub(crate) const NARROW_REQUIREMENT: &str = "narrowhg-experimental";

 /// Enables sparse working directory usage
 #[allow(unused)]
 pub(crate) const SPARSE_REQUIREMENT: &str = "exp-sparse";

 /// Enables the internal phase which is used to hide changesets instead
 /// of stripping them
 #[allow(unused)]
 pub(crate) const INTERNAL_PHASE_REQUIREMENT: &str = "internal-phase";

 /// Stores manifest in Tree structure
 #[allow(unused)]
 pub(crate) const TREEMANIFEST_REQUIREMENT: &str = "treemanifest";

 /// Increment the sub-version when the revlog v2 format changes to lock out old
 /// clients.
 #[allow(unused)]
 pub(crate) const REVLOGV2_REQUIREMENT: &str = "exp-revlogv2.1";

 /// A repository with the sparserevlog feature will have delta chains that
 /// can spread over a larger span. Sparse reading cuts these large spans into
 /// pieces, so that each piece isn't too big.
 /// Without the sparserevlog capability, reading from the repository could use
 /// huge amounts of memory, because the whole span would be read at once,
 /// including all the intermediate revisions that aren't pertinent for the
 /// chain. This is why once a repository has enabled sparse-read, it becomes
 /// required.
 #[allow(unused)]
 pub(crate) const SPARSEREVLOG_REQUIREMENT: &str = "sparserevlog";

 /// A repository with the sidedataflag requirement will allow storing extra
 /// information for revisions without altering their original hashes.
 #[allow(unused)]
 pub(crate) const SIDEDATA_REQUIREMENT: &str = "exp-sidedata-flag";

 /// A repository with the copies-sidedata-changeset requirement will store
 /// copies-related information in the changeset's sidedata.
 #[allow(unused)]
 pub(crate) const COPIESSDC_REQUIREMENT: &str = "exp-copies-sidedata-changeset";

 /// The repository uses a persistent nodemap for the changelog and the manifest.
 #[allow(unused)]
 pub(crate) const NODEMAP_REQUIREMENT: &str = "persistent-nodemap";

 /// Denotes that the current repository is a share
 #[allow(unused)]
 pub(crate) const SHARED_REQUIREMENT: &str = "shared";

 /// Denotes that current repository is a share and the shared source path is
 /// relative to the current repository root path
 #[allow(unused)]
 pub(crate) const RELATIVE_SHARED_REQUIREMENT: &str = "relshared";

 /// A repository with share implemented safely. The repository has different
 /// store and working copy requirements i.e. both `.hg/requires` and
 /// `.hg/store/requires` are present.
 #[allow(unused)]
 pub(crate) const SHARESAFE_REQUIREMENT: &str = "share-safe";
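
For reference, the parsing rule above (one `\n`-terminated, all-ASCII feature name per line, starting with an alphanumeric byte) can be exercised in isolation. This is a minimal sketch only, assuming a standalone `parse_requires` function with plain `String` errors instead of `HgError`; it is not the hg-core API.

    // Sketch: same acceptance rule as `parse` above, outside hg-core.
    use std::collections::HashSet;

    fn parse_requires(bytes: &[u8]) -> Result<HashSet<String>, String> {
        bytes
            .split(|&byte| byte == b'\n')
            .filter(|line| !line.is_empty())
            .map(|line| {
                if line[0].is_ascii_alphanumeric() && line.is_ascii() {
                    // The line was just checked to be ASCII, hence valid UTF-8.
                    Ok(String::from_utf8(line.to_vec()).unwrap())
                } else {
                    Err("parse error in 'requires' file".to_string())
                }
            })
            .collect()
    }

    fn main() {
        let reqs = parse_requires(b"dotencode\nshare-safe\nstore\n").unwrap();
        assert!(reqs.contains("share-safe"));

        // A non-ASCII byte (as in the `echo -e '\xFF'` test case below) is
        // reported as corruption.
        assert!(parse_requires(b"dotencode\n\xFF\n").is_err());
    }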
@@ -1,262 +1,262 @@
 #require rust

 Define an rhg function that will only run if rhg exists
   $ rhg() {
   > if [ -f "$RUNTESTDIR/../rust/target/release/rhg" ]; then
   > "$RUNTESTDIR/../rust/target/release/rhg" "$@"
   > else
   > echo "skipped: Cannot find rhg. Try to run cargo build in rust/rhg."
   > exit 80
   > fi
   > }

 Unimplemented command
   $ rhg unimplemented-command
   error: Found argument 'unimplemented-command' which wasn't expected, or isn't valid in this context

   USAGE:
       rhg <SUBCOMMAND>

   For more information try --help
   [252]

 Finding root
   $ rhg root
   abort: no repository found in '$TESTTMP' (.hg not found)!
   [255]

   $ hg init repository
   $ cd repository
   $ rhg root
   $TESTTMP/repository

 Unwritable file descriptor
   $ rhg root > /dev/full
   abort: No space left on device (os error 28)
   [255]

 Deleted repository
   $ rm -rf `pwd`
   $ rhg root
   abort: $ENOENT$: current directory
   [255]

 Listing tracked files
   $ cd $TESTTMP
   $ hg init repository
   $ cd repository
   $ for i in 1 2 3; do
   > echo $i >> file$i
   > hg add file$i
   > done
   > hg commit -m "commit $i" -q

 Listing tracked files from root
   $ rhg files
   file1
   file2
   file3

 Listing tracked files from subdirectory
   $ mkdir -p path/to/directory
   $ cd path/to/directory
   $ rhg files
   ../../../file1
   ../../../file2
   ../../../file3

 Listing tracked files through broken pipe
   $ rhg files | head -n 1
   ../../../file1

 Debugging data in inline index
   $ cd $TESTTMP
   $ rm -rf repository
   $ hg init repository
   $ cd repository
   $ for i in 1 2 3 4 5 6; do
   > echo $i >> file-$i
   > hg add file-$i
   > hg commit -m "Commit $i" -q
   > done
   $ rhg debugdata -c 2
   8d0267cb034247ebfa5ee58ce59e22e57a492297
   test
   0 0
   file-3

   Commit 3 (no-eol)
   $ rhg debugdata -m 2
   file-1\x00b8e02f6433738021a065f94175c7cd23db5f05be (esc)
   file-2\x005d9299349fc01ddd25d0070d149b124d8f10411e (esc)
   file-3\x002661d26c649684b482d10f91960cc3db683c38b4 (esc)

 Debugging with full node id
   $ rhg debugdata -c `hg log -r 0 -T '{node}'`
   d1d1c679d3053e8926061b6f45ca52009f011e3f
   test
   0 0
   file-1

   Commit 1 (no-eol)

 Specifying revisions by changeset ID
   $ hg log -T '{node}\n'
   c6ad58c44207b6ff8a4fbbca7045a5edaa7e908b
   d654274993d0149eecc3cc03214f598320211900
   f646af7e96481d3a5470b695cf30ad8e3ab6c575
   cf8b83f14ead62b374b6e91a0e9303b85dfd9ed7
   91c6f6e73e39318534dc415ea4e8a09c99cd74d6
   6ae9681c6d30389694d8701faf24b583cf3ccafe
   $ rhg files -r cf8b83
   file-1
   file-2
   file-3
   $ rhg cat -r cf8b83 file-2
   2
   $ rhg cat -r c file-2
   abort: ambiguous revision identifier c
   [255]
   $ rhg cat -r d file-2
   2

 Cat files
   $ cd $TESTTMP
   $ rm -rf repository
   $ hg init repository
   $ cd repository
   $ echo "original content" > original
   $ hg add original
   $ hg commit -m "add original" original
   $ rhg cat -r 0 original
   original content
 Cat copied file should not display copy metadata
   $ hg copy original copy_of_original
   $ hg commit -m "add copy of original"
   $ rhg cat -r 1 copy_of_original
   original content

 Requirements
   $ rhg debugrequirements
   dotencode
   fncache
   generaldelta
   revlogv1
   sparserevlog
   store

   $ echo indoor-pool >> .hg/requires
   $ rhg files
   [252]

   $ rhg cat -r 1 copy_of_original
   [252]

   $ rhg debugrequirements
   [252]

   $ echo -e '\xFF' >> .hg/requires
   $ rhg debugrequirements
   abort: corrupted repository: parse error in 'requires' file
   [255]

 Persistent nodemap
   $ cd $TESTTMP
   $ rm -rf repository
   $ hg init repository
   $ cd repository
   $ rhg debugrequirements | grep nodemap
   [1]
   $ hg debugbuilddag .+5000 --overwritten-file --config "storage.revlog.nodemap.mode=warn"
   $ hg id -r tip
   c3ae8dec9fad tip
   $ ls .hg/store/00changelog*
   .hg/store/00changelog.d
   .hg/store/00changelog.i
   $ rhg files -r c3ae8dec9fad
   of

   $ cd $TESTTMP
   $ rm -rf repository
   $ hg --config format.use-persistent-nodemap=True init repository
   $ cd repository
   $ rhg debugrequirements | grep nodemap
   persistent-nodemap
   $ hg debugbuilddag .+5000 --overwritten-file --config "storage.revlog.nodemap.mode=warn"
   $ hg id -r tip
   c3ae8dec9fad tip
   $ ls .hg/store/00changelog*
   .hg/store/00changelog-*.nd (glob)
   .hg/store/00changelog.d
   .hg/store/00changelog.i
   .hg/store/00changelog.n

 Specifying revisions by changeset ID
   $ rhg files -r c3ae8dec9fad
   of
   $ rhg cat -r c3ae8dec9fad of
   r5000

 Create a shared repository

   $ echo "[extensions]" >> $HGRCPATH
   $ echo "share = " >> $HGRCPATH

   $ cd $TESTTMP
   $ hg init repo1
   $ cd repo1
   $ echo a > a
   $ hg commit -A -m'init'
   adding a

   $ cd ..
   $ hg share repo1 repo2
   updating working directory
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved

 And check that basic rhg commands work with sharing

   $ cd repo2
   $ rhg files
   a
   $ rhg cat -r 0 a
   a

 Same with relative sharing

   $ cd ..
   $ hg share repo2 repo3 --relative
   updating working directory
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved

   $ cd repo3
   $ rhg files
   a
   $ rhg cat -r 0 a
   a

 Same with share-safe

   $ echo "[format]" >> $HGRCPATH
   $ echo "use-share-safe = True" >> $HGRCPATH

   $ cd $TESTTMP
   $ hg init repo4
   $ cd repo4
   $ echo a > a
   $ hg commit -A -m'init'
   adding a

   $ cd ..
   $ hg share repo4 repo5
   updating working directory
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved

 And check that basic rhg commands work with sharing

   $ cd repo5
   $ rhg files
-  [252]
+  a
   $ rhg cat -r 0 a
-  [252]
+  a