@@ -1,140 +1,170 @@
|
1 | 1 | use crate::errors::{HgError, IoResultExt}; |
|
2 | 2 | use crate::requirements; |
|
3 | 3 | use crate::utils::files::get_path_from_bytes; |
|
4 | 4 | use memmap::{Mmap, MmapOptions}; |
|
5 | 5 | use std::collections::HashSet; |
|
6 | 6 | use std::path::{Path, PathBuf}; |
|
7 | 7 | |
|
8 | 8 | /// A repository on disk |
|
9 | 9 | pub struct Repo { |
|
10 | 10 | working_directory: PathBuf, |
|
11 | 11 | dot_hg: PathBuf, |
|
12 | 12 | store: PathBuf, |
|
13 | 13 | requirements: HashSet<String>, |
|
14 | 14 | } |
|
15 | 15 | |
|
16 | 16 | #[derive(Debug, derive_more::From)] |
|
17 | 17 | pub enum RepoFindError { |
|
18 | 18 | NotFoundInCurrentDirectoryOrAncestors { |
|
19 | 19 | current_directory: PathBuf, |
|
20 | 20 | }, |
|
21 | 21 | #[from] |
|
22 | 22 | Other(HgError), |
|
23 | 23 | } |
|
24 | 24 | |
|
25 | 25 | /// Filesystem access abstraction for the contents of a given "base" directory |
|
26 | 26 | #[derive(Clone, Copy)] |
|
27 | 27 | pub(crate) struct Vfs<'a> { |
|
28 | 28 | base: &'a Path, |
|
29 | 29 | } |
|
30 | 30 | |
|
31 | 31 | impl Repo { |
|
32 | 32 | /// Search the current directory and its ancestors for a repository: |
|
33 | 33 | /// a working directory that contains a `.hg` sub-directory. |
|
34 | 34 | pub fn find() -> Result<Self, RepoFindError> { |
|
35 | 35 | let current_directory = crate::utils::current_dir()?; |
|
36 | 36 | // ancestors() is inclusive: it first yields `current_directory` as-is. |
|
37 | 37 | for ancestor in current_directory.ancestors() { |
|
38 | 38 | if ancestor.join(".hg").is_dir() { |
|
39 | 39 | return Ok(Self::new_at_path(ancestor.to_owned())?); |
|
40 | 40 | } |
|
41 | 41 | } |
|
42 | 42 | Err(RepoFindError::NotFoundInCurrentDirectoryOrAncestors { |
|
43 | 43 | current_directory, |
|
44 | 44 | }) |
|
45 | 45 | } |
|
46 | 46 | |
|
47 | 47 | /// To be called after checking that `.hg` is a sub-directory |
|
48 | 48 | fn new_at_path(working_directory: PathBuf) -> Result<Self, HgError> { |
|
49 | 49 | let dot_hg = working_directory.join(".hg"); |
|
50 | ||
|
50 | 51 | let hg_vfs = Vfs { base: &dot_hg }; |
|
51 | let reqs = requirements::load_if_exists(hg_vfs)?; | |
|
52 | let mut reqs = requirements::load_if_exists(hg_vfs)?; | |
|
52 | 53 | let relative = |
|
53 | 54 | reqs.contains(requirements::RELATIVE_SHARED_REQUIREMENT); |
|
54 | 55 | let shared = |
|
55 | 56 | reqs.contains(requirements::SHARED_REQUIREMENT) || relative; |
|
57 | ||
|
58 | // From `mercurial/localrepo.py`: | |
|
59 | // | |
|
60 | // if .hg/requires contains the sharesafe requirement, it means | |
|
61 | // there exists a `.hg/store/requires` too and we should read it | |
|
62 | 63 | // NOTE: presence of SHARESAFE_REQUIREMENT implies that store requirement | |
|
63 | // is present. We never write SHARESAFE_REQUIREMENT for a repo if store | |
|
64 | 65 | // is not present, refer to checkrequirementscompat() for that | |
|
65 | // | |
|
66 | // However, if SHARESAFE_REQUIREMENT is not present, it means that the | |
|
67 | // repository was shared the old way. We check the share source | |
|
68 | // .hg/requires for SHARESAFE_REQUIREMENT to detect whether the | |
|
69 | // current repository needs to be reshared | |
|
70 | let share_safe = reqs.contains(requirements::SHARESAFE_REQUIREMENT); | |
|
71 | ||
|
56 | 72 | let store_path; |
|
57 | 73 | if !shared { |
|
58 | 74 | store_path = dot_hg.join("store"); |
|
75 | if share_safe { | |
|
76 | reqs.extend(requirements::load(Vfs { base: &store_path })?); | |
|
77 | } | |
|
59 | 78 | } else { |
|
60 | 79 | let bytes = hg_vfs.read("sharedpath")?; |
|
61 | 80 | let mut shared_path = get_path_from_bytes(&bytes).to_owned(); |
|
62 | 81 | if relative { |
|
63 | 82 | shared_path = dot_hg.join(shared_path) |
|
64 | 83 | } |
|
65 | 84 | if !shared_path.is_dir() { |
|
66 | 85 | return Err(HgError::corrupted(format!( |
|
67 | 86 | ".hg/sharedpath points to nonexistent directory {}", |
|
68 | 87 | shared_path.display() |
|
69 | 88 | ))); |
|
70 | 89 | } |
|
71 | 90 | |
|
72 | 91 | store_path = shared_path.join("store"); |
|
92 | ||
|
93 | let source_is_share_safe = | |
|
94 | requirements::load(Vfs { base: &shared_path })? | |
|
95 | .contains(requirements::SHARESAFE_REQUIREMENT); | |
|
96 | ||
|
97 | // TODO: support for `share.safe-mismatch.*` config | |
|
98 | if share_safe && !source_is_share_safe { | |
|
99 | return Err(HgError::unsupported("share-safe downgrade")); | |
|
100 | } else if source_is_share_safe && !share_safe { | |
|
101 | return Err(HgError::unsupported("share-safe upgrade")); | |
|
102 | } | |
|
73 | 103 | } |
|
74 | 104 | |
|
75 | 105 | let repo = Self { |
|
76 | 106 | requirements: reqs, |
|
77 | 107 | working_directory, |
|
78 | 108 | store: store_path, |
|
79 | 109 | dot_hg, |
|
80 | 110 | }; |
|
81 | 111 | |
|
82 | 112 | requirements::check(&repo)?; |
|
83 | 113 | |
|
84 | 114 | Ok(repo) |
|
85 | 115 | } |
|
86 | 116 | |
|
87 | 117 | pub fn working_directory_path(&self) -> &Path { |
|
88 | 118 | &self.working_directory |
|
89 | 119 | } |
|
90 | 120 | |
|
91 | 121 | pub fn requirements(&self) -> &HashSet<String> { |
|
92 | 122 | &self.requirements |
|
93 | 123 | } |
|
94 | 124 | |
|
95 | 125 | /// For accessing repository files (in `.hg`), except for the store |
|
96 | 126 | /// (`.hg/store`). |
|
97 | 127 | pub(crate) fn hg_vfs(&self) -> Vfs<'_> { |
|
98 | 128 | Vfs { base: &self.dot_hg } |
|
99 | 129 | } |
|
100 | 130 | |
|
101 | 131 | /// For accessing repository store files (in `.hg/store`) |
|
102 | 132 | pub(crate) fn store_vfs(&self) -> Vfs<'_> { |
|
103 | 133 | Vfs { base: &self.store } |
|
104 | 134 | } |
|
105 | 135 | |
|
106 | 136 | /// For accessing the working copy |
|
107 | 137 | |
|
108 | 138 | // The underscore prefix silences the "never used" warning. Remove before |
|
109 | 139 | // using. |
|
110 | 140 | pub(crate) fn _working_directory_vfs(&self) -> Vfs<'_> { |
|
111 | 141 | Vfs { |
|
112 | 142 | base: &self.working_directory, |
|
113 | 143 | } |
|
114 | 144 | } |
|
115 | 145 | } |
|
116 | 146 | |
|
117 | 147 | impl Vfs<'_> { |
|
118 | 148 | pub(crate) fn join(&self, relative_path: impl AsRef<Path>) -> PathBuf { |
|
119 | 149 | self.base.join(relative_path) |
|
120 | 150 | } |
|
121 | 151 | |
|
122 | 152 | pub(crate) fn read( |
|
123 | 153 | &self, |
|
124 | 154 | relative_path: impl AsRef<Path>, |
|
125 | 155 | ) -> Result<Vec<u8>, HgError> { |
|
126 | 156 | let path = self.join(relative_path); |
|
127 | 157 | std::fs::read(&path).for_file(&path) |
|
128 | 158 | } |
|
129 | 159 | |
|
130 | 160 | pub(crate) fn mmap_open( |
|
131 | 161 | &self, |
|
132 | 162 | relative_path: impl AsRef<Path>, |
|
133 | 163 | ) -> Result<Mmap, HgError> { |
|
134 | 164 | let path = self.base.join(relative_path); |
|
135 | 165 | let file = std::fs::File::open(&path).for_file(&path)?; |
|
136 | 166 | // TODO: what are the safety requirements here? |
|
137 | 167 | let mmap = unsafe { MmapOptions::new().map(&file) }.for_file(&path)?; |
|
138 | 168 | Ok(mmap) |
|
139 | 169 | } |
|
140 | 170 | } |
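
The `new_at_path` logic added above resolves where the store lives for plain, shared, and relatively-shared repositories. Below is a minimal, self-contained sketch of that resolution order; the function name `resolve_store_path` is hypothetical, plain std types stand in for the crate's Vfs and HgError, and the share-safe mismatch checks are omitted.

use std::collections::HashSet;
use std::path::{Path, PathBuf};

fn resolve_store_path(
    dot_hg: &Path,
    reqs: &HashSet<String>,
) -> Result<PathBuf, String> {
    let relative = reqs.contains("relshared");
    let shared = reqs.contains("shared") || relative;

    if !shared {
        // Plain repository: the store lives directly under `.hg`.
        return Ok(dot_hg.join("store"));
    }

    // Shared repository: `.hg/sharedpath` records the share source's `.hg`.
    let bytes =
        std::fs::read(dot_hg.join("sharedpath")).map_err(|e| e.to_string())?;
    let mut shared_path =
        PathBuf::from(String::from_utf8_lossy(&bytes).trim_end().to_string());
    if relative {
        // With the `relshared` requirement the recorded path is relative
        // to `.hg` itself.
        shared_path = dot_hg.join(&shared_path);
    }
    if !shared_path.is_dir() {
        return Err(format!(
            ".hg/sharedpath points to nonexistent directory {}",
            shared_path.display()
        ));
    }
    Ok(shared_path.join("store"))
}

fn main() {
    // Usage example: a non-shared repository keeps its store in `.hg/store`.
    let reqs: HashSet<String> =
        ["revlogv1", "store"].iter().map(|s| s.to_string()).collect();
    println!("{:?}", resolve_store_path(Path::new("/tmp/repo/.hg"), &reqs));
}
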
@@ -1,133 +1,138 @@
|
1 | 1 | use crate::errors::{HgError, HgResultExt}; |
|
2 | 2 | use crate::repo::{Repo, Vfs}; |
|
3 | 3 | use std::collections::HashSet; |
|
4 | 4 | |
|
5 | 5 | fn parse(bytes: &[u8]) -> Result<HashSet<String>, HgError> { |
|
6 | 6 | // The Python code reading this file uses `str.splitlines` |
|
7 | 7 | // which looks for a number of line separators (even including a couple of |
|
8 | 8 | // non-ASCII ones), but Python code writing it always uses `\n`. |
|
9 | 9 | let lines = bytes.split(|&byte| byte == b'\n'); |
|
10 | 10 | |
|
11 | 11 | lines |
|
12 | 12 | .filter(|line| !line.is_empty()) |
|
13 | 13 | .map(|line| { |
|
14 | 14 | // Python uses Unicode `str.isalnum` but feature names are all |
|
15 | 15 | // ASCII |
|
16 | 16 | if line[0].is_ascii_alphanumeric() && line.is_ascii() { |
|
17 | 17 | Ok(String::from_utf8(line.into()).unwrap()) |
|
18 | 18 | } else { |
|
19 | 19 | Err(HgError::corrupted("parse error in 'requires' file")) |
|
20 | 20 | } |
|
21 | 21 | }) |
|
22 | 22 | .collect() |
|
23 | 23 | } |
|
24 | 24 | |
|
25 | pub(crate) fn load(hg_vfs: Vfs) -> Result<HashSet<String>, HgError> { | |
|
26 | parse(&hg_vfs.read("requires")?) | |
|
27 | } | |
|
28 | ||
|
25 | 29 | pub(crate) fn load_if_exists(hg_vfs: Vfs) -> Result<HashSet<String>, HgError> { |
|
26 | 30 | if let Some(bytes) = hg_vfs.read("requires").io_not_found_as_none()? { |
|
27 | 31 | parse(&bytes) |
|
28 | 32 | } else { |
|
29 | 33 | // Treat a missing file the same as an empty file. |
|
30 | 34 | // From `mercurial/localrepo.py`: |
|
31 | 35 | // > requires file contains a newline-delimited list of |
|
32 | 36 | // > features/capabilities the opener (us) must have in order to use |
|
33 | 37 | // > the repository. This file was introduced in Mercurial 0.9.2, |
|
34 | 38 | // > which means very old repositories may not have one. We assume |
|
35 | 39 | // > a missing file translates to no requirements. |
|
36 | 40 | Ok(HashSet::new()) |
|
37 | 41 | } |
|
38 | 42 | } |
|
39 | 43 | |
|
40 | 44 | pub(crate) fn check(repo: &Repo) -> Result<(), HgError> { |
|
41 | 45 | for feature in repo.requirements() { |
|
42 | 46 | if !SUPPORTED.contains(&feature.as_str()) { |
|
43 | 47 | // TODO: collect all unknown features and include them in the |
|
44 | 48 | // error message? |
|
45 | 49 | return Err(HgError::UnsupportedFeature(format!( |
|
46 | 50 | "repository requires feature unknown to this Mercurial: {}", |
|
47 | 51 | feature |
|
48 | 52 | ))); |
|
49 | 53 | } |
|
50 | 54 | } |
|
51 | 55 | Ok(()) |
|
52 | 56 | } |
|
53 | 57 | |
|
54 | 58 | // TODO: set this to actually-supported features |
|
55 | 59 | const SUPPORTED: &[&str] = &[ |
|
56 | 60 | "dotencode", |
|
57 | 61 | "fncache", |
|
58 | 62 | "generaldelta", |
|
59 | 63 | "revlogv1", |
|
60 | 64 | SHARED_REQUIREMENT, |
|
65 | SHARESAFE_REQUIREMENT, | |
|
61 | 66 | SPARSEREVLOG_REQUIREMENT, |
|
62 | 67 | RELATIVE_SHARED_REQUIREMENT, |
|
63 | 68 | "store", |
|
64 | 69 | // As of this writing everything rhg does is read-only. |
|
65 | 70 | // When it starts writing to the repository, it'll need to either keep the |
|
66 | 71 | // persistent nodemap up to date or remove this entry: |
|
67 | 72 | "persistent-nodemap", |
|
68 | 73 | ]; |
|
69 | 74 | |
|
70 | 75 | // Copied from mercurial/requirements.py: |
|
71 | 76 | |
|
72 | 77 | /// When narrowing is finalized and no longer subject to format changes, |
|
73 | 78 | /// we should move this to just "narrow" or similar. |
|
74 | 79 | #[allow(unused)] |
|
75 | 80 | pub(crate) const NARROW_REQUIREMENT: &str = "narrowhg-experimental"; |
|
76 | 81 | |
|
77 | 82 | /// Enables sparse working directory usage |
|
78 | 83 | #[allow(unused)] |
|
79 | 84 | pub(crate) const SPARSE_REQUIREMENT: &str = "exp-sparse"; |
|
80 | 85 | |
|
81 | 86 | /// Enables the internal phase which is used to hide changesets instead |
|
82 | 87 | /// of stripping them |
|
83 | 88 | #[allow(unused)] |
|
84 | 89 | pub(crate) const INTERNAL_PHASE_REQUIREMENT: &str = "internal-phase"; |
|
85 | 90 | |
|
86 | 91 | /// Stores manifest in Tree structure |
|
87 | 92 | #[allow(unused)] |
|
88 | 93 | pub(crate) const TREEMANIFEST_REQUIREMENT: &str = "treemanifest"; |
|
89 | 94 | |
|
90 | 95 | /// Increment the sub-version when the revlog v2 format changes to lock out old |
|
91 | 96 | /// clients. |
|
92 | 97 | #[allow(unused)] |
|
93 | 98 | pub(crate) const REVLOGV2_REQUIREMENT: &str = "exp-revlogv2.1"; |
|
94 | 99 | |
|
95 | 100 | /// A repository with the sparserevlog feature will have delta chains that |
|
96 | 101 | /// can spread over a larger span. Sparse reading cuts these large spans into |
|
97 | 102 | /// pieces, so that each piece isn't too big. |
|
98 | 103 | /// Without the sparserevlog capability, reading from the repository could use |
|
99 | 104 | /// huge amounts of memory, because the whole span would be read at once, |
|
100 | 105 | /// including all the intermediate revisions that aren't pertinent for the |
|
101 | 106 | /// chain. This is why once a repository has enabled sparse-read, it becomes |
|
102 | 107 | /// required. |
|
103 | 108 | #[allow(unused)] |
|
104 | 109 | pub(crate) const SPARSEREVLOG_REQUIREMENT: &str = "sparserevlog"; |
|
105 | 110 | |
|
106 | 111 | /// A repository with the sidedataflag requirement allows storing extra |

107 | 112 | /// information for revisions without altering their original hashes. |
|
108 | 113 | #[allow(unused)] |
|
109 | 114 | pub(crate) const SIDEDATA_REQUIREMENT: &str = "exp-sidedata-flag"; |
|
110 | 115 | |
|
111 | 116 | /// A repository with the copies-sidedata-changeset requirement will store |
|
112 | 117 | /// copies related information in changeset's sidedata. |
|
113 | 118 | #[allow(unused)] |
|
114 | 119 | pub(crate) const COPIESSDC_REQUIREMENT: &str = "exp-copies-sidedata-changeset"; |
|
115 | 120 | |
|
116 | 121 | /// The repository uses a persistent nodemap for the changelog and the manifest. |
|
117 | 122 | #[allow(unused)] |
|
118 | 123 | pub(crate) const NODEMAP_REQUIREMENT: &str = "persistent-nodemap"; |
|
119 | 124 | |
|
120 | 125 | /// Denotes that the current repository is a share |
|
121 | 126 | #[allow(unused)] |
|
122 | 127 | pub(crate) const SHARED_REQUIREMENT: &str = "shared"; |
|
123 | 128 | |
|
124 | 129 | /// Denotes that the current repository is a share and the shared source path is |
|
125 | 130 | /// relative to the current repository root path |
|
126 | 131 | #[allow(unused)] |
|
127 | 132 | pub(crate) const RELATIVE_SHARED_REQUIREMENT: &str = "relshared"; |
|
128 | 133 | |
|
129 | 134 | /// A repository with share implemented safely. The repository has different |
|
130 | 135 | /// store and working copy requirements i.e. both `.hg/requires` and |
|
131 | 136 | /// `.hg/store/requires` are present. |
|
132 | 137 | #[allow(unused)] |
|
133 | pub(crate) const SHARESAFE_REQUIREMENT: &str = "

138 | pub(crate) const SHARESAFE_REQUIREMENT: &str = "share-safe"; |
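
The localrepo.py quote earlier in this file documents `requires` as a newline-delimited list of feature names, where a missing file means no requirements. The following is a minimal, self-contained sketch of that parsing rule; `parse_requires` is an illustrative name, and plain std types replace the crate's Vfs and HgError.

use std::collections::HashSet;

fn parse_requires(bytes: &[u8]) -> Result<HashSet<String>, String> {
    bytes
        .split(|&byte| byte == b'\n')
        .filter(|line| !line.is_empty())
        .map(|line| {
            // Each entry must start with an ASCII alphanumeric byte and be
            // entirely ASCII, mirroring the check in `parse` above.
            if line[0].is_ascii_alphanumeric() && line.is_ascii() {
                Ok(String::from_utf8(line.to_vec()).unwrap())
            } else {
                Err("parse error in 'requires' file".to_string())
            }
        })
        .collect()
}

fn main() {
    // A `requires` file like the one `hg init` produces in the test below.
    let requires =
        b"dotencode\nfncache\ngeneraldelta\nrevlogv1\nsparserevlog\nstore\n";
    assert_eq!(parse_requires(requires).unwrap().len(), 6);
    // A missing file is treated the same as an empty one: no requirements.
    assert!(parse_requires(b"").unwrap().is_empty());
}
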
@@ -1,262 +1,262 @@
|
1 | 1 | #require rust |
|
2 | 2 | |
|
3 | 3 | Define an rhg function that will only run if rhg exists |
|
4 | 4 | $ rhg() { |
|
5 | 5 | > if [ -f "$RUNTESTDIR/../rust/target/release/rhg" ]; then |
|
6 | 6 | > "$RUNTESTDIR/../rust/target/release/rhg" "$@" |
|
7 | 7 | > else |
|
8 | 8 | > echo "skipped: Cannot find rhg. Try to run cargo build in rust/rhg." |
|
9 | 9 | > exit 80 |
|
10 | 10 | > fi |
|
11 | 11 | > } |
|
12 | 12 | |
|
13 | 13 | Unimplemented command |
|
14 | 14 | $ rhg unimplemented-command |
|
15 | 15 | error: Found argument 'unimplemented-command' which wasn't expected, or isn't valid in this context |
|
16 | 16 | |
|
17 | 17 | USAGE: |
|
18 | 18 | rhg <SUBCOMMAND> |
|
19 | 19 | |
|
20 | 20 | For more information try --help |
|
21 | 21 | [252] |
|
22 | 22 | |
|
23 | 23 | Finding root |
|
24 | 24 | $ rhg root |
|
25 | 25 | abort: no repository found in '$TESTTMP' (.hg not found)! |
|
26 | 26 | [255] |
|
27 | 27 | |
|
28 | 28 | $ hg init repository |
|
29 | 29 | $ cd repository |
|
30 | 30 | $ rhg root |
|
31 | 31 | $TESTTMP/repository |
|
32 | 32 | |
|
33 | 33 | Unwritable file descriptor |
|
34 | 34 | $ rhg root > /dev/full |
|
35 | 35 | abort: No space left on device (os error 28) |
|
36 | 36 | [255] |
|
37 | 37 | |
|
38 | 38 | Deleted repository |
|
39 | 39 | $ rm -rf `pwd` |
|
40 | 40 | $ rhg root |
|
41 | 41 | abort: $ENOENT$: current directory |
|
42 | 42 | [255] |
|
43 | 43 | |
|
44 | 44 | Listing tracked files |
|
45 | 45 | $ cd $TESTTMP |
|
46 | 46 | $ hg init repository |
|
47 | 47 | $ cd repository |
|
48 | 48 | $ for i in 1 2 3; do |
|
49 | 49 | > echo $i >> file$i |
|
50 | 50 | > hg add file$i |
|
51 | 51 | > done |
|
52 | 52 | > hg commit -m "commit $i" -q |
|
53 | 53 | |
|
54 | 54 | Listing tracked files from root |
|
55 | 55 | $ rhg files |
|
56 | 56 | file1 |
|
57 | 57 | file2 |
|
58 | 58 | file3 |
|
59 | 59 | |
|
60 | 60 | Listing tracked files from subdirectory |
|
61 | 61 | $ mkdir -p path/to/directory |
|
62 | 62 | $ cd path/to/directory |
|
63 | 63 | $ rhg files |
|
64 | 64 | ../../../file1 |
|
65 | 65 | ../../../file2 |
|
66 | 66 | ../../../file3 |
|
67 | 67 | |
|
68 | 68 | Listing tracked files through broken pipe |
|
69 | 69 | $ rhg files | head -n 1 |
|
70 | 70 | ../../../file1 |
|
71 | 71 | |
|
72 | 72 | Debugging data in inline index |
|
73 | 73 | $ cd $TESTTMP |
|
74 | 74 | $ rm -rf repository |
|
75 | 75 | $ hg init repository |
|
76 | 76 | $ cd repository |
|
77 | 77 | $ for i in 1 2 3 4 5 6; do |
|
78 | 78 | > echo $i >> file-$i |
|
79 | 79 | > hg add file-$i |
|
80 | 80 | > hg commit -m "Commit $i" -q |
|
81 | 81 | > done |
|
82 | 82 | $ rhg debugdata -c 2 |
|
83 | 83 | 8d0267cb034247ebfa5ee58ce59e22e57a492297 |
|
84 | 84 | test |
|
85 | 85 | 0 0 |
|
86 | 86 | file-3 |
|
87 | 87 | |
|
88 | 88 | Commit 3 (no-eol) |
|
89 | 89 | $ rhg debugdata -m 2 |
|
90 | 90 | file-1\x00b8e02f6433738021a065f94175c7cd23db5f05be (esc) |
|
91 | 91 | file-2\x005d9299349fc01ddd25d0070d149b124d8f10411e (esc) |
|
92 | 92 | file-3\x002661d26c649684b482d10f91960cc3db683c38b4 (esc) |
|
93 | 93 | |
|
94 | 94 | Debugging with full node id |
|
95 | 95 | $ rhg debugdata -c `hg log -r 0 -T '{node}'` |
|
96 | 96 | d1d1c679d3053e8926061b6f45ca52009f011e3f |
|
97 | 97 | test |
|
98 | 98 | 0 0 |
|
99 | 99 | file-1 |
|
100 | 100 | |
|
101 | 101 | Commit 1 (no-eol) |
|
102 | 102 | |
|
103 | 103 | Specifying revisions by changeset ID |
|
104 | 104 | $ hg log -T '{node}\n' |
|
105 | 105 | c6ad58c44207b6ff8a4fbbca7045a5edaa7e908b |
|
106 | 106 | d654274993d0149eecc3cc03214f598320211900 |
|
107 | 107 | f646af7e96481d3a5470b695cf30ad8e3ab6c575 |
|
108 | 108 | cf8b83f14ead62b374b6e91a0e9303b85dfd9ed7 |
|
109 | 109 | 91c6f6e73e39318534dc415ea4e8a09c99cd74d6 |
|
110 | 110 | 6ae9681c6d30389694d8701faf24b583cf3ccafe |
|
111 | 111 | $ rhg files -r cf8b83 |
|
112 | 112 | file-1 |
|
113 | 113 | file-2 |
|
114 | 114 | file-3 |
|
115 | 115 | $ rhg cat -r cf8b83 file-2 |
|
116 | 116 | 2 |
|
117 | 117 | $ rhg cat -r c file-2 |
|
118 | 118 | abort: ambiguous revision identifier c |
|
119 | 119 | [255] |
|
120 | 120 | $ rhg cat -r d file-2 |
|
121 | 121 | 2 |
|
122 | 122 | |
|
123 | 123 | Cat files |
|
124 | 124 | $ cd $TESTTMP |
|
125 | 125 | $ rm -rf repository |
|
126 | 126 | $ hg init repository |
|
127 | 127 | $ cd repository |
|
128 | 128 | $ echo "original content" > original |
|
129 | 129 | $ hg add original |
|
130 | 130 | $ hg commit -m "add original" original |
|
131 | 131 | $ rhg cat -r 0 original |
|
132 | 132 | original content |
|
133 | 133 | Cat copied file should not display copy metadata |
|
134 | 134 | $ hg copy original copy_of_original |
|
135 | 135 | $ hg commit -m "add copy of original" |
|
136 | 136 | $ rhg cat -r 1 copy_of_original |
|
137 | 137 | original content |
|
138 | 138 | |
|
139 | 139 | Requirements |
|
140 | 140 | $ rhg debugrequirements |
|
141 | 141 | dotencode |
|
142 | 142 | fncache |
|
143 | 143 | generaldelta |
|
144 | 144 | revlogv1 |
|
145 | 145 | sparserevlog |
|
146 | 146 | store |
|
147 | 147 | |
|
148 | 148 | $ echo indoor-pool >> .hg/requires |
|
149 | 149 | $ rhg files |
|
150 | 150 | [252] |
|
151 | 151 | |
|
152 | 152 | $ rhg cat -r 1 copy_of_original |
|
153 | 153 | [252] |
|
154 | 154 | |
|
155 | 155 | $ rhg debugrequirements |
|
156 | 156 | [252] |
|
157 | 157 | |
|
158 | 158 | $ echo -e '\xFF' >> .hg/requires |
|
159 | 159 | $ rhg debugrequirements |
|
160 | 160 | abort: corrupted repository: parse error in 'requires' file |
|
161 | 161 | [255] |
|
162 | 162 | |
|
163 | 163 | Persistent nodemap |
|
164 | 164 | $ cd $TESTTMP |
|
165 | 165 | $ rm -rf repository |
|
166 | 166 | $ hg init repository |
|
167 | 167 | $ cd repository |
|
168 | 168 | $ rhg debugrequirements | grep nodemap |
|
169 | 169 | [1] |
|
170 | 170 | $ hg debugbuilddag .+5000 --overwritten-file --config "storage.revlog.nodemap.mode=warn" |
|
171 | 171 | $ hg id -r tip |
|
172 | 172 | c3ae8dec9fad tip |
|
173 | 173 | $ ls .hg/store/00changelog* |
|
174 | 174 | .hg/store/00changelog.d |
|
175 | 175 | .hg/store/00changelog.i |
|
176 | 176 | $ rhg files -r c3ae8dec9fad |
|
177 | 177 | of |
|
178 | 178 | |
|
179 | 179 | $ cd $TESTTMP |
|
180 | 180 | $ rm -rf repository |
|
181 | 181 | $ hg --config format.use-persistent-nodemap=True init repository |
|
182 | 182 | $ cd repository |
|
183 | 183 | $ rhg debugrequirements | grep nodemap |
|
184 | 184 | persistent-nodemap |
|
185 | 185 | $ hg debugbuilddag .+5000 --overwritten-file --config "storage.revlog.nodemap.mode=warn" |
|
186 | 186 | $ hg id -r tip |
|
187 | 187 | c3ae8dec9fad tip |
|
188 | 188 | $ ls .hg/store/00changelog* |
|
189 | 189 | .hg/store/00changelog-*.nd (glob) |
|
190 | 190 | .hg/store/00changelog.d |
|
191 | 191 | .hg/store/00changelog.i |
|
192 | 192 | .hg/store/00changelog.n |
|
193 | 193 | |
|
194 | 194 | Specifying revisions by changeset ID |
|
195 | 195 | $ rhg files -r c3ae8dec9fad |
|
196 | 196 | of |
|
197 | 197 | $ rhg cat -r c3ae8dec9fad of |
|
198 | 198 | r5000 |
|
199 | 199 | |
|
200 | 200 | Create a shared repository |
|
201 | 201 | |
|
202 | 202 | $ echo "[extensions]" >> $HGRCPATH |
|
203 | 203 | $ echo "share = " >> $HGRCPATH |
|
204 | 204 | |
|
205 | 205 | $ cd $TESTTMP |
|
206 | 206 | $ hg init repo1 |
|
207 | 207 | $ cd repo1 |
|
208 | 208 | $ echo a > a |
|
209 | 209 | $ hg commit -A -m'init' |
|
210 | 210 | adding a |
|
211 | 211 | |
|
212 | 212 | $ cd .. |
|
213 | 213 | $ hg share repo1 repo2 |
|
214 | 214 | updating working directory |
|
215 | 215 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
216 | 216 | |
|
217 | 217 | And check that basic rhg commands work with sharing |
|
218 | 218 | |
|
219 | 219 | $ cd repo2 |
|
220 | 220 | $ rhg files |
|
221 | 221 | a |
|
222 | 222 | $ rhg cat -r 0 a |
|
223 | 223 | a |
|
224 | 224 | |
|
225 | 225 | Same with relative sharing |
|
226 | 226 | |
|
227 | 227 | $ cd .. |
|
228 | 228 | $ hg share repo2 repo3 --relative |
|
229 | 229 | updating working directory |
|
230 | 230 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
231 | 231 | |
|
232 | 232 | $ cd repo3 |
|
233 | 233 | $ rhg files |
|
234 | 234 | a |
|
235 | 235 | $ rhg cat -r 0 a |
|
236 | 236 | a |
|
237 | 237 | |
|
238 | 238 | Same with share-safe |
|
239 | 239 | |
|
240 | 240 | $ echo "[format]" >> $HGRCPATH |
|
241 | 241 | $ echo "use-share-safe = True" >> $HGRCPATH |
|
242 | 242 | |
|
243 | 243 | $ cd $TESTTMP |
|
244 | 244 | $ hg init repo4 |
|
245 | 245 | $ cd repo4 |
|
246 | 246 | $ echo a > a |
|
247 | 247 | $ hg commit -A -m'init' |
|
248 | 248 | adding a |
|
249 | 249 | |
|
250 | 250 | $ cd .. |
|
251 | 251 | $ hg share repo4 repo5 |
|
252 | 252 | updating working directory |
|
253 | 253 | 1 files updated, 0 files merged, 0 files removed, 0 files unresolved |
|
254 | 254 | |
|
255 | 255 | And check that basic rhg commands work with sharing |
|
256 | 256 | |
|
257 | 257 | $ cd repo5 |
|
258 | 258 | $ rhg files |
|
259 | [252] | |
|
259 | a | |
|
260 | 260 | $ rhg cat -r 0 a |
|
261 | [252] | |
|
261 | a | |
|
262 | 262 |