branching: merge default into stable

Raphaël Gomès
r50807:a3356ab6 (merge, 6.3rc0, stable)
@@ -0,0 +1,141 @@
# pull_logger.py - Logs pulls to a JSON-line file in the repo's VFS.
#
# Copyright 2022 Pacien TRAN-GIRARD <pacien.trangirard@pacien.net>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.


'''logs pull parameters to a file

This extension logs the pull parameters, i.e. the remote and common heads,
when pulling from the local repository.

The collected data should give an idea of the state of a pair of repositories
and allow replaying past synchronisations between them. This is particularly
useful for working on data exchange, bundling and caching-related
optimisations.

The record is a JSON-line file located in the repository's VFS at
.hg/pull_log.jsonl.

Log write failures are not considered fatal: log writes may be skipped for any
reason such as insufficient storage or a timeout.

Some basic log file rotation can be enabled by setting 'rotate-size' to a value
greater than 0. This causes the current log file to be moved to
.hg/pull_log.jsonl.rotated when this threshold is met, discarding any
previously rotated log file.

The timeouts of the exclusive lock used when writing to the log file can be
configured through the 'timeout.lock' and 'timeout.warn' options of this
plugin. The lock is not expected to be held for a significant time in
practice.::

  [pull-logger]
  timeout.lock = 300
  timeout.warn = 100
  rotate-size = 1kb
'''


import json
import time

from mercurial.i18n import _
from mercurial.utils import stringutil
from mercurial import (
    error,
    extensions,
    lock,
    registrar,
    wireprotov1server,
)

EXT_NAME = b'pull-logger'
EXT_VERSION_CODE = 0

LOG_FILE = b'pull_log.jsonl'
OLD_LOG_FILE = LOG_FILE + b'.rotated'
LOCK_NAME = LOG_FILE + b'.lock'

configtable = {}
configitem = registrar.configitem(configtable)
configitem(EXT_NAME, b'timeout.lock', default=600)
configitem(EXT_NAME, b'timeout.warn', default=120)
configitem(EXT_NAME, b'rotate-size', default=b'100MB')


def wrap_getbundle(orig, repo, proto, others, *args, **kwargs):
    heads, common = extract_pull_heads(others)
    log_entry = {
        'timestamp': time.time(),
        'logger_version': EXT_VERSION_CODE,
        'heads': sorted(heads),
        'common': sorted(common),
    }

    try:
        write_to_log(repo, log_entry)
    except (IOError, error.LockError) as err:
        msg = stringutil.forcebytestr(err)
        repo.ui.warn(_(b'unable to append to pull log: %s\n') % msg)

    return orig(repo, proto, others, *args, **kwargs)


def extract_pull_heads(bundle_args):
    opts = wireprotov1server.options(
        b'getbundle',
        wireprotov1server.wireprototypes.GETBUNDLE_ARGUMENTS.keys(),
        bundle_args.copy(),  # this call consumes the args destructively
    )

    heads = opts.get(b'heads', b'').decode('utf-8').split(' ')
    common = opts.get(b'common', b'').decode('utf-8').split(' ')
    return (heads, common)


def write_to_log(repo, entry):
    locktimeout = repo.ui.configint(EXT_NAME, b'timeout.lock')
    lockwarntimeout = repo.ui.configint(EXT_NAME, b'timeout.warn')
    rotatesize = repo.ui.configbytes(EXT_NAME, b'rotate-size')

    with lock.trylock(
        ui=repo.ui,
        vfs=repo.vfs,
        lockname=LOCK_NAME,
        timeout=locktimeout,
        warntimeout=lockwarntimeout,
    ):
        if rotatesize > 0 and repo.vfs.exists(LOG_FILE):
            if repo.vfs.stat(LOG_FILE).st_size >= rotatesize:
                repo.vfs.rename(LOG_FILE, OLD_LOG_FILE)

        with repo.vfs.open(LOG_FILE, b'a+') as logfile:
            serialised = json.dumps(entry, sort_keys=True)
            logfile.write(serialised.encode('utf-8'))
            logfile.write(b'\n')
            logfile.flush()


def reposetup(ui, repo):
    if repo.local():
        repo._wlockfreeprefix.add(LOG_FILE)
        repo._wlockfreeprefix.add(OLD_LOG_FILE)


def uisetup(ui):
    del wireprotov1server.commands[b'getbundle']
    decorator = wireprotov1server.wireprotocommand(
        name=b'getbundle',
        args=b'*',
        permission=b'pull',
    )

    extensions.wrapfunction(
        container=wireprotov1server,
        funcname='getbundle',
        wrapper=wrap_getbundle,
    )

    decorator(wireprotov1server.getbundle)
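For context, each entry in .hg/pull_log.jsonl is a self-contained JSON
document, so the resulting log can be consumed offline with a few lines of
Python. A minimal sketch (assuming only the layout described in the docstring;
`read_pull_log` is illustrative, not part of the extension)::

  import json

  def read_pull_log(path='.hg/pull_log.jsonl'):
      # One JSON document per line; skip lines truncated by an
      # interrupted write.
      entries = []
      with open(path, encoding='utf-8') as fp:
          for line in fp:
              try:
                  entries.append(json.loads(line))
              except json.JSONDecodeError:
                  continue
      return entries

  for entry in read_pull_log():
      print(entry['timestamp'], entry['heads'], entry['common'])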
@@ -0,0 +1,79 @@
use crate::utils::files::get_bytes_from_os_string;
use std::env;

/// Keeps information on whether plain mode is active.
///
/// Plain mode means that all configuration variables which affect
/// the behavior and output of Mercurial should be
/// ignored. Additionally, the output should be stable,
/// reproducible and suitable for use in scripts or applications.
///
/// The only way to trigger plain mode is by setting either the
/// `HGPLAIN` or `HGPLAINEXCEPT` environment variables.
///
/// `is_feature_plain` returns
/// - false if HGPLAIN is not set, or the feature is in HGPLAINEXCEPT
/// - false if the feature is disabled by default and not included in HGPLAIN
/// - true otherwise
#[derive(Clone)]
pub struct PlainInfo {
    is_plain: bool,
    except: Vec<Vec<u8>>,
}

impl PlainInfo {
    fn plain_except(except: Vec<Vec<u8>>) -> Self {
        PlainInfo {
            is_plain: true,
            except,
        }
    }

    pub fn empty() -> PlainInfo {
        PlainInfo {
            is_plain: false,
            except: vec![],
        }
    }

    pub fn from_env() -> PlainInfo {
        if let Some(except) = env::var_os("HGPLAINEXCEPT") {
            PlainInfo::plain_except(
                get_bytes_from_os_string(except)
                    .split(|&byte| byte == b',')
                    .map(|x| x.to_vec())
                    .collect(),
            )
        } else {
            PlainInfo {
                is_plain: env::var_os("HGPLAIN").is_some(),
                except: vec![],
            }
        }
    }

    pub fn is_feature_plain(&self, feature: &str) -> bool {
        return self.is_plain
            && !self
                .except
                .iter()
                .any(|exception| exception.as_slice() == feature.as_bytes());
    }

    pub fn is_plain(&self) -> bool {
        self.is_plain
    }

    pub fn plainalias(&self) -> bool {
        self.is_feature_plain("alias")
    }
    pub fn plainrevsetalias(&self) -> bool {
        self.is_feature_plain("revsetalias")
    }
    pub fn plaintemplatealias(&self) -> bool {
        self.is_feature_plain("templatealias")
    }
    pub fn plaintweakdefaults(&self) -> bool {
        self.is_feature_plain("tweakdefaults")
    }
}
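For comparison, the decision implemented by `from_env` and `is_feature_plain`
mirrors Python's `ui.plain()`. A Python sketch (assuming the documented
comma-separated contents of the two variables)::

  import os

  def is_feature_plain(feature):
      # HGPLAINEXCEPT implies plain mode but exempts the listed
      # features; otherwise plain mode is simply "HGPLAIN is set".
      except_ = os.environ.get('HGPLAINEXCEPT')
      if except_ is not None:
          return feature not in except_.split(',')
      return 'HGPLAIN' in os.environ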
@@ -0,0 +1,111 @@
use std::path::Path;

use crate::{
    errors::HgError,
    exit_codes,
    filepatterns::parse_pattern_file_contents,
    matchers::{
        AlwaysMatcher, DifferenceMatcher, IncludeMatcher, Matcher,
        NeverMatcher,
    },
    repo::Repo,
    requirements::NARROW_REQUIREMENT,
    sparse::{self, SparseConfigError, SparseWarning},
};

/// The file in .hg/store/ that indicates which paths exist in the store
const FILENAME: &str = "narrowspec";
/// The file in .hg/ that indicates which paths exist in the dirstate
const DIRSTATE_FILENAME: &str = "narrowspec.dirstate";

/// Pattern prefixes that are allowed in narrow patterns. This list MUST
/// only contain patterns that are fast and safe to evaluate. Keep in mind
/// that patterns are supplied by clients and executed on remote servers
/// as part of wire protocol commands. That means that changes to this
/// data structure influence the wire protocol and should not be taken
/// lightly - especially removals.
const VALID_PREFIXES: [&str; 2] = ["path:", "rootfilesin:"];

/// Return the matcher for the current narrow spec, and all configuration
/// warnings to display.
pub fn matcher(
    repo: &Repo,
) -> Result<(Box<dyn Matcher + Sync>, Vec<SparseWarning>), SparseConfigError> {
    let mut warnings = vec![];
    if !repo.requirements().contains(NARROW_REQUIREMENT) {
        return Ok((Box::new(AlwaysMatcher), warnings));
    }
    // Treat "narrowspec does not exist" the same as "narrowspec file exists
    // and is empty".
    let store_spec = repo.store_vfs().try_read(FILENAME)?.unwrap_or(vec![]);
    let working_copy_spec =
        repo.hg_vfs().try_read(DIRSTATE_FILENAME)?.unwrap_or(vec![]);
    if store_spec != working_copy_spec {
        return Err(HgError::abort(
            "working copy's narrowspec is stale",
            exit_codes::STATE_ERROR,
            Some("run 'hg tracked --update-working-copy'".into()),
        )
        .into());
    }

    let config = sparse::parse_config(
        &store_spec,
        sparse::SparseConfigContext::Narrow,
    )?;

    warnings.extend(config.warnings);

    if !config.profiles.is_empty() {
        // TODO (from Python impl) maybe do something with profiles?
        return Err(SparseConfigError::IncludesInNarrow);
    }
    validate_patterns(&config.includes)?;
    validate_patterns(&config.excludes)?;

    if config.includes.is_empty() {
        return Ok((Box::new(NeverMatcher), warnings));
    }

    let (patterns, subwarnings) = parse_pattern_file_contents(
        &config.includes,
        Path::new(""),
        None,
        false,
    )?;
    warnings.extend(subwarnings.into_iter().map(From::from));

    let mut m: Box<dyn Matcher + Sync> =
        Box::new(IncludeMatcher::new(patterns)?);

    let (patterns, subwarnings) = parse_pattern_file_contents(
        &config.excludes,
        Path::new(""),
        None,
        false,
    )?;
    if !patterns.is_empty() {
        warnings.extend(subwarnings.into_iter().map(From::from));
        let exclude_matcher = Box::new(IncludeMatcher::new(patterns)?);
        m = Box::new(DifferenceMatcher::new(m, exclude_matcher));
    }

    Ok((m, warnings))
}

fn validate_patterns(patterns: &[u8]) -> Result<(), SparseConfigError> {
    for pattern in patterns.split(|c| *c == b'\n') {
        if pattern.is_empty() {
            continue;
        }
        let valid = VALID_PREFIXES
            .iter()
            .any(|prefix| pattern.starts_with(prefix.as_bytes()));
        if !valid {
            return Err(SparseConfigError::InvalidNarrowPrefix(
                pattern.to_owned(),
            ));
        }
    }
    Ok(())
}
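A Python sketch of the same validation (one pattern per line, checked against
VALID_PREFIXES; illustrative only)::

  VALID_PREFIXES = ('path:', 'rootfilesin:')

  def validate_patterns(patterns):
      for pattern in patterns.splitlines():
          if pattern and not pattern.startswith(VALID_PREFIXES):
              raise ValueError('invalid narrow prefix: %r' % pattern)

  validate_patterns('path:src\nrootfilesin:docs')  # ok
  # validate_patterns('glob:*.py') would raise ValueError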
@@ -0,0 +1,338 @@
use std::{collections::HashSet, path::Path};

use format_bytes::{write_bytes, DisplayBytes};

use crate::{
    errors::HgError,
    filepatterns::parse_pattern_file_contents,
    matchers::{
        AlwaysMatcher, DifferenceMatcher, IncludeMatcher, Matcher,
        UnionMatcher,
    },
    operations::cat,
    repo::Repo,
    requirements::SPARSE_REQUIREMENT,
    utils::{hg_path::HgPath, SliceExt},
    IgnorePattern, PatternError, PatternFileWarning, PatternSyntax, Revision,
    NULL_REVISION,
};

/// Command which is triggering the config read
#[derive(Copy, Clone, Debug)]
pub enum SparseConfigContext {
    Sparse,
    Narrow,
}

impl DisplayBytes for SparseConfigContext {
    fn display_bytes(
        &self,
        output: &mut dyn std::io::Write,
    ) -> std::io::Result<()> {
        match self {
            SparseConfigContext::Sparse => write_bytes!(output, b"sparse"),
            SparseConfigContext::Narrow => write_bytes!(output, b"narrow"),
        }
    }
}

/// Possible warnings when reading sparse configuration
#[derive(Debug, derive_more::From)]
pub enum SparseWarning {
    /// Warns about improper paths that start with "/"
    RootWarning {
        context: SparseConfigContext,
        line: Vec<u8>,
    },
    /// Warns about a profile missing from the given changelog revision
    ProfileNotFound { profile: Vec<u8>, rev: Revision },
    #[from]
    Pattern(PatternFileWarning),
}

/// Parsed sparse config
#[derive(Debug, Default)]
pub struct SparseConfig {
    // Line-separated
    pub(crate) includes: Vec<u8>,
    // Line-separated
    pub(crate) excludes: Vec<u8>,
    pub(crate) profiles: HashSet<Vec<u8>>,
    pub(crate) warnings: Vec<SparseWarning>,
}

/// All possible errors when reading sparse/narrow config
#[derive(Debug, derive_more::From)]
pub enum SparseConfigError {
    IncludesAfterExcludes {
        context: SparseConfigContext,
    },
    EntryOutsideSection {
        context: SparseConfigContext,
        line: Vec<u8>,
    },
    /// Narrow config does not support '%include' directives
    IncludesInNarrow,
    /// An invalid pattern prefix was given to the narrow spec. Includes the
    /// entire pattern for context.
    InvalidNarrowPrefix(Vec<u8>),
    #[from]
    HgError(HgError),
    #[from]
    PatternError(PatternError),
}

/// Parse sparse config file content.
pub(crate) fn parse_config(
    raw: &[u8],
    context: SparseConfigContext,
) -> Result<SparseConfig, SparseConfigError> {
    let mut includes = vec![];
    let mut excludes = vec![];
    let mut profiles = HashSet::new();
    let mut warnings = vec![];

    #[derive(PartialEq, Eq)]
    enum Current {
        Includes,
        Excludes,
        None,
    }

    let mut current = Current::None;
    let mut in_section = false;

    for line in raw.split(|c| *c == b'\n') {
        let line = line.trim();
        if line.is_empty() || line[0] == b'#' {
            // empty or comment line, skip
            continue;
        }
        if line.starts_with(b"%include ") {
            let profile = line[b"%include ".len()..].trim();
            if !profile.is_empty() {
                profiles.insert(profile.into());
            }
        } else if line == b"[include]" {
            if in_section && current == Current::Excludes {
                return Err(SparseConfigError::IncludesAfterExcludes {
                    context,
                });
            }
            in_section = true;
            current = Current::Includes;
            continue;
        } else if line == b"[exclude]" {
            in_section = true;
            current = Current::Excludes;
        } else {
            if current == Current::None {
                return Err(SparseConfigError::EntryOutsideSection {
                    context,
                    line: line.into(),
                });
            }
            if line.trim().starts_with(b"/") {
                warnings.push(SparseWarning::RootWarning {
                    context,
                    line: line.into(),
                });
                continue;
            }
            match current {
                Current::Includes => {
                    includes.push(b'\n');
                    includes.extend(line.iter());
                }
                Current::Excludes => {
                    excludes.push(b'\n');
                    excludes.extend(line.iter());
                }
                Current::None => unreachable!(),
            }
        }
    }

    Ok(SparseConfig {
        includes,
        excludes,
        profiles,
        warnings,
    })
}

fn read_temporary_includes(
    repo: &Repo,
) -> Result<Vec<Vec<u8>>, SparseConfigError> {
    let raw = repo.hg_vfs().try_read("tempsparse")?.unwrap_or(vec![]);
    if raw.is_empty() {
        return Ok(vec![]);
    }
    Ok(raw.split(|c| *c == b'\n').map(ToOwned::to_owned).collect())
}

/// Obtain sparse checkout patterns for the given revision
fn patterns_for_rev(
    repo: &Repo,
    rev: Revision,
) -> Result<Option<SparseConfig>, SparseConfigError> {
    if !repo.has_sparse() {
        return Ok(None);
    }
    let raw = repo.hg_vfs().try_read("sparse")?.unwrap_or(vec![]);

    if raw.is_empty() {
        return Ok(None);
    }

    let mut config = parse_config(&raw, SparseConfigContext::Sparse)?;

    if !config.profiles.is_empty() {
        let mut profiles: Vec<Vec<u8>> = config.profiles.into_iter().collect();
        let mut visited = HashSet::new();

        while let Some(profile) = profiles.pop() {
            if visited.contains(&profile) {
                continue;
            }
            visited.insert(profile.to_owned());

            let output =
                cat(repo, &rev.to_string(), vec![HgPath::new(&profile)])
                    .map_err(|_| {
                        HgError::corrupted(format!(
                            "dirstate points to non-existent parent node"
                        ))
                    })?;
            if output.results.is_empty() {
                config.warnings.push(SparseWarning::ProfileNotFound {
                    profile: profile.to_owned(),
                    rev,
                })
            }

            let subconfig = parse_config(
                &output.results[0].1,
                SparseConfigContext::Sparse,
            )?;
            if !subconfig.includes.is_empty() {
                config.includes.push(b'\n');
                config.includes.extend(&subconfig.includes);
            }
            if !subconfig.excludes.is_empty() {
                config.excludes.push(b'\n');
                config.excludes.extend(&subconfig.excludes);
            }
            config.warnings.extend(subconfig.warnings.into_iter());
            profiles.extend(subconfig.profiles.into_iter());
        }

        config.profiles = visited;
    }

    if !config.includes.is_empty() {
        config.includes.extend(b"\n.hg*");
    }

    Ok(Some(config))
}

/// Obtain a matcher for sparse working directories.
pub fn matcher(
    repo: &Repo,
) -> Result<(Box<dyn Matcher + Sync>, Vec<SparseWarning>), SparseConfigError> {
    let mut warnings = vec![];
    if !repo.requirements().contains(SPARSE_REQUIREMENT) {
        return Ok((Box::new(AlwaysMatcher), warnings));
    }

    let parents = repo.dirstate_parents()?;
    let mut revs = vec![];
    let p1_rev =
        repo.changelog()?
            .rev_from_node(parents.p1.into())
            .map_err(|_| {
                HgError::corrupted(format!(
                    "dirstate points to non-existent parent node"
                ))
            })?;
    if p1_rev != NULL_REVISION {
        revs.push(p1_rev)
    }
    let p2_rev =
        repo.changelog()?
            .rev_from_node(parents.p2.into())
            .map_err(|_| {
                HgError::corrupted(format!(
                    "dirstate points to non-existent parent node"
                ))
            })?;
    if p2_rev != NULL_REVISION {
        revs.push(p2_rev)
    }
    let mut matchers = vec![];

    for rev in revs.iter() {
        let config = patterns_for_rev(repo, *rev);
        if let Ok(Some(config)) = config {
            warnings.extend(config.warnings);
            let mut m: Box<dyn Matcher + Sync> = Box::new(AlwaysMatcher);
            if !config.includes.is_empty() {
                let (patterns, subwarnings) = parse_pattern_file_contents(
                    &config.includes,
                    Path::new(""),
                    Some(b"relglob:".as_ref()),
                    false,
                )?;
                warnings.extend(subwarnings.into_iter().map(From::from));
                m = Box::new(IncludeMatcher::new(patterns)?);
            }
            if !config.excludes.is_empty() {
                let (patterns, subwarnings) = parse_pattern_file_contents(
                    &config.excludes,
                    Path::new(""),
                    Some(b"relglob:".as_ref()),
                    false,
                )?;
                warnings.extend(subwarnings.into_iter().map(From::from));
                m = Box::new(DifferenceMatcher::new(
                    m,
                    Box::new(IncludeMatcher::new(patterns)?),
                ));
            }
            matchers.push(m);
        }
    }
    let result: Box<dyn Matcher + Sync> = match matchers.len() {
        0 => Box::new(AlwaysMatcher),
        1 => matchers.pop().expect("1 is not equal to 0"),
        _ => Box::new(UnionMatcher::new(matchers)),
    };

    let matcher =
        force_include_matcher(result, &read_temporary_includes(repo)?)?;
    Ok((matcher, warnings))
}

/// Returns a matcher that returns true for any of the forced includes before
/// testing against the actual matcher
fn force_include_matcher(
    result: Box<dyn Matcher + Sync>,
    temp_includes: &[Vec<u8>],
) -> Result<Box<dyn Matcher + Sync>, PatternError> {
    if temp_includes.is_empty() {
        return Ok(result);
    }
    let forced_include_matcher = IncludeMatcher::new(
        temp_includes
            .into_iter()
            .map(|include| {
                IgnorePattern::new(PatternSyntax::Path, include, Path::new(""))
            })
            .collect(),
    )?;
    Ok(Box::new(UnionMatcher::new(vec![
        Box::new(forced_include_matcher),
        result,
    ])))
}
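A Python sketch of the section rules `parse_config` enforces ('%include'
collects profiles, '[include]' may not follow '[exclude]', entries need a
section, and root-anchored lines only warn); illustrative stand-in, not the
hg parser itself::

  def parse_sparse_config(text):
      includes, excludes, profiles = [], [], set()
      current = None
      for line in text.splitlines():
          line = line.strip()
          if not line or line.startswith('#'):
              continue
          if line.startswith('%include '):
              profiles.add(line[len('%include '):].strip())
          elif line == '[include]':
              if current == 'excludes':
                  raise ValueError('includes after excludes')
              current = 'includes'
          elif line == '[exclude]':
              current = 'excludes'
          elif current is None:
              raise ValueError('entry outside section: %r' % line)
          elif line.startswith('/'):
              continue  # warned about and skipped, as in the Rust code
          else:
              (includes if current == 'includes' else excludes).append(line)
      return includes, excludes, profiles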
@@ -0,0 +1,43 @@
use std::os::unix::prelude::OsStrExt;

use crate::error::CommandError;
use clap::SubCommand;
use hg::{self, utils::hg_path::HgPath};

pub const HELP_TEXT: &str = "";

pub fn args() -> clap::App<'static, 'static> {
    SubCommand::with_name("debugrhgsparse")
        .arg(
            clap::Arg::with_name("files")
                .required(true)
                .multiple(true)
                .empty_values(false)
                .value_name("FILES")
                .help("Files to check against sparse profile"),
        )
        .about(HELP_TEXT)
}

pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
    let repo = invocation.repo?;

    let (matcher, _warnings) = hg::sparse::matcher(&repo).unwrap();
    let files = invocation.subcommand_args.values_of_os("files");
    if let Some(files) = files {
        for file in files {
            invocation.ui.write_stdout(b"matches: ")?;
            invocation.ui.write_stdout(
                if matcher.matches(HgPath::new(file.as_bytes())) {
                    b"yes"
                } else {
                    b"no"
                },
            )?;
            invocation.ui.write_stdout(b" | file: ")?;
            invocation.ui.write_stdout(file.as_bytes())?;
            invocation.ui.write_stdout(b"\n")?;
        }
    }
    Ok(())
}
@@ -0,0 +1,78 @@
Check that the pull logger plugin logs pulls
============================================

Enable the extension

  $ echo "[extensions]" >> $HGRCPATH
  $ echo "pull-logger = $TESTDIR/../contrib/pull_logger.py" >> $HGRCPATH


Check the format of the generated log entries, with a bunch of elements in the
common and heads set

  $ hg init server
  $ hg -R server debugbuilddag '.*2+2'
  $ hg clone ssh://user@dummy/server client --rev 0
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 0 changes to 0 files
  new changesets 1ea73414a91b
  updating to branch default
  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ tail -1 server/.hg/pull_log.jsonl
  {"common": ["0000000000000000000000000000000000000000"], "heads": ["1ea73414a91b0920940797d8fc6a11e447f8ea1e"], "logger_version": 0, "timestamp": *} (glob)
  $ hg -R client pull --rev 1 --rev 2
  pulling from ssh://user@dummy/server
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  added 2 changesets with 0 changes to 0 files (+1 heads)
  new changesets d8736c3a2c84:fa28e81e283b
  (run 'hg heads' to see heads, 'hg merge' to merge)
  $ tail -1 server/.hg/pull_log.jsonl
  {"common": ["1ea73414a91b0920940797d8fc6a11e447f8ea1e"], "heads": ["d8736c3a2c84ee759a2821385804bcb67f266ade", "fa28e81e283b3416de4d48ee0dd2d446e9e38d7c"], "logger_version": 0, "timestamp": *} (glob)
  $ hg -R client pull --rev 2 --rev 3
  pulling from ssh://user@dummy/server
  searching for changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 0 changes to 0 files
  new changesets 944641ddcaef
  (run 'hg update' to get a working copy)
  $ tail -1 server/.hg/pull_log.jsonl
  {"common": ["1ea73414a91b0920940797d8fc6a11e447f8ea1e", "fa28e81e283b3416de4d48ee0dd2d446e9e38d7c"], "heads": ["944641ddcaef174df7ce1bc2751a5f165129778b", "fa28e81e283b3416de4d48ee0dd2d446e9e38d7c"], "logger_version": 0, "timestamp": *} (glob)


Check the number of entries generated in the log when pulling from multiple
clients at the same time

  $ rm -f server/.hg/pull_log.jsonl
  $ for i in $($TESTDIR/seq.py 32); do
  >   hg clone ssh://user@dummy/server client_$i --rev 0
  > done > /dev/null
  $ for i in $($TESTDIR/seq.py 32); do
  >   hg -R client_$i pull --rev 1 &
  > done > /dev/null
  $ wait
  $ wc -l server/.hg/pull_log.jsonl
  \s*64 .* (re)


Test log rotation when reaching some size threshold

  $ cat >> $HGRCPATH << EOF
  > [pull-logger]
  > rotate-size = 1kb
  > EOF

  $ rm -f server/.hg/pull_log.jsonl
  $ for i in $($TESTDIR/seq.py 10); do
  >   hg -R client pull --rev 1
  > done > /dev/null
  $ wc -l server/.hg/pull_log.jsonl
  \s*3 .* (re)
  $ wc -l server/.hg/pull_log.jsonl.rotated
  \s*7 .* (re)
@@ -203,9 +203,11 @@ i18n/hg.pot: $(PYFILES) $(DOCFILES) i18n
 packaging_targets := \
   rhel7 \
   rhel8 \
+  rhel9 \
   deb \
   docker-rhel7 \
   docker-rhel8 \
+  docker-rhel9 \
   docker-debian-bullseye \
   docker-debian-buster \
   docker-debian-stretch \
@@ -89,7 +89,7 @@ check-pytype:
     - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
     - cd /tmp/mercurial-ci/
    - make local PYTHON=$PYTHON
-    - $PYTHON -m pip install --user -U pytype==2021.04.15
+    - $PYTHON -m pip install --user -U libcst==0.3.20 pytype==2022.03.29
   script:
     - echo "Entering script section"
     - sh contrib/check-pytype.sh
@@ -15,7 +15,8 @@ FEDORA_RELEASE := 31
 
 RHEL_RELEASES := \
   7 \
-  8
+  8 \
+  9
 
 # Build a Python for these RHEL (and derivatives) releases.
 RHEL_WITH_PYTHON_RELEASES :=
@@ -1,8 +1,12 @@
-FROM rockylinux/rockylinux:8
+FROM rockylinux/rockylinux:9
 
 RUN groupadd -g %GID% build && \
     useradd -u %UID% -g %GID% -s /bin/bash -d /build -m build
 
+RUN dnf install 'dnf-command(config-manager)' -y
+# crb repository is necessary for docutils
+RUN dnf config-manager --set-enabled crb
+
 RUN yum install -y \
     gcc \
     gettext \
@@ -9,7 +9,7 @@
 # node: the node|short hg was built from, or empty if built from a tag
 gethgversion() {
     if [ -z "${1+x}" ]; then
-        python="python"
+        python="python3"
     else
         python="$1"
     fi
@@ -97,6 +97,16 @@ VARIANTS_KEYS = [
 assert set(VARIANTS.keys()) == set(VARIANTS_KEYS)
 
 
+def parse_case(case):
+    case_type, case_args = case.split('-', 1)
+    if case_type == 'file':
+        case_args = (case_args,)
+    else:
+        case_args = tuple(int(x) for x in case_args.split('-'))
+    case = (case_type,) + case_args
+    return case
+
+
 def format_case(case):
     return '-'.join(str(s) for s in case)
 
@@ -109,12 +119,41 @@ def to_revsets(case):
         return '::randomantichain(all(), "%d")' % case[1]
     elif t == 'rev':
         return '::%d' % case[1]
+    elif t == 'file':
+        return '::nodefromfile("%s")' % case[1]
     else:
         assert False
 
 
-def compare(repo, local_case, remote_case):
+def compare(
+    repo,
+    local_case,
+    remote_case,
+    display_header=True,
+    display_case=True,
+):
     case = (repo, local_case, remote_case)
+    if display_header:
+        pieces = ['#']
+        if display_case:
+            pieces += [
+                "repo",
+                "local-subset",
+                "remote-subset",
+            ]
+
+        pieces += [
+            "discovery-variant",
+            "roundtrips",
+            "queries",
+            "revs",
+            "local-heads",
+            "common-heads",
+            "undecided-initial",
+            "undecided-common",
+            "undecided-missing",
+        ]
+        print(*pieces)
     for variant in VARIANTS_KEYS:
         res = process(case, VARIANTS[variant])
         revs = res["nb-revs"]
@@ -122,36 +161,31 @@ def compare(repo, local_case, remote_cas
         common_heads = res["nb-common-heads"]
         roundtrips = res["total-roundtrips"]
         queries = res["total-queries"]
-        if 'tree-discovery' in variant:
-            print(
+        pieces = []
+        if display_case:
+            pieces += [
                 repo,
                 format_case(local_case),
                 format_case(remote_case),
-                variant,
-                roundtrips,
-                queries,
-                revs,
-                local_heads,
-                common_heads,
-            )
-        else:
+            ]
+        pieces += [
+            variant,
+            roundtrips,
+            queries,
+            revs,
+            local_heads,
+            common_heads,
+        ]
+        if 'tree-discovery' not in variant:
             undecided_common = res["nb-ini_und-common"]
             undecided_missing = res["nb-ini_und-missing"]
             undecided = undecided_common + undecided_missing
-            print(
-                repo,
-                format_case(local_case),
-                format_case(remote_case),
-                variant,
-                roundtrips,
-                queries,
-                revs,
-                local_heads,
-                common_heads,
+            pieces += [
                 undecided,
                 undecided_common,
                 undecided_missing,
-            )
+            ]
+        print(*pieces)
     return 0
 
 
@@ -171,13 +205,23 @@ def process(case, variant):
 
 
 if __name__ == '__main__':
-    if len(sys.argv) != 4:
+
+    argv = sys.argv[:]
+
+    kwargs = {}
+    # primitive arg parsing
+    if '--no-header' in argv:
+        kwargs['display_header'] = False
+        argv = [a for a in argv if a != '--no-header']
+    if '--no-case' in argv:
+        kwargs['display_case'] = False
+        argv = [a for a in argv if a != '--no-case']
+
+    if len(argv) != 4:
         usage = f'USAGE: {script_name} REPO LOCAL_CASE REMOTE_CASE'
         print(usage, file=sys.stderr)
         sys.exit(128)
-    repo = sys.argv[1]
-    local_case = sys.argv[2].split('-')
-    local_case = (local_case[0],) + tuple(int(x) for x in local_case[1:])
-    remote_case = sys.argv[3].split('-')
-    remote_case = (remote_case[0],) + tuple(int(x) for x in remote_case[1:])
-    sys.exit(compare(repo, local_case, remote_case))
+    repo = argv[1]
+    local_case = parse_case(argv[2])
+    remote_case = parse_case(argv[3])
+    sys.exit(compare(repo, local_case, remote_case, **kwargs))
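With the new helper, the two case formats round-trip as follows (illustrative
values)::

  >>> parse_case('antichain-2')
  ('antichain', 2)
  >>> parse_case('rev-100')
  ('rev', 100)
  >>> parse_case('file-heads.txt')  # 'file' keeps its argument as a string
  ('file', 'heads.txt')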
@@ -925,6 +925,71 @@ def perfancestorset(ui, repo, revset, **
     fm.end()
 
 
+@command(
+    b'perf::delta-find',
+    revlogopts + formatteropts,
+    b'-c|-m|FILE REV',
+)
+def perf_delta_find(ui, repo, arg_1, arg_2=None, **opts):
+    """benchmark the process of finding a valid delta for a revlog revision
+
+    When a revlog receives a new revision (e.g. from a commit, or from an
+    incoming bundle), it searches for a suitable delta-base to produce a delta.
+    This perf command measures how much time we spend in this process. It
+    operates on an already stored revision.
+
+    See `hg help debug-delta-find` for another related command.
+    """
+    from mercurial import revlogutils
+    import mercurial.revlogutils.deltas as deltautil
+
+    opts = _byteskwargs(opts)
+    if arg_2 is None:
+        file_ = None
+        rev = arg_1
+    else:
+        file_ = arg_1
+        rev = arg_2
+
+    repo = repo.unfiltered()
+
+    timer, fm = gettimer(ui, opts)
+
+    rev = int(rev)
+
+    revlog = cmdutil.openrevlog(repo, b'perf::delta-find', file_, opts)
+
+    deltacomputer = deltautil.deltacomputer(revlog)
+
+    node = revlog.node(rev)
+    p1r, p2r = revlog.parentrevs(rev)
+    p1 = revlog.node(p1r)
+    p2 = revlog.node(p2r)
+    full_text = revlog.revision(rev)
+    textlen = len(full_text)
+    cachedelta = None
+    flags = revlog.flags(rev)
+
+    revinfo = revlogutils.revisioninfo(
+        node,
+        p1,
+        p2,
+        [full_text],  # btext
+        textlen,
+        cachedelta,
+        flags,
+    )
+
+    # Note: we should probably purge the potential caches (like the full
+    # manifest cache) between runs.
+    def find_one():
+        with revlog._datafp() as fh:
+            deltacomputer.finddeltainfo(revinfo, fh, target_rev=rev)
+
+    timer(find_one)
+    fm.end()
+
+
 @command(b'perf::discovery|perfdiscovery', formatteropts, b'PATH')
 def perfdiscovery(ui, repo, path, **opts):
     """benchmark discovery between local repo and the peer at given path"""
@@ -974,6 +1039,111 @@ def perfbookmarks(ui, repo, **opts):
     fm.end()
 
 
+@command(
+    b'perf::bundle',
+    [
+        (
+            b'r',
+            b'rev',
+            [],
+            b'changesets to bundle',
+            b'REV',
+        ),
+        (
+            b't',
+            b'type',
+            b'none',
+            b'bundlespec to use (see `hg help bundlespec`)',
+            b'TYPE',
+        ),
+    ]
+    + formatteropts,
+    b'REVS',
+)
+def perfbundle(ui, repo, *revs, **opts):
+    """benchmark the creation of a bundle from a repository
+
+    For now, this only supports "none" compression.
+    """
+    try:
+        from mercurial import bundlecaches
+
+        parsebundlespec = bundlecaches.parsebundlespec
+    except ImportError:
+        from mercurial import exchange
+
+        parsebundlespec = exchange.parsebundlespec
+
+    from mercurial import discovery
+    from mercurial import bundle2
+
+    opts = _byteskwargs(opts)
+    timer, fm = gettimer(ui, opts)
+
+    cl = repo.changelog
+    revs = list(revs)
+    revs.extend(opts.get(b'rev', ()))
+    revs = scmutil.revrange(repo, revs)
+    if not revs:
+        raise error.Abort(b"no revision specified")
+    # make it a consistent set (ie: without topological gaps)
+    old_len = len(revs)
+    revs = list(repo.revs(b"%ld::%ld", revs, revs))
+    if old_len != len(revs):
+        new_count = len(revs) - old_len
+        msg = b"add %d new revisions to make it a consistent set\n"
+        ui.write_err(msg % new_count)
+
+    targets = [cl.node(r) for r in repo.revs(b"heads(::%ld)", revs)]
+    bases = [cl.node(r) for r in repo.revs(b"heads(::%ld - %ld)", revs, revs)]
+    outgoing = discovery.outgoing(repo, bases, targets)
+
+    bundle_spec = opts.get(b'type')
+
+    bundle_spec = parsebundlespec(repo, bundle_spec, strict=False)
+
+    cgversion = bundle_spec.params.get(b"cg.version")
+    if cgversion is None:
+        if bundle_spec.version == b'v1':
+            cgversion = b'01'
+        if bundle_spec.version == b'v2':
+            cgversion = b'02'
+    if cgversion not in changegroup.supportedoutgoingversions(repo):
+        err = b"repository does not support bundle version %s"
+        raise error.Abort(err % cgversion)
+
+    if cgversion == b'01':  # bundle1
+        bversion = b'HG10' + bundle_spec.wirecompression
+        bcompression = None
+    elif cgversion in (b'02', b'03'):
+        bversion = b'HG20'
+        bcompression = bundle_spec.wirecompression
+    else:
+        err = b'perf::bundle: unexpected changegroup version %s'
+        raise error.ProgrammingError(err % cgversion)
+
+    if bcompression is None:
+        bcompression = b'UN'
+
+    if bcompression != b'UN':
+        err = b'perf::bundle: compression currently unsupported: %s'
+        raise error.ProgrammingError(err % bcompression)
+
+    def do_bundle():
+        bundle2.writenewbundle(
+            ui,
+            repo,
+            b'perf::bundle',
+            os.devnull,
+            bversion,
+            outgoing,
+            bundle_spec.params,
+        )
+
+    timer(do_bundle)
+    fm.end()
+
+
 @command(b'perf::bundleread|perfbundleread', formatteropts, b'BUNDLE')
 def perfbundleread(ui, repo, bundlepath, **opts):
     """Benchmark reading of bundle files.
@@ -2498,6 +2668,60 @@ def perfbdiff(ui, repo, file_, rev=None,
 
 
 @command(
+    b'perf::unbundle',
+    formatteropts,
+    b'BUNDLE_FILE',
+)
+def perf_unbundle(ui, repo, fname, **opts):
+    """benchmark application of a bundle in a repository.
+
+    This does not include the final transaction processing"""
+    from mercurial import exchange
+    from mercurial import bundle2
+
+    opts = _byteskwargs(opts)
+
+    with repo.lock():
+        bundle = [None, None]
+        orig_quiet = repo.ui.quiet
+        try:
+            repo.ui.quiet = True
+            with open(fname, mode="rb") as f:
+
+                def noop_report(*args, **kwargs):
+                    pass
+
+                def setup():
+                    gen, tr = bundle
+                    if tr is not None:
+                        tr.abort()
+                    bundle[:] = [None, None]
+                    f.seek(0)
+                    bundle[0] = exchange.readbundle(ui, f, fname)
+                    bundle[1] = repo.transaction(b'perf::unbundle')
+                    bundle[1]._report = noop_report  # silence the transaction
+
+                def apply():
+                    gen, tr = bundle
+                    bundle2.applybundle(
+                        repo,
+                        gen,
+                        tr,
+                        source=b'perf::unbundle',
+                        url=fname,
+                    )
+
+                timer, fm = gettimer(ui, opts)
+                timer(apply, setup=setup)
+                fm.end()
+        finally:
+            repo.ui.quiet = orig_quiet
+            gen, tr = bundle
+            if tr is not None:
+                tr.abort()
+
+
+@command(
     b'perf::unidiff|perfunidiff',
     revlogopts
     + formatteropts
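The setup/apply split is the point of this command: applying a bundle mutates
the repository, so every measured run needs a fresh bundle reader and a fresh
transaction, built outside the timed window and aborted afterwards. A
standalone sketch of that discipline (plain Python, not perf.py's actual
gettimer/timer API)::

  import time

  def bench(apply, setup=None, runs=3):
      # Run `setup` outside the measured window, time only `apply`,
      # and keep the best of several runs.
      best = float('inf')
      for _ in range(runs):
          if setup is not None:
              setup()
          start = time.perf_counter()
          apply()
          best = min(best, time.perf_counter() - start)
      return best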
@@ -36,6 +36,7 @@ import sys
 
 from . import compat
 
+abc = collections.abc
 
 BSER_ARRAY = b"\x00"
 BSER_OBJECT = b"\x01"
@@ -207,9 +208,7 @@ class _bser_buffer:
             self.ensure_size(needed)
             struct.pack_into(b"=cd", self.buf, self.wpos, BSER_REAL, val)
             self.wpos += needed
-        elif isinstance(val, collections.Mapping) and isinstance(
-            val, collections.Sized
-        ):
+        elif isinstance(val, abc.Mapping) and isinstance(val, abc.Sized):
             val_len = len(val)
             size = _int_size(val_len)
             needed = 2 + size
@@ -260,9 +259,7 @@ class _bser_buffer:
             for k, v in iteritems:
                 self.append_string(k)
                 self.append_recursive(v)
-        elif isinstance(val, collections.Iterable) and isinstance(
-            val, collections.Sized
-        ):
+        elif isinstance(val, abc.Iterable) and isinstance(val, abc.Sized):
             val_len = len(val)
             size = _int_size(val_len)
             needed = 2 + size
@@ -546,7 +546,9 @@ class rebaseruntime:
         date = self.date
         if date is None:
             date = ctx.date()
-        extra = {b'rebase_source': ctx.hex()}
+        extra = {}
+        if repo.ui.configbool(b'rebase', b'store-source'):
+            extra = {b'rebase_source': ctx.hex()}
         for c in self.extrafns:
             c(ctx, extra)
         destphase = max(ctx.phase(), phases.draft)
@@ -70,7 +70,7 @@ DEFAULT_SECTIONS = [
     (b'api', _(b'API Changes')),
 ]
 
-RE_DIRECTIVE = re.compile(br'^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$')
+RE_DIRECTIVE = re.compile(br'^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$', re.MULTILINE)
 RE_ISSUE = br'\bissue ?[0-9]{4,6}(?![0-9])\b'
 
 BULLET_SECTION = _(b'Other Changes')
@@ -1382,6 +1382,7 @@ static inline int index_baserev(indexObj
 static int index_issnapshotrev(indexObject *self, Py_ssize_t rev)
 {
         int ps[2];
+        int b;
         Py_ssize_t base;
         while (rev >= 0) {
                 base = (Py_ssize_t)index_baserev(self, rev);
@@ -1399,6 +1400,20 @@ static int index_issnapshotrev(indexObje
                 assert(PyErr_Occurred());
                 return -1;
         };
+        while ((index_get_length(self, ps[0]) == 0) && ps[0] >= 0) {
+                b = index_baserev(self, ps[0]);
+                if (b == ps[0]) {
+                        break;
+                }
+                ps[0] = b;
+        }
+        while ((index_get_length(self, ps[1]) == 0) && ps[1] >= 0) {
+                b = index_baserev(self, ps[1]);
+                if (b == ps[1]) {
+                        break;
+                }
+                ps[1] = b;
+        }
         if (base == ps[0] || base == ps[1]) {
                 return 0;
         }
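The same parent normalization in Python terms (sketch; `length` and
`deltaparent` are the usual revlog accessors): walk a parent back through
zero-length deltas until reaching the revision that actually stores data::

  def skip_empty_deltas(revlog, rev):
      # A revision whose stored delta is empty is equivalent to its
      # delta parent for snapshot-detection purposes.
      while rev >= 0 and revlog.length(rev) == 0:
          base = revlog.deltaparent(rev)
          if base == rev:
              break
          rev = base
      return rev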
@@ -832,7 +832,7 @@ def _commentlines(raw):
 
 @attr.s(frozen=True)
 class morestatus:
-    reporoot = attr.ib()
+    repo = attr.ib()
     unfinishedop = attr.ib()
     unfinishedmsg = attr.ib()
     activemerge = attr.ib()
@@ -876,7 +876,7 @@ class morestatus:
         mergeliststr = b'\n'.join(
             [
                 b'    %s'
-                % util.pathto(self.reporoot, encoding.getcwd(), path)
+                % util.pathto(self.repo.root, encoding.getcwd(), path)
                 for path in self.unresolvedpaths
             ]
         )
@@ -898,6 +898,7 @@ To mark files as resolved: hg resolve -
                 # Already output.
                 continue
             fm.startitem()
+            fm.context(repo=self.repo)
             # We can't claim to know the status of the file - it may just
             # have been in one of the states that were not requested for
             # display, so it could be anything.
@@ -923,7 +924,7 @@ def readmorestatus(repo):
     if activemerge:
         unresolved = sorted(mergestate.unresolved())
     return morestatus(
-        repo.root, unfinishedop, unfinishedmsg, activemerge, unresolved
+        repo, unfinishedop, unfinishedmsg, activemerge, unresolved
     )
 
 
@@ -1035,7 +1035,14 @@ def bisect(
     state = hbisect.load_state(repo)
 
     if rev:
-        nodes = [repo[i].node() for i in logcmdutil.revrange(repo, rev)]
+        revs = logcmdutil.revrange(repo, rev)
+        goodnodes = state[b'good']
+        badnodes = state[b'bad']
+        if goodnodes and badnodes:
+            candidates = repo.revs(b'(%ln)::(%ln)', goodnodes, badnodes)
+            candidates += repo.revs(b'(%ln)::(%ln)', badnodes, goodnodes)
+            revs = candidates & revs
+        nodes = [repo.changelog.node(i) for i in revs]
     else:
         nodes = [repo.lookup(b'.')]
 
@@ -1485,6 +1492,12 @@ def branches(ui, repo, active=False, clo
     b'bundle',
     [
         (
+            b'',
+            b'exact',
+            None,
+            _(b'compute the base from the revision specified'),
+        ),
+        (
             b'f',
             b'force',
             None,
1553 Returns 0 on success, 1 if no changes found.
1566 Returns 0 on success, 1 if no changes found.
1554 """
1567 """
1555 opts = pycompat.byteskwargs(opts)
1568 opts = pycompat.byteskwargs(opts)
1569
1556 revs = None
1570 revs = None
1557 if b'rev' in opts:
1571 if b'rev' in opts:
1558 revstrings = opts[b'rev']
1572 revstrings = opts[b'rev']
@@ -1586,7 +1600,19 @@ def bundle(ui, repo, fname, *dests, **op
         )
         if opts.get(b'base'):
             ui.warn(_(b"ignoring --base because --all was specified\n"))
+        if opts.get(b'exact'):
+            ui.warn(_(b"ignoring --exact because --all was specified\n"))
         base = [nullrev]
+    elif opts.get(b'exact'):
+        if dests:
+            raise error.InputError(
+                _(b"--exact is incompatible with specifying destinations")
+            )
+        if opts.get(b'base'):
+            ui.warn(_(b"ignoring --base because --exact was specified\n"))
+        base = repo.revs(b'parents(%ld) - %ld', revs, revs)
+        if not base:
+            base = [nullrev]
     else:
         base = logcmdutil.revrange(repo, opts.get(b'base'))
     if cgversion not in changegroup.supportedoutgoingversions(repo):
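The `--exact` base is simply the parent frontier of the bundled set, which the
`parents(%ld) - %ld` revset computes. A pure-Python sketch of the same set
logic (with -1 standing in for nullrev)::

  def exact_base(parents_of, revs):
      # Every parent of a bundled revision that is not itself bundled.
      revs = set(revs)
      base = {p for r in revs for p in parents_of(r)} - revs
      return sorted(base) if base else [-1]

  # A linear chain 0-1-2-3, bundling revisions 2 and 3:
  print(exact_base(lambda r: [r - 1] if r > 0 else [], {2, 3}))  # [1]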
@@ -6954,11 +6980,13 @@ def status(ui, repo, *pats, **opts):
     )
 
     copy = {}
-    if (
-        opts.get(b'all')
-        or opts.get(b'copies')
-        or ui.configbool(b'ui', b'statuscopies')
-    ) and not opts.get(b'no_status'):
+    show_copies = ui.configbool(b'ui', b'statuscopies')
+    if opts.get(b'copies') is not None:
+        show_copies = opts.get(b'copies')
+    show_copies = (show_copies or opts.get(b'all')) and not opts.get(
+        b'no_status'
+    )
+    if show_copies:
         copy = copies.pathcopies(ctx1, ctx2, m)
 
     morestatus = None
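Spelled out, the new precedence is: an explicit --copies/--no-copies flag
overrides ui.statuscopies, --all still forces copy tracing on, and --no-status
always wins. As a sketch::

  def should_show_copies(cfg, opt_copies, opt_all, opt_no_status):
      show = cfg  # ui.statuscopies
      if opt_copies is not None:  # flag given on the command line
          show = opt_copies
      return (show or opt_all) and not opt_no_status

  assert should_show_copies(False, True, False, False)      # --copies
  assert not should_show_copies(True, False, False, False)  # --no-copies
  assert should_show_copies(False, None, True, False)       # --all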
@@ -1425,12 +1425,38 @@ coreconfigitem(
     default=False,
     experimental=True,
 )
+
+# Turning this on by default means we are confident about the scaling of phases.
+# This is not guaranteed to be the case at the time this message is written.
 coreconfigitem(
     b'format',
-    b'internal-phase',
+    b'use-internal-phase',
     default=False,
     experimental=True,
 )
+# The interaction between the archived phase and obsolescence markers needs to
+# be sorted out before wider usage of this is to be considered.
+#
+# At the time this message is written, behavior when archiving obsolete
+# changesets differs significantly from stripping. As part of stripping, we
+# also remove the obsolescence markers associated with the stripped
+# changesets, revealing the predecessor changesets when applicable. When
+# archiving, we don't touch the obsolescence markers, keeping everything
+# hidden. This can result in quite confusing situations for people combining
+# exchange of drafts with the archived phase, as some markers needed by
+# others may be skipped during exchange.
+coreconfigitem(
+    b'format',
+    b'exp-archived-phase',
+    default=False,
+    experimental=True,
+)
+coreconfigitem(
+    b'shelve',
+    b'store',
+    default=b'internal',
+    experimental=True,
+)
 coreconfigitem(
     b'fsmonitor',
     b'warn_when_unused',
@@ -2835,3 +2861,17 @@ coreconfigitem(
     b'experimental.inmemory',
     default=False,
 )
+
+# This setting controls creation of a rebase_source extra field
+# during rebase. When False, no such field is created. This is
+# useful e.g. for incrementally converting changesets and then
+# rebasing them onto an existing repo.
+# WARNING: this is an advanced setting reserved for people who know
+# exactly what they are doing. Misuse of this setting can easily
+# result in obsmarker cycles and a vivid headache.
+coreconfigitem(
+    b'rebase',
+    b'store-source',
+    default=True,
+    experimental=True,
+)
@@ -1021,7 +1021,7 @@ def debugdeltafind(ui, repo, arg_1, arg_
     deltacomputer = deltautil.deltacomputer(
         revlog,
         write_debug=ui.write,
-        debug_search=True,
+        debug_search=not ui.quiet,
     )
 
     node = revlog.node(rev)
@@ -107,7 +107,7 @@ kdiff3.regkeyalt=Software\Wow6432Node\KD
 
 meld.args=--label=$labellocal $local --label='merged' $base --label=$labelother $other -o $output --auto-merge
 meld.check=changed
-meld.diffargs=-a --label=$plabel1 $parent --label=$clabel $child
+meld.diffargs=--label=$plabel1 $parent --label=$clabel $child
 meld.gui=True
 
 merge.check=conflicts
@@ -1287,6 +1287,7 @@ class dirstate:
 
         allowed_matchers = (
             matchmod.alwaysmatcher,
+            matchmod.differencematcher,
            matchmod.exactmatcher,
             matchmod.includematcher,
             matchmod.intersectionmatcher,
@@ -952,14 +952,22 @@ def _getlocal(ui, rpath, wd=None):
 
     Takes paths in [cwd]/.hg/hgrc into account."
     """
+    try:
+        cwd = encoding.getcwd()
+    except OSError as e:
+        raise error.Abort(
+            _(b"error getting current working directory: %s")
+            % encoding.strtolocal(e.strerror)
+        )
+
+    # If using an alternate wd, temporarily switch to it so that relative
+    # paths are resolved correctly during config loading.
+    oldcwd = None
     if wd is None:
-        try:
-            wd = encoding.getcwd()
-        except OSError as e:
-            raise error.Abort(
-                _(b"error getting current working directory: %s")
-                % encoding.strtolocal(e.strerror)
-            )
+        wd = cwd
+    else:
+        oldcwd = cwd
+        os.chdir(wd)
 
     path = cmdutil.findrepo(wd) or b""
     if not path:
@@ -979,6 +987,9 @@ def _getlocal(ui, rpath, wd=None):
         lui.readconfig(os.path.join(path, b".hg", b"hgrc"), path)
         lui.readconfig(os.path.join(path, b".hg", b"hgrc-not-shared"), path)
 
+    if oldcwd:
+        os.chdir(oldcwd)
+
     return path, lui
 
 
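The inline save/chdir/restore dance is equivalent to a small context manager;
_getlocal keeps it inline so the restore happens right after the config files
are read (sketch)::

  import os
  from contextlib import contextmanager

  @contextmanager
  def working_dir(path):
      # Resolve relative paths (e.g. in config files) against an
      # alternate working directory, then switch back.
      old = os.getcwd()
      os.chdir(path)
      try:
          yield
      finally:
          os.chdir(old)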
@@ -39,7 +39,7 @@ def bisect(repo, state):
     def buildancestors(bad, good):
         badrev = min([changelog.rev(n) for n in bad])
         ancestors = collections.defaultdict(lambda: None)
-        for rev in repo.revs(b"descendants(%ln) - ancestors(%ln)", good, good):
+        for rev in repo.revs(b"(%ln::%d) - (::%ln)", good, badrev, good):
             ancestors[rev] = []
         if ancestors[badrev] is None:
             return badrev, None
@@ -115,11 +115,21 @@ def bisect(repo, state):
             poison.update(children.get(rev, []))
             continue
 
+        unvisited = []
         for c in children.get(rev, []):
             if ancestors[c]:
                 ancestors[c] = list(set(ancestors[c] + a))
             else:
-                ancestors[c] = a + [c]
+                unvisited.append(c)
+
+        # Reuse existing ancestor list for the first unvisited child to avoid
+        # excessive copying for linear portions of history.
+        if unvisited:
+            first = unvisited.pop(0)
+            for c in unvisited:
+                ancestors[c] = a + [c]
+            a.append(first)
+            ancestors[first] = a
 
     assert best_rev is not None
     best_node = changelog.node(best_rev)
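The change is about asymptotics: on a linear history, every child used to
receive a fresh copy of its parent's ancestor list, which is quadratic
overall; handing the existing list to exactly one child makes that case
linear. A sketch of the cost difference::

  # Copying: each step duplicates the list built so far,
  # O(n) per revision and O(n^2) overall.
  a = []
  for rev in range(1000):
      a = a + [rev]

  # Reuse: one child inherits and extends the same list object,
  # amortized O(1) per revision.
  a = []
  for rev in range(1000):
      a.append(rev)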
@@ -67,6 +67,33 @@ The following bundle <compression> engin
 
 .. bundlecompressionmarker
 
+Available Options
+=================
+
+The following options exist:
+
+changegroup
+    Include the changegroup data in the bundle (defaults to True).
+
+cg.version
+    Select the version of the changegroup to use. Available options are: 01,
+    02 or 03. By default it will be automatically selected according to the
+    current repository format.
+
+obsolescence
+    Include obsolescence-markers relevant to the bundled changesets.
+
+phases
+    Include phase information relevant to the bundled changesets.
+
+revbranchcache
+    Include the "rev-branch-cache" cache inside the bundle.
+
+
+tagsfnodescache
+    Include the "tags-fnodes" cache inside the bundle.
+
+
 Examples
 ========
 
@@ -18,12 +18,15 @@ from .. import (
 
 from ..utils import procutil
 
+# pytype: disable=pyi-error
 from . import (
     hgweb_mod,
     hgwebdir_mod,
     server,
 )
 
+# pytype: enable=pyi-error
+
 
 def hgweb(config, name=None, baseui=None):
     """create an hgweb wsgi object
@@ -522,12 +522,8 @@ def _readrequires(vfs, allowmissing):
     # the repository. This file was introduced in Mercurial 0.9.2,
     # which means very old repositories may not have one. We assume
     # a missing file translates to no requirements.
-    try:
-        return set(vfs.read(b'requires').splitlines())
-    except FileNotFoundError:
-        if not allowmissing:
-            raise
-        return set()
+    read = vfs.tryread if allowmissing else vfs.read
+    return set(read(b'requires').splitlines())
 
 
 def makelocalrepository(baseui, path, intents=None):
@@ -1281,6 +1277,7 @@ class localrepository:
     """
 
     _basesupported = {
+        requirementsmod.ARCHIVED_PHASE_REQUIREMENT,
         requirementsmod.BOOKMARKS_IN_STORE_REQUIREMENT,
         requirementsmod.CHANGELOGV2_REQUIREMENT,
         requirementsmod.COPIESSDC_REQUIREMENT,
3668 requirements.discard(requirementsmod.REVLOGV1_REQUIREMENT)
3665 requirements.discard(requirementsmod.REVLOGV1_REQUIREMENT)
3669 requirements.add(requirementsmod.REVLOGV2_REQUIREMENT)
3666 requirements.add(requirementsmod.REVLOGV2_REQUIREMENT)
3670 # experimental config: format.internal-phase
3667 # experimental config: format.internal-phase
3671 if ui.configbool(b'format', b'internal-phase'):
3668 if ui.configbool(b'format', b'use-internal-phase'):
3672 requirements.add(requirementsmod.INTERNAL_PHASE_REQUIREMENT)
3669 requirements.add(requirementsmod.INTERNAL_PHASE_REQUIREMENT)
3673
3670
3671 # experimental config: format.exp-archived-phase
3672 if ui.configbool(b'format', b'exp-archived-phase'):
3673 requirements.add(requirementsmod.ARCHIVED_PHASE_REQUIREMENT)
3674
3674 if createopts.get(b'narrowfiles'):
3675 if createopts.get(b'narrowfiles'):
3675 requirements.add(requirementsmod.NARROW_REQUIREMENT)
3676 requirements.add(requirementsmod.NARROW_REQUIREMENT)
3676
3677
@@ -70,6 +70,7 @@ comment associated with each format for
 
 import binascii
 import struct
+import weakref
 
 from .i18n import _
 from .pycompat import getattr
@@ -561,10 +562,18 @@ class obsstore:
         # caches for various obsolescence related cache
         self.caches = {}
         self.svfs = svfs
-        self.repo = repo
+        self._repo = weakref.ref(repo)
         self._defaultformat = defaultformat
         self._readonly = readonly
 
+    @property
+    def repo(self):
+        r = self._repo()
+        if r is None:
+            msg = "using the obsstore of a deallocated repo"
+            raise error.ProgrammingError(msg)
+        return r
+
     def __iter__(self):
         return iter(self._all)
 
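The weakref-backed property pattern used here, in isolation (sketch; the
error message matches the one above)::

  import weakref

  class ObsStoreLike:
      def __init__(self, repo):
          # Hold the owner weakly so this cache cannot keep a
          # deallocated repository object alive.
          self._repo = weakref.ref(repo)

      @property
      def repo(self):
          r = self._repo()
          if r is None:
              raise RuntimeError("using the obsstore of a deallocated repo")
          return r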
@@ -178,6 +178,12 @@ def supportinternal(repo):
     return requirements.INTERNAL_PHASE_REQUIREMENT in repo.requirements
 
 
+def supportarchived(repo):
+    # type: (localrepo.localrepository) -> bool
+    """True if the archived phase can be used on a repository"""
+    return requirements.ARCHIVED_PHASE_REQUIREMENT in repo.requirements
+
+
 def _readroots(repo, phasedefaults=None):
     # type: (localrepo.localrepository, Optional[Phasedefaults]) -> Tuple[Phaseroots, bool]
     """Read phase roots from disk
@@ -642,7 +648,12 @@ class phasecache:
         # phaseroots values, replace them.
         if revs is None:
             revs = []
-        if targetphase in (archived, internal) and not supportinternal(repo):
+        if (
+            targetphase == internal
+            and not supportinternal(repo)
+            or targetphase == archived
+            and not supportarchived(repo)
+        ):
             name = phasenames[targetphase]
             msg = b'this repository does not support the %s phase' % name
             raise error.ProgrammingError(msg)
@@ -29,7 +29,11 @@ SPARSE_REQUIREMENT = b'exp-sparse'

 # Enables the internal phase which is used to hide changesets instead
 # of stripping them
-INTERNAL_PHASE_REQUIREMENT = b'internal-phase'
+INTERNAL_PHASE_REQUIREMENT = b'internal-phase-2'
+
+# Enables the internal phase which is used to hide changesets instead
+# of stripping them
+ARCHIVED_PHASE_REQUIREMENT = b'exp-archived-phase'

 # Stores manifest in Tree structure
 TREEMANIFEST_REQUIREMENT = b'treemanifest'
@@ -107,6 +111,7 @@ WORKING_DIR_REQUIREMENTS = {
 #
 # note: the list is currently inherited from previous code and miss some relevant requirement while containing some irrelevant ones.
 STREAM_FIXED_REQUIREMENTS = {
+    ARCHIVED_PHASE_REQUIREMENT,
     BOOKMARKS_IN_STORE_REQUIREMENT,
     CHANGELOGV2_REQUIREMENT,
     COPIESSDC_REQUIREMENT,
@@ -235,6 +235,8 @@ FILE_TOO_SHORT_MSG = _(
     b' expected %d bytes from offset %d, data size is %d'
 )

+hexdigits = b'0123456789abcdefABCDEF'
+

 class revlog:
     """
@@ -1509,7 +1511,7 @@ class revlog:
                 ambiguous = True
                 # fall through to slow path that filters hidden revisions
             except (AttributeError, ValueError):
-                # we are pure python, or key was too short to search radix tree
+                # we are pure python, or key is not hex
                 pass
         if ambiguous:
             raise error.AmbiguousPrefixLookupError(
@@ -1523,6 +1525,11 @@ class revlog:
             # hex(node)[:...]
             l = len(id) // 2 * 2  # grab an even number of digits
             try:
+                # we're dropping the last digit, so let's check that it's hex,
+                # to avoid the expensive computation below if it's not
+                if len(id) % 2 > 0:
+                    if not (id[-1] in hexdigits):
+                        return None
                 prefix = bin(id[:l])
             except binascii.Error:
                 pass
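The five added lines are a cheap early-out: `bin()` only ever sees the even-length prefix `id[:l]`, so a trailing non-hex character in an odd-length key would never raise `binascii.Error` and the lookup would fall through to the expensive ambiguity scan. A small self-contained sketch of the same guard (function name hypothetical):

import binascii

hexdigits = b'0123456789abcdefABCDEF'

def parse_prefix(id):
    # Grab an even number of digits; binascii.unhexlify() rejects
    # odd-length input.
    l = len(id) // 2 * 2
    # The dropped last digit never reaches unhexlify(), so check it by
    # hand to bail out early on garbage like b'abz'.
    if len(id) % 2 > 0 and id[-1] not in hexdigits:
        return None
    try:
        return binascii.unhexlify(id[:l])
    except binascii.Error:
        return None

print(parse_prefix(b'abc'))  # b'\xab' -- valid odd-length hex prefix
print(parse_prefix(b'abz'))  # None   -- caught by the cheap guard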
@@ -1768,7 +1775,17 @@ class revlog:
         if base == nullrev:
             return True
         p1 = entry[5]
+        while self.length(p1) == 0:
+            b = self.deltaparent(p1)
+            if b == p1:
+                break
+            p1 = b
         p2 = entry[6]
+        while self.length(p2) == 0:
+            b = self.deltaparent(p2)
+            if b == p2:
+                break
+            p2 = b
         if base == p1 or base == p2:
             return False
         return self.issnapshot(base)
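The two new loops walk the delta-parent chain past zero-length revisions, so a delta that is effectively based on a parent through one or more empty revisions is still classified as a regular delta rather than a snapshot. A toy model of that walk (plain dicts standing in for the revlog index; data hypothetical):

# lengths[r] is the stored size of revision r, deltaparent[r] its delta base;
# empty revisions are skipped until a non-empty one (or a self-parented
# root) is found.
lengths = {0: 10, 1: 0, 2: 0, 3: 7}
deltaparent = {0: 0, 1: 0, 2: 1, 3: 2}

def effective_parent(p):
    while lengths[p] == 0:
        b = deltaparent[p]
        if b == p:
            break
        p = b
    return p

print(effective_parent(2))  # 0: the empty revisions 2 and 1 are skipped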
@@ -7,7 +7,10 @@


 import binascii
+import functools
+import random
 import re
+import sys

 from .i18n import _
 from .pycompat import getattr
@@ -2339,14 +2342,28 @@ def roots(repo, subset, x):
     parents = repo.changelog.parentrevs

     def filter(r):
-        for p in parents(r):
-            if 0 <= p and p in s:
-                return False
+        try:
+            for p in parents(r):
+                if 0 <= p and p in s:
+                    return False
+        except error.WdirUnsupported:
+            for p in repo[None].parents():
+                if p.rev() in s:
+                    return False
         return True

     return subset & s.filter(filter, condrepr=b'<roots>')


+MAXINT = sys.maxsize
+MININT = -MAXINT - 1
+
+
+def pick_random(c, gen=random):
+    # exists as its own function to make it possible to overwrite the seed
+    return gen.randint(MININT, MAXINT)
+
+
 _sortkeyfuncs = {
     b'rev': scmutil.intrev,
     b'branch': lambda c: c.branch(),
@@ -2355,12 +2372,17 @@ def roots(repo, subset, x):
     b'author': lambda c: c.user(),
     b'date': lambda c: c.date()[0],
     b'node': scmutil.binnode,
+    b'random': pick_random,
 }


 def _getsortargs(x):
     """Parse sort options into (set, [(key, reverse)], opts)"""
-    args = getargsdict(x, b'sort', b'set keys topo.firstbranch')
+    args = getargsdict(
+        x,
+        b'sort',
+        b'set keys topo.firstbranch random.seed',
+    )
     if b'set' not in args:
         # i18n: "sort" is a keyword
         raise error.ParseError(_(b'sort requires one or two arguments'))
@@ -2400,6 +2422,20 @@ def _getsortargs(x):
                 )
             )

+    if b'random.seed' in args:
+        if any(k == b'random' for k, reverse in keyflags):
+            s = args[b'random.seed']
+            seed = getstring(s, _(b"random.seed must be a string"))
+            opts[b'random.seed'] = seed
+        else:
+            # i18n: "random" and "random.seed" are keywords
+            raise error.ParseError(
+                _(
+                    b'random.seed can only be used '
+                    b'when using the random sort key'
+                )
+            )
+
     return args[b'set'], keyflags, opts


@@ -2419,11 +2455,14 @@ def sort(repo, subset, x, order):
     - ``date`` for the commit date
     - ``topo`` for a reverse topographical sort
     - ``node`` the nodeid of the revision
+    - ``random`` randomly shuffle revisions

     The ``topo`` sort order cannot be combined with other sort keys. This sort
     takes one optional argument, ``topo.firstbranch``, which takes a revset that
     specifies what topographical branches to prioritize in the sort.

+    The ``random`` sort takes one optional ``random.seed`` argument to control
+    the pseudo-randomness of the result.
     """
     s, keyflags, opts = _getsortargs(x)
     revs = getset(repo, subset, s, order)
@@ -2435,10 +2474,20 @@ def sort(repo, subset, x, order):
         return revs
     elif keyflags[0][0] == b"topo":
         firstbranch = ()
+        parentrevs = repo.changelog.parentrevs
+        parentsfunc = parentrevs
+        if wdirrev in revs:
+
+            def parentsfunc(r):
+                try:
+                    return parentrevs(r)
+                except error.WdirUnsupported:
+                    return [p.rev() for p in repo[None].parents()]
+
         if b'topo.firstbranch' in opts:
             firstbranch = getset(repo, subset, opts[b'topo.firstbranch'])
         revs = baseset(
-            dagop.toposort(revs, repo.changelog.parentrevs, firstbranch),
+            dagop.toposort(revs, parentsfunc, firstbranch),
             istopo=True,
         )
         if keyflags[0][1]:
@@ -2448,7 +2497,12 @@ def sort(repo, subset, x, order):
     # sort() is guaranteed to be stable
     ctxs = [repo[r] for r in revs]
     for k, reverse in reversed(keyflags):
-        ctxs.sort(key=_sortkeyfuncs[k], reverse=reverse)
+        func = _sortkeyfuncs[k]
+        if k == b'random' and b'random.seed' in opts:
+            seed = opts[b'random.seed']
+            r = random.Random(seed)
+            func = functools.partial(func, gen=r)
+        ctxs.sort(key=func, reverse=reverse)
     return baseset([c.rev() for c in ctxs])


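Taken together with the docstring hunk, this makes a revset such as `sort(all(), random, random.seed=2022)` reproducible: the seed creates a dedicated `random.Random` instance that is bound to `pick_random` via `functools.partial`. A self-contained sketch of just that seeding mechanism:

import functools
import random
import sys

MAXINT = sys.maxsize
MININT = -MAXINT - 1

def pick_random(c, gen=random):
    # every item gets an independent random sort key
    return gen.randint(MININT, MAXINT)

items = [b'rev0', b'rev1', b'rev2', b'rev3']
seeded = random.Random(b'2022')  # fixed seed -> reproducible shuffle
key = functools.partial(pick_random, gen=seeded)
print(sorted(items, key=key))  # same order on every run with this seed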
@@ -1191,7 +1191,7 @@ def cleanupnodes(
             obsolete.createmarkers(
                 repo, rels, operation=operation, metadata=metadata
             )
-        elif phases.supportinternal(repo) and mayusearchived:
+        elif phases.supportarchived(repo) and mayusearchived:
             # this assume we do not have "unstable" nodes above the cleaned ones
             allreplaced = set()
             for ns in replacements.keys():
@@ -22,6 +22,7 @@ shelve".
 """

 import collections
+import io
 import itertools
 import stat

@@ -98,6 +99,17 @@ class ShelfDir:
         return sorted(info, reverse=True)


+def _use_internal_phase(repo):
+    return (
+        phases.supportinternal(repo)
+        and repo.ui.config(b'shelve', b'store') == b'internal'
+    )
+
+
+def _target_phase(repo):
+    return phases.internal if _use_internal_phase(repo) else phases.secret
+
+
 class Shelf:
     """Represents a shelf, including possibly multiple files storing it.

@@ -111,12 +123,19 @@ class Shelf:
         self.name = name

     def exists(self):
-        return self.vfs.exists(self.name + b'.patch') and self.vfs.exists(
-            self.name + b'.hg'
-        )
+        return self._exists(b'.shelve') or self._exists(b'.patch', b'.hg')
+
+    def _exists(self, *exts):
+        return all(self.vfs.exists(self.name + ext) for ext in exts)

     def mtime(self):
-        return self.vfs.stat(self.name + b'.patch')[stat.ST_MTIME]
+        try:
+            return self._stat(b'.shelve')[stat.ST_MTIME]
+        except FileNotFoundError:
+            return self._stat(b'.patch')[stat.ST_MTIME]
+
+    def _stat(self, ext):
+        return self.vfs.stat(self.name + ext)

     def writeinfo(self, info):
         scmutil.simplekeyvaluefile(self.vfs, self.name + b'.shelve').write(info)
@@ -159,9 +178,7 @@ class Shelf:
         filename = self.name + b'.hg'
         fp = self.vfs(filename)
         try:
-            targetphase = phases.internal
-            if not phases.supportinternal(repo):
-                targetphase = phases.secret
+            targetphase = _target_phase(repo)
             gen = exchange.readbundle(repo.ui, fp, filename, self.vfs)
             pretip = repo[b'tip']
             bundle2.applybundle(
@@ -183,6 +200,27 @@ class Shelf:
     def open_patch(self, mode=b'rb'):
         return self.vfs(self.name + b'.patch', mode)

+    def patch_from_node(self, repo, node):
+        repo = repo.unfiltered()
+        match = _optimized_match(repo, node)
+        fp = io.BytesIO()
+        cmdutil.exportfile(
+            repo,
+            [node],
+            fp,
+            opts=mdiff.diffopts(git=True),
+            match=match,
+        )
+        fp.seek(0)
+        return fp
+
+    def load_patch(self, repo):
+        try:
+            # prefer node-based shelf
+            return self.patch_from_node(repo, self.readinfo()[b'node'])
+        except (FileNotFoundError, error.RepoLookupError):
+            return self.open_patch()
+
     def _backupfilename(self, backupvfs, filename):
         def gennames(base):
             yield base
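`load_patch()` prefers regenerating the patch text from the shelved node and only falls back to the stored `.patch` file when the info file is missing or the node has been stripped. The control flow reduced to a sketch (the callables stand in for the shelf internals, and `LookupError` stands in for `error.RepoLookupError`):

def load_patch(read_node_patch, open_patch_file):
    try:
        # prefer the node-based shelf: it always reflects the real commit
        return read_node_patch()
    except (FileNotFoundError, LookupError):
        # fall back to the historical on-disk .patch file
        return open_patch_file()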
@@ -210,6 +248,15 @@ class Shelf:
         self.vfs.tryunlink(self.name + b'.' + ext)


+def _optimized_match(repo, node):
+    """
+    Create a matcher so that prefetch doesn't attempt to fetch
+    the entire repository pointlessly, and as an optimisation
+    for movedirstate, if needed.
+    """
+    return scmutil.matchfiles(repo, repo[node].files())
+
+
 class shelvedstate:
     """Handle persistence during unshelving operations.

@@ -447,9 +494,7 @@ def getcommitfunc(extra, interactive, ed
        if hasmq:
            saved, repo.mq.checkapplied = repo.mq.checkapplied, False

-       targetphase = phases.internal
-       if not phases.supportinternal(repo):
-           targetphase = phases.secret
+       targetphase = _target_phase(repo)
        overrides = {(b'phases', b'new-commit'): targetphase}
        try:
            editor_ = False
@@ -510,7 +555,7 @@ def _includeunknownfiles(repo, pats, opt


 def _finishshelve(repo, tr):
-    if phases.supportinternal(repo):
+    if _use_internal_phase(repo):
         tr.close()
     else:
         _aborttransaction(repo, tr)
@@ -579,10 +624,7 @@ def _docreatecmd(ui, repo, pats, opts):
        _nothingtoshelvemessaging(ui, repo, pats, opts)
        return 1

-    # Create a matcher so that prefetch doesn't attempt to fetch
-    # the entire repository pointlessly, and as an optimisation
-    # for movedirstate, if needed.
-    match = scmutil.matchfiles(repo, repo[node].files())
+    match = _optimized_match(repo, node)
    _shelvecreatedcommit(repo, node, name, match)

    ui.status(_(b'shelved as %s\n') % name)
@@ -668,7 +710,7 @@ def listcmd(ui, repo, pats, opts):
        ui.write(age, label=b'shelve.age')
        ui.write(b' ' * (12 - len(age)))
        used += 12
-       with shelf_dir.get(name).open_patch() as fp:
+       with shelf_dir.get(name).load_patch(repo) as fp:
            while True:
                line = fp.readline()
                if not line:
@@ -754,7 +796,7 @@ def unshelveabort(ui, repo, state):
        if state.activebookmark and state.activebookmark in repo._bookmarks:
            bookmarks.activate(repo, state.activebookmark)
        mergefiles(ui, repo, state.wctx, state.pendingctx)
-       if not phases.supportinternal(repo):
+       if not _use_internal_phase(repo):
            repair.strip(
                ui, repo, state.nodestoremove, backup=False, topic=b'shelve'
            )
@@ -816,9 +858,7 @@ def unshelvecontinue(ui, repo, state, op
        repo.setparents(state.pendingctx.node(), repo.nullid)
        repo.dirstate.write(repo.currenttransaction())

-       targetphase = phases.internal
-       if not phases.supportinternal(repo):
-           targetphase = phases.secret
+       targetphase = _target_phase(repo)
        overrides = {(b'phases', b'new-commit'): targetphase}
        with repo.ui.configoverride(overrides, b'unshelve'):
            with repo.dirstate.parentchange():
@@ -843,7 +883,7 @@ def unshelvecontinue(ui, repo, state, op
        mergefiles(ui, repo, state.wctx, shelvectx)
        restorebranch(ui, repo, state.branchtorestore)

-       if not phases.supportinternal(repo):
+       if not _use_internal_phase(repo):
            repair.strip(
                ui, repo, state.nodestoremove, backup=False, topic=b'shelve'
            )
@@ -957,7 +997,7 @@ def _createunshelvectx(ui, repo, shelvec
            user=shelvectx.user(),
        )
        if snode:
-           m = scmutil.matchfiles(repo, repo[snode].files())
+           m = _optimized_match(repo, snode)
            _shelvecreatedcommit(repo, snode, basename, m)

        return newnode, bool(snode)
@@ -1137,7 +1177,6 @@ def _dounshelve(ui, repo, basename, opts
    oldtiprev = len(repo)

    pctx = repo[b'.']
-   tmpwctx = pctx
    # The goal is to have a commit structure like so:
    # ...-> pctx -> tmpwctx -> shelvectx
    # where tmpwctx is an optional commit with the user's pending changes
@@ -1145,9 +1184,7 @@ def _dounshelve(ui, repo, basename, opts
    # to the original pctx.

    activebookmark = _backupactivebookmark(repo)
-   tmpwctx, addedbefore = _commitworkingcopychanges(
-       ui, repo, opts, tmpwctx
-   )
+   tmpwctx, addedbefore = _commitworkingcopychanges(ui, repo, opts, pctx)
    repo, shelvectx = _unshelverestorecommit(ui, repo, tr, basename)
    _checkunshelveuntrackedproblems(ui, repo, shelvectx)
    branchtorestore = b''
@@ -1099,6 +1099,10 @@ class svnsubrepo(abstractsubrepo):
        # --non-interactive.
        if commands[0] in (b'update', b'checkout', b'commit'):
            cmd.append(b'--non-interactive')
+       if util.safehasattr(subprocess, 'CREATE_NO_WINDOW'):
+           # On Windows, prevent command prompts windows from popping up when
+           # running in pythonw.
+           extrakw['creationflags'] = getattr(subprocess, 'CREATE_NO_WINDOW')
        cmd.extend(commands)
        if filename is not None:
            path = self.wvfs.reljoin(
@@ -1150,7 +1154,7 @@ class svnsubrepo(abstractsubrepo):
        # commit revision so we can compare the subrepo state with
        # both. We used to store the working directory one.
        output, err = self._svncommand([b'info', b'--xml'])
-       doc = xml.dom.minidom.parseString(output)
+       doc = xml.dom.minidom.parseString(output)  # pytype: disable=pyi-error
        entries = doc.getElementsByTagName('entry')
        lastrev, rev = b'0', b'0'
        if entries:
@@ -1174,7 +1178,7 @@ class svnsubrepo(abstractsubrepo):
        """
        output, err = self._svncommand([b'status', b'--xml'])
        externals, changes, missing = [], [], []
-       doc = xml.dom.minidom.parseString(output)
+       doc = xml.dom.minidom.parseString(output)  # pytype: disable=pyi-error
        for e in doc.getElementsByTagName('entry'):
            s = e.getElementsByTagName('wc-status')
            if not s:
@@ -1319,7 +1323,7 @@ class svnsubrepo(abstractsubrepo):
    @annotatesubrepoerror
    def files(self):
        output = self._svncommand([b'list', b'--recursive', b'--xml'])[0]
-       doc = xml.dom.minidom.parseString(output)
+       doc = xml.dom.minidom.parseString(output)  # pytype: disable=pyi-error
        paths = []
        for e in doc.getElementsByTagName('entry'):
            kind = pycompat.bytestr(e.getAttribute('kind'))
@@ -1469,6 +1473,11 @@ class gitsubrepo(abstractsubrepo):
            # insert the argument in the front,
            # the end of git diff arguments is used for paths
            commands.insert(1, b'--color')
+       extrakw = {}
+       if util.safehasattr(subprocess, 'CREATE_NO_WINDOW'):
+           # On Windows, prevent command prompts windows from popping up when
+           # running in pythonw.
+           extrakw['creationflags'] = getattr(subprocess, 'CREATE_NO_WINDOW')
        p = subprocess.Popen(
            pycompat.rapply(
                procutil.tonativestr, [self._gitexecutable] + commands
@@ -1479,6 +1488,7 @@ class gitsubrepo(abstractsubrepo):
            close_fds=procutil.closefds,
            stdout=subprocess.PIPE,
            stderr=errpipe,
+           **extrakw
        )
        if stream:
            return p.stdout, None
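Both subrepo types now apply the same Windows-only guard. Since `subprocess.CREATE_NO_WINDOW` only exists on Windows builds of Python, it is looked up dynamically and forwarded through `**extrakw`, keeping the `Popen` call portable. A runnable sketch of the pattern outside Mercurial:

import subprocess
import sys

extrakw = {}
if hasattr(subprocess, 'CREATE_NO_WINDOW'):
    # On Windows, prevent a console window from popping up under pythonw.
    extrakw['creationflags'] = subprocess.CREATE_NO_WINDOW

p = subprocess.Popen(
    [sys.executable, '--version'],
    stdout=subprocess.PIPE,
    **extrakw
)
print(p.communicate()[0])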
@@ -390,6 +390,14 @@ def person(author):
     return stringutil.person(author)


+@templatefilter(b'reverse')
+def reverse(list_):
+    """List. Reverses the order of list items."""
+    if isinstance(list_, list):
+        return templateutil.hybridlist(list_[::-1], name=b'item')
+    raise error.ParseError(_(b'not reversible'))
+
+
 @templatefilter(b'revescape', intype=bytes)
 def revescape(text):
     """Any text. Escapes all "special" characters, except @.
@@ -222,13 +222,16 @@ def _generic_start_transaction(handler,
     h.headers = None


-def _generic_proxytunnel(self):
+def _generic_proxytunnel(self: "httpsconnection"):
+    headers = self.headers  # pytype: disable=attribute-error
     proxyheaders = {
-        pycompat.bytestr(x): pycompat.bytestr(self.headers[x])
-        for x in self.headers
+        pycompat.bytestr(x): pycompat.bytestr(headers[x])
+        for x in headers
         if x.lower().startswith('proxy-')
     }
-    self.send(b'CONNECT %s HTTP/1.0\r\n' % self.realhostport)
+    realhostport = self.realhostport  # pytype: disable=attribute-error
+    self.send(b'CONNECT %s HTTP/1.0\r\n' % realhostport)
+
     for header in proxyheaders.items():
         self.send(b'%s: %s\r\n' % header)
     self.send(b'\r\n')
@@ -237,10 +240,14 @@ def _generic_proxytunnel(self):
     # httplib.HTTPConnection as there are no adequate places to
     # override functions to provide the needed functionality.

+    # pytype: disable=attribute-error
     res = self.response_class(self.sock, method=self._method)
+    # pytype: enable=attribute-error

     while True:
+        # pytype: disable=attribute-error
         version, status, reason = res._read_status()
+        # pytype: enable=attribute-error
         if status != httplib.CONTINUE:
             break
         # skip lines that are all whitespace
@@ -323,14 +330,15 @@ if has_https:
            self.sock = socket.create_connection((self.host, self.port))

            host = self.host
-           if self.realhostport:  # use CONNECT proxy
+           realhostport = self.realhostport  # pytype: disable=attribute-error
+           if realhostport:  # use CONNECT proxy
                _generic_proxytunnel(self)
-               host = self.realhostport.rsplit(b':', 1)[0]
+               host = realhostport.rsplit(b':', 1)[0]
            self.sock = sslutil.wrapsocket(
                self.sock,
                self.key_file,
                self.cert_file,
-               ui=self.ui,
+               ui=self.ui,  # pytype: disable=attribute-error
                serverhostname=host,
            )
            sslutil.validatesocket(self.sock)
@@ -13,6 +13,12 @@ Mercurial XXX.

 == Backwards Compatibility Changes ==

+* chg worker processes will now correctly load per-repository configuration
+  when given both a relative `--repository` path and an alternate working
+  directory via `--cwd`. A side-effect of this change is that these workers
+  will now return an error if hg cannot find the current working directory,
+  even when a different directory is specified via `--cwd`.
+
 == Internal API Changes ==

 == Miscellaneous ==
@@ -468,6 +468,7 @@ dependencies = [
 "log",
 "memmap2",
 "micro-timer",
+ "once_cell",
 "ouroboros",
 "pretty_assertions",
 "rand 0.8.5",
@@ -687,6 +688,12 @@ dependencies = [
 ]

 [[package]]
+name = "once_cell"
+version = "1.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2f7254b99e31cad77da24b08ebf628882739a608578bb1bcdfc1f9c21260d7c0"
+
+[[package]]
 name = "opaque-debug"
 version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -981,6 +988,7 @@ dependencies = [
 "lazy_static",
 "log",
 "micro-timer",
+ "rayon",
 "regex",
 "users",
 "which",
@@ -35,6 +35,9 @@ log = "0.4.8"
 memmap2 = { version = "0.5.3", features = ["stable_deref_trait"] }
 zstd = "0.5.3"
 format-bytes = "0.3.0"
+# once_cell 1.15 uses edition 2021, while the heptapod CI
+# uses an old version of Cargo that doesn't support it.
+once_cell = "=1.14.0"

 # We don't use the `miniz-oxide` backend to not change rhg benchmarks and until
 # we have a clearer view of which backend is the fastest.
@@ -11,6 +11,8 @@

 mod config;
 mod layer;
+mod plain_info;
 mod values;
 pub use config::{Config, ConfigSource, ConfigValueParseError};
 pub use layer::{ConfigError, ConfigOrigin, ConfigParseError};
+pub use plain_info::PlainInfo;
@@ -12,6 +12,7 @@ use super::values;
 use crate::config::layer::{
     ConfigError, ConfigLayer, ConfigOrigin, ConfigValue,
 };
+use crate::config::plain_info::PlainInfo;
 use crate::utils::files::get_bytes_from_os_str;
 use format_bytes::{write_bytes, DisplayBytes};
 use std::collections::HashSet;
@@ -27,6 +28,7 @@ use crate::errors::{HgResultExt, IoResul
 #[derive(Clone)]
 pub struct Config {
     layers: Vec<layer::ConfigLayer>,
+    plain: PlainInfo,
 }

 impl DisplayBytes for Config {
@@ -83,17 +85,55 @@ impl fmt::Display for ConfigValueParseEr
     }
 }

+/// Returns true if the config item is disabled by PLAIN or PLAINEXCEPT
+fn should_ignore(plain: &PlainInfo, section: &[u8], item: &[u8]) -> bool {
+    // duplication with [_applyconfig] in [ui.py],
+    if !plain.is_plain() {
+        return false;
+    }
+    if section == b"alias" {
+        return plain.plainalias();
+    }
+    if section == b"revsetalias" {
+        return plain.plainrevsetalias();
+    }
+    if section == b"templatealias" {
+        return plain.plaintemplatealias();
+    }
+    if section == b"ui" {
+        let to_delete: &[&[u8]] = &[
+            b"debug",
+            b"fallbackencoding",
+            b"quiet",
+            b"slash",
+            b"logtemplate",
+            b"message-output",
+            b"statuscopies",
+            b"style",
+            b"traceback",
+            b"verbose",
+        ];
+        return to_delete.contains(&item);
+    }
+    let sections_to_delete: &[&[u8]] =
+        &[b"defaults", b"commands", b"command-templates"];
+    return sections_to_delete.contains(&section);
+}
+
 impl Config {
     /// The configuration to use when printing configuration-loading errors
     pub fn empty() -> Self {
-        Self { layers: Vec::new() }
+        Self {
+            layers: Vec::new(),
+            plain: PlainInfo::empty(),
+        }
     }

     /// Load system and user configuration from various files.
     ///
     /// This is also affected by some environment variables.
     pub fn load_non_repo() -> Result<Self, ConfigError> {
-        let mut config = Self { layers: Vec::new() };
+        let mut config = Self::empty();
         let opt_rc_path = env::var_os("HGRCPATH");
         // HGRCPATH replaces system config
         if opt_rc_path.is_none() {
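As the comment notes, this duplicates `_applyconfig` from `ui.py` on the Rust side. A rough Python rendering of the same decision table (the `plain*` accessors are modeled here as an HGPLAINEXCEPT feature set, which is an assumption about their semantics):

UI_ITEMS = {b'debug', b'fallbackencoding', b'quiet', b'slash',
            b'logtemplate', b'message-output', b'statuscopies', b'style',
            b'traceback', b'verbose'}
SECTIONS = {b'defaults', b'commands', b'command-templates'}
ALIAS_SECTIONS = {b'alias', b'revsetalias', b'templatealias'}

def should_ignore(is_plain, except_features, section, item):
    # With HGPLAIN inactive nothing is masked.
    if not is_plain:
        return False
    # Alias sections are masked unless exempted via HGPLAINEXCEPT.
    if section in ALIAS_SECTIONS:
        return section not in except_features
    if section == b'ui':
        return item in UI_ITEMS
    return section in SECTIONS

print(should_ignore(True, set(), b'alias', b'st'))       # True
print(should_ignore(True, {b'alias'}, b'alias', b'st'))  # False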
@@ -266,7 +306,10 @@ impl Config {
             }
         }

-        Ok(Config { layers })
+        Ok(Config {
+            layers,
+            plain: PlainInfo::empty(),
+        })
     }

     /// Loads the per-repository config into a new `Config` which is combined
@@ -283,6 +326,7 @@ impl Config {

         let mut repo_config = Self {
             layers: other_layers,
+            plain: PlainInfo::empty(),
         };
         for path in repo_config_files {
             // TODO: check if this file should be trusted:
@@ -293,6 +337,10 @@ impl Config {
         Ok(repo_config)
     }

+    pub fn apply_plain(&mut self, plain: PlainInfo) {
+        self.plain = plain;
+    }
+
     fn get_parse<'config, T: 'config>(
         &'config self,
         section: &[u8],
@@ -413,10 +461,25 @@ impl Config {
         section: &[u8],
         item: &[u8],
     ) -> Option<(&ConfigLayer, &ConfigValue)> {
+        // Filter out the config items that are hidden by [PLAIN].
+        // This differs from python hg where we delete them from the config.
+        let should_ignore = should_ignore(&self.plain, &section, &item);
         for layer in self.layers.iter().rev() {
             if !layer.trusted {
                 continue;
             }
+            //The [PLAIN] config should not affect the defaults.
+            //
+            // However, PLAIN should also affect the "tweaked" defaults (unless
+            // "tweakdefault" is part of "HGPLAINEXCEPT").
+            //
+            // In practice the tweak-default layer is only added when it is
+            // relevant, so we can safely always take it into
+            // account here.
+            if should_ignore && !(layer.origin == ConfigOrigin::Tweakdefaults)
+            {
+                continue;
+            }
             if let Some(v) = layer.get(&section, &item) {
                 return Some((&layer, v));
             }
@@ -504,6 +567,38 @@ impl Config {
         }
         res
     }
+
+    // a config layer that's introduced by ui.tweakdefaults
+    fn tweakdefaults_layer() -> ConfigLayer {
+        let mut layer = ConfigLayer::new(ConfigOrigin::Tweakdefaults);
+
+        let mut add = |section: &[u8], item: &[u8], value: &[u8]| {
+            layer.add(
+                section[..].into(),
+                item[..].into(),
+                value[..].into(),
+                None,
+            );
+        };
+        // duplication of [tweakrc] from [ui.py]
+        add(b"ui", b"rollback", b"False");
+        add(b"ui", b"statuscopies", b"yes");
+        add(b"ui", b"interface", b"curses");
+        add(b"ui", b"relative-paths", b"yes");
+        add(b"commands", b"grep.all-files", b"True");
+        add(b"commands", b"update.check", b"noconflict");
+        add(b"commands", b"status.verbose", b"True");
+        add(b"commands", b"resolve.explicit-re-merge", b"True");
+        add(b"git", b"git", b"1");
+        add(b"git", b"showfunc", b"1");
+        add(b"git", b"word-diff", b"1");
+        return layer;
+    }
+
+    // introduce the tweaked defaults as implied by ui.tweakdefaults
+    pub fn tweakdefaults<'a>(&mut self) -> () {
+        self.layers.insert(0, Config::tweakdefaults_layer());
+    }
 }

 #[cfg(test)]
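`tweakdefaults()` inserts the layer at index 0 while lookups scan `self.layers` in reverse, so the tweaked defaults sit at the lowest priority and any user or repository configuration still wins. A tiny Python model of that ordering (values borrowed from the tweakrc table above):

layers = [
    {('ui', 'statuscopies'): 'yes'},  # tweakdefaults, inserted at index 0
    {('ui', 'statuscopies'): 'no'},   # user hgrc, appended later
]

def get(section, item):
    # scan from last (highest priority) to first
    for layer in reversed(layers):
        if (section, item) in layer:
            return layer[(section, item)]
    return None

print(get('ui', 'statuscopies'))  # -> 'no': the user config wins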
@@ -81,6 +81,7 @@ impl ConfigLayer {
                     String::from_utf8_lossy(arg),
                 ),
                 CONFIG_PARSE_ERROR_ABORT,
+                None,
             ))?
         }
     }
@@ -299,6 +300,8 @@ pub struct ConfigValue {
 pub enum ConfigOrigin {
     /// From a configuration file
     File(PathBuf),
+    /// From [ui.tweakdefaults]
+    Tweakdefaults,
     /// From a `--config` CLI argument
     CommandLine,
     /// From a `--color` CLI argument
@@ -321,6 +324,9 @@ impl DisplayBytes for ConfigOrigin {
             ConfigOrigin::CommandLine => out.write_all(b"--config"),
             ConfigOrigin::CommandLineColor => out.write_all(b"--color"),
             ConfigOrigin::Environment(e) => write_bytes!(out, b"${}", e),
+            ConfigOrigin::Tweakdefaults => {
+                write_bytes!(out, b"ui.tweakdefaults")
+            }
         }
     }
 }
@@ -30,6 +30,10 @@ impl DirstateParents {
         p1: NULL_NODE,
         p2: NULL_NODE,
     };
+
+    pub fn is_merge(&self) -> bool {
+        return !(self.p2 == NULL_NODE);
+    }
 }

 pub type StateMapIter<'a> = Box<
@@ -20,6 +20,7 @@ use crate::PatternFileWarning;
 use crate::StatusError;
 use crate::StatusOptions;
 use micro_timer::timed;
+use once_cell::sync::OnceCell;
 use rayon::prelude::*;
 use sha1::{Digest, Sha1};
 use std::borrow::Cow;
@@ -126,14 +127,14 @@ pub fn status<'dirstate>(
     };
     let is_at_repo_root = true;
     let hg_path = &BorrowedPath::OnDisk(HgPath::new(""));
-    let has_ignored_ancestor = false;
+    let has_ignored_ancestor = HasIgnoredAncestor::create(None, hg_path);
     let root_cached_mtime = None;
     let root_dir_metadata = None;
     // If the path we have for the repository root is a symlink, do follow it.
     // (As opposed to symlinks within the working directory which are not
     // followed, using `std::fs::symlink_metadata`.)
     common.traverse_fs_directory_and_dirstate(
-        has_ignored_ancestor,
+        &has_ignored_ancestor,
         dmap.root.as_ref(),
         hg_path,
         &root_dir,
@@ -196,6 +197,40 @@ enum Outcome {
     Unsure,
 }

+/// Lazy computation of whether a given path has a hgignored
+/// ancestor.
+struct HasIgnoredAncestor<'a> {
+    /// `path` and `parent` constitute the inputs to the computation,
+    /// `cache` stores the outcome.
+    path: &'a HgPath,
+    parent: Option<&'a HasIgnoredAncestor<'a>>,
+    cache: OnceCell<bool>,
+}
+
+impl<'a> HasIgnoredAncestor<'a> {
+    fn create(
+        parent: Option<&'a HasIgnoredAncestor<'a>>,
+        path: &'a HgPath,
+    ) -> HasIgnoredAncestor<'a> {
+        Self {
+            path,
+            parent,
+            cache: OnceCell::new(),
+        }
+    }
+
+    fn force<'b>(&self, ignore_fn: &IgnoreFnType<'b>) -> bool {
+        match self.parent {
+            None => false,
+            Some(parent) => {
+                *(parent.cache.get_or_init(|| {
+                    parent.force(ignore_fn) || ignore_fn(&self.path)
+                }))
+            }
+        }
+    }
+}
+
 impl<'a, 'tree, 'on_disk> StatusCommon<'a, 'tree, 'on_disk> {
     fn push_outcome(
         &self,
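The `OnceCell` makes the ancestor check pay-as-you-go: each node computes "is anything above me ignored?" at most once, and only when a filesystem-only file actually forces it. A loose Python transliteration of the structure shown above (a plain cached attribute standing in for `OnceCell`; not Mercurial API):

class HasIgnoredAncestor:
    def __init__(self, parent, path):
        self.parent = parent  # another HasIgnoredAncestor, or None
        self.path = path
        self.cache = None     # plays the role of OnceCell<bool>

    def force(self, ignore_fn):
        # the repository root (no parent) never has an ignored ancestor
        if self.parent is None:
            return False
        if self.parent.cache is None:
            # computed once, then reused for every later query
            self.parent.cache = (self.parent.force(ignore_fn)
                                 or ignore_fn(self.path))
        return self.parent.cache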
@@ -318,9 +353,9 @@ impl<'a, 'tree, 'on_disk> StatusCommon<'

     /// Returns whether all child entries of the filesystem directory have a
     /// corresponding dirstate node or are ignored.
-    fn traverse_fs_directory_and_dirstate(
+    fn traverse_fs_directory_and_dirstate<'ancestor>(
         &self,
-        has_ignored_ancestor: bool,
+        has_ignored_ancestor: &'ancestor HasIgnoredAncestor<'ancestor>,
         dirstate_nodes: ChildNodesRef<'tree, 'on_disk>,
         directory_hg_path: &BorrowedPath<'tree, 'on_disk>,
         directory_fs_path: &Path,
@@ -418,7 +453,7 @@ impl<'a, 'tree, 'on_disk> StatusCommon<'
                 }
                 Right(fs_entry) => {
                     has_dirstate_node_or_is_ignored = self.traverse_fs_only(
-                        has_ignored_ancestor,
+                        has_ignored_ancestor.force(&self.ignore_fn),
                         directory_hg_path,
                         fs_entry,
                     )
@@ -429,12 +464,12 @@ impl<'a, 'tree, 'on_disk> StatusCommon<'
             .try_reduce(|| true, |a, b| Ok(a && b))
     }

-    fn traverse_fs_and_dirstate(
+    fn traverse_fs_and_dirstate<'ancestor>(
         &self,
         fs_path: &Path,
         fs_metadata: &std::fs::Metadata,
         dirstate_node: NodeRef<'tree, 'on_disk>,
-        has_ignored_ancestor: bool,
+        has_ignored_ancestor: &'ancestor HasIgnoredAncestor<'ancestor>,
     ) -> Result<(), DirstateV2ParseError> {
         self.check_for_outdated_directory_cache(&dirstate_node)?;
         let hg_path = &dirstate_node.full_path_borrowed(self.dmap.on_disk)?;
@@ -454,11 +489,14 @@ impl<'a, 'tree, 'on_disk> StatusCommon<'
                 .traversed
                 .push(hg_path.detach_from_tree())
         }
-        let is_ignored = has_ignored_ancestor || (self.ignore_fn)(hg_path);
+        let is_ignored = HasIgnoredAncestor::create(
+            Some(&has_ignored_ancestor),
+            hg_path,
+        );
         let is_at_repo_root = false;
         let children_all_have_dirstate_node_or_are_ignored = self
             .traverse_fs_directory_and_dirstate(
-                is_ignored,
+                &is_ignored,
                 dirstate_node.children(self.dmap.on_disk)?,
                 hg_path,
                 fs_path,
@@ -472,14 +510,14 @@ impl<'a, 'tree, 'on_disk> StatusCommon<'
                 dirstate_node,
             )?
         } else {
-            if file_or_symlink && self.matcher.matches(hg_path) {
+            if file_or_symlink && self.matcher.matches(&hg_path) {
                 if let Some(entry) = dirstate_node.entry()? {
                     if !entry.any_tracked() {
                         // Forward-compat if we start tracking unknown/ignored
                         // files for caching reasons
                         self.mark_unknown_or_ignored(
-                            has_ignored_ancestor,
-                            hg_path,
+                            has_ignored_ancestor.force(&self.ignore_fn),
+                            &hg_path,
                         );
                     }
                     if entry.added() {
@@ -495,7 +533,7 @@ impl<'a, 'tree, 'on_disk> StatusCommon<'
                 // `node.entry.is_none()` indicates a "directory"
                 // node, but the filesystem has a file
                 self.mark_unknown_or_ignored(
-                    has_ignored_ancestor,
+                    has_ignored_ancestor.force(&self.ignore_fn),
                     hg_path,
                 );
             }
@@ -33,6 +33,7 @@ pub enum HgError {
     Abort {
         message: String,
         detailed_exit_code: exit_codes::ExitCode,
+        hint: Option<String>,
     },

     /// A configuration value is not in the expected syntax.
@@ -82,10 +83,12 @@ impl HgError {
     pub fn abort(
         explanation: impl Into<String>,
         exit_code: exit_codes::ExitCode,
+        hint: Option<String>,
     ) -> Self {
         HgError::Abort {
             message: explanation.into(),
             detailed_exit_code: exit_code,
+            hint,
         }
     }
 }
@@ -9,6 +9,10 @@ pub const ABORT: ExitCode = 255;
 // Abort when there is a config related error
 pub const CONFIG_ERROR_ABORT: ExitCode = 30;

+/// Indicates that the operation might work if retried in a different state.
+/// Examples: Unresolved merge conflicts, unfinished operations
+pub const STATE_ERROR: ExitCode = 20;
+
 // Abort when there is an error while parsing config
 pub const CONFIG_PARSE_ERROR_ABORT: ExitCode = 10;

@@ -314,6 +314,8 @@ lazy_static! {
         m.insert(b"rootglob".as_ref(), b"rootglob:".as_ref());
         m.insert(b"include".as_ref(), b"include:".as_ref());
         m.insert(b"subinclude".as_ref(), b"subinclude:".as_ref());
+        m.insert(b"path".as_ref(), b"path:".as_ref());
+        m.insert(b"rootfilesin".as_ref(), b"rootfilesin:".as_ref());
         m
     };
 }
@@ -329,6 +331,7 @@ pub enum PatternFileWarning {
 pub fn parse_pattern_file_contents(
     lines: &[u8],
     file_path: &Path,
+    default_syntax_override: Option<&[u8]>,
     warn: bool,
 ) -> Result<(Vec<IgnorePattern>, Vec<PatternFileWarning>), PatternError> {
     let comment_regex = Regex::new(r"((?:^|[^\\])(?:\\\\)*)#.*").unwrap();
@@ -338,7 +341,8 @@ pub fn parse_pattern_file_contents(
     let mut inputs: Vec<IgnorePattern> = vec![];
     let mut warnings: Vec<PatternFileWarning> = vec![];

-    let mut current_syntax = b"relre:".as_ref();
+    let mut current_syntax =
+        default_syntax_override.unwrap_or(b"relre:".as_ref());

     for (line_number, mut line) in lines.split(|c| *c == b'\n').enumerate() {
         let line_number = line_number + 1;
@@ -413,7 +417,7 @@ pub fn read_pattern_file(
     match std::fs::read(file_path) {
         Ok(contents) => {
             inspect_pattern_bytes(&contents);
-            parse_pattern_file_contents(&contents, file_path, warn)
+            parse_pattern_file_contents(&contents, file_path, None, warn)
         }
         Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok((
             vec![],
@@ -601,9 +605,14 @@ mod tests {
         let lines = b"syntax: glob\n*.elc";

         assert_eq!(
-            parse_pattern_file_contents(lines, Path::new("file_path"), false)
-                .unwrap()
-                .0,
+            parse_pattern_file_contents(
+                lines,
+                Path::new("file_path"),
+                None,
+                false
+            )
+            .unwrap()
+            .0,
             vec![IgnorePattern::new(
                 PatternSyntax::RelGlob,
                 b"*.elc",
@@ -614,16 +623,26 @@ mod tests {
         let lines = b"syntax: include\nsyntax: glob";

         assert_eq!(
-            parse_pattern_file_contents(lines, Path::new("file_path"), false)
-                .unwrap()
-                .0,
+            parse_pattern_file_contents(
+                lines,
+                Path::new("file_path"),
+                None,
+                false
+            )
+            .unwrap()
+            .0,
             vec![]
         );
         let lines = b"glob:**.o";
         assert_eq!(
-            parse_pattern_file_contents(lines, Path::new("file_path"), false)
-                .unwrap()
-                .0,
+            parse_pattern_file_contents(
+                lines,
+                Path::new("file_path"),
+                None,
+                false
+            )
+            .unwrap()
+            .0,
             vec![IgnorePattern::new(
                 PatternSyntax::RelGlob,
                 b"**.o",
@@ -7,6 +7,8 @@
 mod ancestors;
 pub mod dagops;
 pub mod errors;
+pub mod narrow;
+pub mod sparse;
 pub use ancestors::{AncestorsIterator, MissingAncestors};
 pub mod dirstate;
 pub mod dirstate_tree;
@@ -46,7 +46,7 @@ pub enum VisitChildrenSet {
     Recursive,
 }

-pub trait Matcher {
+pub trait Matcher: core::fmt::Debug {
     /// Explicitly listed files
     fn file_set(&self) -> Option<&HashSet<HgPathBuf>>;
     /// Returns whether `filename` is in `file_set`
@@ -283,6 +283,18 @@ pub struct IncludeMatcher<'a> {
     parents: HashSet<HgPathBuf>,
 }

+impl core::fmt::Debug for IncludeMatcher<'_> {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        f.debug_struct("IncludeMatcher")
+            .field("patterns", &String::from_utf8_lossy(&self.patterns))
+            .field("prefix", &self.prefix)
+            .field("roots", &self.roots)
+            .field("dirs", &self.dirs)
+            .field("parents", &self.parents)
+            .finish()
+    }
+}
+
 impl<'a> Matcher for IncludeMatcher<'a> {
     fn file_set(&self) -> Option<&HashSet<HgPathBuf>> {
         None
@@ -330,6 +342,7 @@ impl<'a> Matcher for IncludeMatcher<'a>
 }

 /// The union of multiple matchers. Will match if any of the matchers match.
+#[derive(Debug)]
 pub struct UnionMatcher {
     matchers: Vec<Box<dyn Matcher + Sync>>,
 }
@@ -393,6 +406,7 @@ impl UnionMatcher {
     }
 }

+#[derive(Debug)]
 pub struct IntersectionMatcher {
     m1: Box<dyn Matcher + Sync>,
     m2: Box<dyn Matcher + Sync>,
@@ -474,6 +488,91 @@ impl IntersectionMatcher {
     }
 }

+#[derive(Debug)]
+pub struct DifferenceMatcher {
+    base: Box<dyn Matcher + Sync>,
+    excluded: Box<dyn Matcher + Sync>,
+    files: Option<HashSet<HgPathBuf>>,
+}
+
+impl Matcher for DifferenceMatcher {
+    fn file_set(&self) -> Option<&HashSet<HgPathBuf>> {
+        self.files.as_ref()
+    }
+
+    fn exact_match(&self, filename: &HgPath) -> bool {
+        self.files.as_ref().map_or(false, |f| f.contains(filename))
+    }
+
+    fn matches(&self, filename: &HgPath) -> bool {
+        self.base.matches(filename) && !self.excluded.matches(filename)
+    }
+
+    fn visit_children_set(&self, directory: &HgPath) -> VisitChildrenSet {
+        let excluded_set = self.excluded.visit_children_set(directory);
+        if excluded_set == VisitChildrenSet::Recursive {
+            return VisitChildrenSet::Empty;
+        }
+        let base_set = self.base.visit_children_set(directory);
+        // Possible values for base: 'recursive', 'this', set(...), set()
+        // Possible values for excluded: 'this', set(...), set()
+        // If excluded has nothing under here that we care about, return base,
+        // even if it's 'recursive'.
+        if excluded_set == VisitChildrenSet::Empty {
+            return base_set;
+        }
+        match base_set {
+            VisitChildrenSet::This | VisitChildrenSet::Recursive => {
+                // Never return 'recursive' here if excluded_set is any kind of
+                // non-empty (either 'this' or set(foo)), since excluded might
+                // return set() for a subdirectory.
+                VisitChildrenSet::This
+            }
+            set => {
+                // Possible values for base: set(...), set()
+                // Possible values for excluded: 'this', set(...)
+                // We ignore excluded set results. They're possibly incorrect:
+                //   base = path:dir/subdir
+                //   excluded=rootfilesin:dir,
+                //   visit_children_set(''):
+                //   base returns {'dir'}, excluded returns {'dir'}, if we
+                //   subtracted we'd return set(), which is *not* correct, we
+                //   still need to visit 'dir'!
+                set
+            }
+        }
+    }
+
+    fn matches_everything(&self) -> bool {
+        false
+    }
+
+    fn is_exact(&self) -> bool {
+        self.base.is_exact()
+    }
+}
+
+impl DifferenceMatcher {
+    pub fn new(
+        base: Box<dyn Matcher + Sync>,
+        excluded: Box<dyn Matcher + Sync>,
+    ) -> Self {
+        let base_is_exact = base.is_exact();
+        let base_files = base.file_set().map(ToOwned::to_owned);
+        let mut new = Self {
+            base,
+            excluded,
+            files: None,
+        };
+        if base_is_exact {
+            new.files = base_files.map(|files| {
+                files.iter().cloned().filter(|f| new.matches(f)).collect()
+            });
+        }
+        new
+    }
+}
+
 /// Returns a function that matches an `HgPath` against the given regex
 /// pattern.
 ///
@@ -1489,4 +1588,101 @@ mod tests {
             VisitChildrenSet::Empty
         );
     }
+
+    #[test]
+    fn test_differencematcher() {
+        // Two alwaysmatchers should function like a nevermatcher
+        let m1 = AlwaysMatcher;
+        let m2 = AlwaysMatcher;
+        let matcher = DifferenceMatcher::new(Box::new(m1), Box::new(m2));
+
+        for case in &[
+            &b""[..],
+            b"dir",
+            b"dir/subdir",
+            b"dir/subdir/z",
+            b"dir/foo",
+            b"dir/subdir/x",
+            b"folder",
+        ] {
+            assert_eq!(
+                matcher.visit_children_set(HgPath::new(case)),
+                VisitChildrenSet::Empty
+            );
+        }
+
+        // One always and one never should behave the same as an always
+        let m1 = AlwaysMatcher;
+        let m2 = NeverMatcher;
+        let matcher = DifferenceMatcher::new(Box::new(m1), Box::new(m2));
+
+        for case in &[
+            &b""[..],
+            b"dir",
+            b"dir/subdir",
+            b"dir/subdir/z",
+            b"dir/foo",
+            b"dir/subdir/x",
+            b"folder",
+        ] {
+            assert_eq!(
+                matcher.visit_children_set(HgPath::new(case)),
+                VisitChildrenSet::Recursive
+            );
+        }
+
+        // Two include matchers
+        let m1 = Box::new(
+            IncludeMatcher::new(vec![IgnorePattern::new(
+                PatternSyntax::RelPath,
+                b"dir/subdir",
+                Path::new("/repo"),
+            )])
+            .unwrap(),
+        );
+        let m2 = Box::new(
+            IncludeMatcher::new(vec![IgnorePattern::new(
+                PatternSyntax::RootFiles,
+                b"dir",
+                Path::new("/repo"),
+            )])
+            .unwrap(),
+        );
+
+        let matcher = DifferenceMatcher::new(m1, m2);
+
+        let mut set = HashSet::new();
+        set.insert(HgPathBuf::from_bytes(b"dir"));
+        assert_eq!(
+            matcher.visit_children_set(HgPath::new(b"")),
+            VisitChildrenSet::Set(set)
+        );
+
+        let mut set = HashSet::new();
+        set.insert(HgPathBuf::from_bytes(b"subdir"));
+        assert_eq!(
+            matcher.visit_children_set(HgPath::new(b"dir")),
+            VisitChildrenSet::Set(set)
+        );
+        assert_eq!(
+            matcher.visit_children_set(HgPath::new(b"dir/subdir")),
+            VisitChildrenSet::Recursive
+        );
+        assert_eq!(
+            matcher.visit_children_set(HgPath::new(b"dir/foo")),
+            VisitChildrenSet::Empty
+        );
+        assert_eq!(
+            matcher.visit_children_set(HgPath::new(b"folder")),
+            VisitChildrenSet::Empty
+        );
+        assert_eq!(
+            matcher.visit_children_set(HgPath::new(b"dir/subdir/z")),
+            VisitChildrenSet::This
+        );
+        assert_eq!(
+            matcher.visit_children_set(HgPath::new(b"dir/subdir/x")),
+            VisitChildrenSet::This
+        );
+    }
 }
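
The new matcher boils down to set difference over path predicates: a path is kept when `base` accepts it and `excluded` does not. A minimal, self-contained Rust sketch of that composition (the `difference` helper and the closures are illustrative stand-ins, not hg-core's `Matcher` API):

    // Difference of two path predicates, as in DifferenceMatcher:
    // keep a path iff `base` accepts it and `excluded` rejects it.
    fn difference<'a>(
        base: impl Fn(&str) -> bool + 'a,
        excluded: impl Fn(&str) -> bool + 'a,
    ) -> impl Fn(&str) -> bool + 'a {
        move |path| base(path) && !excluded(path)
    }

    fn main() {
        let base = |p: &str| p.starts_with("dir/subdir"); // like path:dir/subdir
        // Like rootfilesin:dir - files directly inside "dir/".
        let excluded = |p: &str| {
            p.strip_prefix("dir/").map_or(false, |rest| !rest.contains('/'))
        };
        let m = difference(base, excluded);
        assert!(m("dir/subdir/x"));
        assert!(!m("folder"));
    }
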
@@ -17,18 +17,21 @@ pub struct Filelog {
 }
 
 impl Filelog {
-    pub fn open(repo: &Repo, file_path: &HgPath) -> Result<Self, HgError> {
+    pub fn open_vfs(
+        store_vfs: &crate::vfs::Vfs<'_>,
+        file_path: &HgPath,
+    ) -> Result<Self, HgError> {
         let index_path = store_path(file_path, b".i");
         let data_path = store_path(file_path, b".d");
-        let revlog = Revlog::open(
-            &repo.store_vfs(),
-            index_path,
-            Some(&data_path),
-            false,
-        )?;
+        let revlog =
+            Revlog::open(store_vfs, index_path, Some(&data_path), false)?;
         Ok(Self { revlog })
     }
 
+    pub fn open(repo: &Repo, file_path: &HgPath) -> Result<Self, HgError> {
+        Self::open_vfs(&repo.store_vfs(), file_path)
+    }
+
     /// The given node ID is that of the file as found in a filelog, not of a
     /// changeset.
     pub fn data_for_node(
@@ -40,6 +40,23 @@ impl Vfs<'_> {
         std::fs::read(&path).when_reading_file(&path)
     }
 
+    /// Returns `Ok(None)` if the file does not exist.
+    pub fn try_read(
+        &self,
+        relative_path: impl AsRef<Path>,
+    ) -> Result<Option<Vec<u8>>, HgError> {
+        match self.read(relative_path) {
+            Err(e) => match &e {
+                HgError::IoError { error, .. } => match error.kind() {
+                    ErrorKind::NotFound => return Ok(None),
+                    _ => Err(e),
+                },
+                _ => Err(e),
+            },
+            Ok(v) => Ok(Some(v)),
+        }
+    }
+
     fn mmap_open_gen(
         &self,
         relative_path: impl AsRef<Path>,
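
`try_read` turns a missing file into the absence of a value rather than an error. The same pattern over plain `std::fs`, as a self-contained sketch (`try_read_file` is an illustrative name, not part of hg-core):

    use std::io::ErrorKind;
    use std::path::Path;

    // Read a file, treating "not found" as the absence of a value rather
    // than as a failure, like Vfs::try_read does for HgError::IoError.
    fn try_read_file(path: &Path) -> std::io::Result<Option<Vec<u8>>> {
        match std::fs::read(path) {
            Ok(bytes) => Ok(Some(bytes)),
            Err(e) if e.kind() == ErrorKind::NotFound => Ok(None),
            Err(e) => Err(e),
        }
    }

    fn main() -> std::io::Result<()> {
        // A path that should not exist; yields Ok(None), not an error.
        assert!(try_read_file(Path::new("no/such/file"))?.is_none());
        Ok(())
    }
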
@@ -15,7 +15,10 @@ use cpython::{
     PyResult, PyTuple, Python, PythonObject, ToPyObject,
 };
 use hg::dirstate::status::StatusPath;
-use hg::matchers::{IntersectionMatcher, Matcher, NeverMatcher, UnionMatcher};
+use hg::matchers::{
+    DifferenceMatcher, IntersectionMatcher, Matcher, NeverMatcher,
+    UnionMatcher,
+};
 use hg::{
     matchers::{AlwaysMatcher, FileMatcher, IncludeMatcher},
     parse_pattern_syntax,
@@ -233,6 +236,12 @@ fn extract_matcher(
 
             Ok(Box::new(IntersectionMatcher::new(m1, m2)))
         }
+        "differencematcher" => {
+            let m1 = extract_matcher(py, matcher.getattr(py, "_m1")?)?;
+            let m2 = extract_matcher(py, matcher.getattr(py, "_m2")?)?;
+
+            Ok(Box::new(DifferenceMatcher::new(m1, m2)))
+        }
         e => Err(PyErr::new::<FallbackError, _>(
             py,
             format!("Unsupported matcher {}", e),
@@ -22,3 +22,4 @@ env_logger = "0.9.0"
 format-bytes = "0.3.0"
 users = "0.11.0"
 which = "4.2.5"
+rayon = "1.5.1"
@@ -55,6 +55,11 @@ pub fn run(invocation: &crate::CliInvocation)
     };
 
     let repo = invocation.repo?;
+    if repo.has_narrow() {
+        return Err(CommandError::unsupported(
+            "support for ellipsis nodes is missing and repo has narrow enabled",
+        ));
+    }
     let data = debug_data(repo, rev, kind).map_err(|e| (e, rev))?;
 
     let mut stdout = invocation.ui.stdout_buffer();
@@ -10,7 +10,6 @@ use crate::ui::Ui;
 use crate::utils::path_utils::RelativizePaths;
 use clap::{Arg, SubCommand};
 use format_bytes::format_bytes;
-use hg;
 use hg::config::Config;
 use hg::dirstate::has_exec_bit;
 use hg::dirstate::status::StatusPath;
@@ -18,7 +17,7 @@ use hg::dirstate::TruncatedTimestamp;
 use hg::errors::{HgError, IoResultExt};
 use hg::lock::LockError;
 use hg::manifest::Manifest;
-use hg::matchers::AlwaysMatcher;
+use hg::matchers::{AlwaysMatcher, IntersectionMatcher};
 use hg::repo::Repo;
 use hg::utils::files::get_bytes_from_os_string;
 use hg::utils::files::get_bytes_from_path;
@@ -28,7 +27,9 @@ use hg::DirstateStatus;
 use hg::PatternFileWarning;
 use hg::StatusError;
 use hg::StatusOptions;
+use hg::{self, narrow, sparse};
 use log::info;
+use rayon::prelude::*;
 use std::io;
 use std::path::PathBuf;
 
@@ -104,6 +105,12 @@ pub fn args() -> clap::App<'static, 'static>
                 .short("-n")
                 .long("--no-status"),
         )
+        .arg(
+            Arg::with_name("verbose")
+                .help("enable additional output")
+                .short("-v")
+                .long("--verbose"),
+        )
 }
 
 /// Pure data type allowing the caller to specify file states to display
@@ -150,18 +157,35 @@ impl DisplayStates {
     }
 }
 
+fn has_unfinished_merge(repo: &Repo) -> Result<bool, CommandError> {
+    return Ok(repo.dirstate_parents()?.is_merge());
+}
+
+fn has_unfinished_state(repo: &Repo) -> Result<bool, CommandError> {
+    // These are all the known values for the [fname] argument of
+    // [addunfinished] function in [state.py]
+    let known_state_files: &[&str] = &[
+        "bisect.state",
+        "graftstate",
+        "histedit-state",
+        "rebasestate",
+        "shelvedstate",
+        "transplant/journal",
+        "updatestate",
+    ];
+    if has_unfinished_merge(repo)? {
+        return Ok(true);
+    };
+    for f in known_state_files {
+        if repo.hg_vfs().join(f).exists() {
+            return Ok(true);
+        }
+    }
+    return Ok(false);
+}
+
 pub fn run(invocation: &crate::CliInvocation) -> Result<(), CommandError> {
     // TODO: lift these limitations
-    if invocation.config.get_bool(b"ui", b"tweakdefaults")? {
-        return Err(CommandError::unsupported(
-            "ui.tweakdefaults is not yet supported with rhg status",
-        ));
-    }
-    if invocation.config.get_bool(b"ui", b"statuscopies")? {
-        return Err(CommandError::unsupported(
-            "ui.statuscopies is not yet supported with rhg status",
-        ));
-    }
     if invocation
         .config
         .get(b"commands", b"status.terse")
@@ -176,15 +200,10 @@ pub fn run(invocation: &crate::CliInvocation)
     let config = invocation.config;
     let args = invocation.subcommand_args;
 
-    let verbose = !ui.plain(None)
-        && !args.is_present("print0")
-        && (config.get_bool(b"ui", b"verbose")?
+    let verbose = !args.is_present("print0")
+        && (args.is_present("verbose")
+            || config.get_bool(b"ui", b"verbose")?
             || config.get_bool(b"commands", b"status.verbose")?);
-    if verbose {
-        return Err(CommandError::unsupported(
-            "verbose status is not supported yet",
-        ));
-    }
 
     let all = args.is_present("all");
     let display_states = if all {
@@ -214,10 +233,12 @@ pub fn run(invocation: &crate::CliInvocation)
 
     let repo = invocation.repo?;
 
-    if repo.has_sparse() || repo.has_narrow() {
-        return Err(CommandError::unsupported(
-            "rhg status is not supported for sparse checkouts or narrow clones yet"
-        ));
+    if verbose {
+        if has_unfinished_state(repo)? {
+            return Err(CommandError::unsupported(
+                "verbose status output is not supported by rhg (and is needed because we're in an unfinished operation)",
+            ));
+        };
     }
 
     let mut dmap = repo.dirstate_map_mut()?;
@@ -239,28 +260,7 @@ pub fn run(invocation: &crate::CliInvocation)
     let after_status = |res: StatusResult| -> Result<_, CommandError> {
         let (mut ds_status, pattern_warnings) = res?;
         for warning in pattern_warnings {
-            match warning {
-                hg::PatternFileWarning::InvalidSyntax(path, syntax) => ui
-                    .write_stderr(&format_bytes!(
-                        b"{}: ignoring invalid syntax '{}'\n",
-                        get_bytes_from_path(path),
-                        &*syntax
-                    ))?,
-                hg::PatternFileWarning::NoSuchFile(path) => {
-                    let path = if let Ok(relative) =
-                        path.strip_prefix(repo.working_directory_path())
-                    {
-                        relative
-                    } else {
-                        &*path
-                    };
-                    ui.write_stderr(&format_bytes!(
-                        b"skipping unreadable pattern file '{}': \
-                        No such file or directory\n",
-                        get_bytes_from_path(path),
-                    ))?
-                }
-            }
+            ui.write_stderr(&print_pattern_file_warning(&warning, &repo))?;
         }
 
         for (path, error) in ds_status.bad {
@@ -292,23 +292,37 @@ pub fn run(invocation: &crate::CliInvocation)
         let manifest = repo.manifest_for_node(p1).map_err(|e| {
             CommandError::from((e, &*format!("{:x}", p1.short())))
         })?;
-        for to_check in ds_status.unsure {
-            if unsure_is_modified(repo, &manifest, &to_check.path)? {
+        let working_directory_vfs = repo.working_directory_vfs();
+        let store_vfs = repo.store_vfs();
+        let res: Vec<_> = ds_status
+            .unsure
+            .into_par_iter()
+            .map(|to_check| {
+                unsure_is_modified(
+                    working_directory_vfs,
+                    store_vfs,
+                    &manifest,
+                    &to_check.path,
+                )
+                .map(|modified| (to_check, modified))
+            })
+            .collect::<Result<_, _>>()?;
+        for (status_path, is_modified) in res.into_iter() {
+            if is_modified {
                 if display_states.modified {
-                    ds_status.modified.push(to_check);
+                    ds_status.modified.push(status_path);
                 }
             } else {
                 if display_states.clean {
-                    ds_status.clean.push(to_check.clone());
+                    ds_status.clean.push(status_path.clone());
                 }
-                fixup.push(to_check.path.into_owned())
+                fixup.push(status_path.path.into_owned())
             }
         }
     }
-    let relative_paths = (!ui.plain(None))
-        && config
+    let relative_paths = config
         .get_option(b"commands", b"status.relative")?
         .unwrap_or(config.get_bool(b"ui", b"relative-paths")?);
     let output = DisplayStatusPaths {
         ui,
         no_status,
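
The rewritten loop fans the per-file checks out with rayon and folds the per-item `Result`s with `collect`, so the first error aborts the whole batch. A freestanding sketch of that shape (the workload here is invented):

    use rayon::prelude::*;

    // Fallible per-item work fanned out in parallel; collecting into
    // Result<Vec<_>, _> surfaces the first error, mirroring the
    // parallelised unsure_is_modified loop above.
    fn main() -> Result<(), String> {
        let unsure = vec!["a", "b", "c"];
        let res: Vec<(&str, bool)> = unsure
            .into_par_iter()
            .map(|path| {
                let modified = path != "b"; // stand-in for the real check
                Ok::<_, String>((path, modified))
            })
            .collect::<Result<_, _>>()?;
        for (path, modified) in res {
            println!("{}: modified={}", path, modified);
        }
        Ok(())
    }
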
@@ -350,9 +364,45 @@ pub fn run(invocation: &crate::CliInvocation)
             filesystem_time_at_status_start,
         ))
     };
+    let (narrow_matcher, narrow_warnings) = narrow::matcher(repo)?;
+    let (sparse_matcher, sparse_warnings) = sparse::matcher(repo)?;
+    let matcher = match (repo.has_narrow(), repo.has_sparse()) {
+        (true, true) => {
+            Box::new(IntersectionMatcher::new(narrow_matcher, sparse_matcher))
+        }
+        (true, false) => narrow_matcher,
+        (false, true) => sparse_matcher,
+        (false, false) => Box::new(AlwaysMatcher),
+    };
+
+    for warning in narrow_warnings.into_iter().chain(sparse_warnings) {
+        match &warning {
+            sparse::SparseWarning::RootWarning { context, line } => {
+                let msg = format_bytes!(
+                    b"warning: {} profile cannot use paths \
+                    starting with /, ignoring {}\n",
+                    context,
+                    line
+                );
+                ui.write_stderr(&msg)?;
+            }
+            sparse::SparseWarning::ProfileNotFound { profile, rev } => {
+                let msg = format_bytes!(
+                    b"warning: sparse profile '{}' not found \
+                    in rev {} - ignoring it\n",
+                    profile,
+                    rev
+                );
+                ui.write_stderr(&msg)?;
+            }
+            sparse::SparseWarning::Pattern(e) => {
+                ui.write_stderr(&print_pattern_file_warning(e, &repo))?;
+            }
+        }
+    }
     let (fixup, mut dirstate_write_needed, filesystem_time_at_status_start) =
         dmap.with_status(
-            &AlwaysMatcher,
+            matcher.as_ref(),
             repo.working_directory_path().to_owned(),
             ignore_files(repo, config),
             options,
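
The `(has_narrow, has_sparse)` match picks a single matcher: an intersection when both configs are active, the lone active one otherwise, and an always-matcher as the fallback. A sketch with boxed predicates standing in for `Box<dyn Matcher + Sync>` (the names are illustrative, not hg-core's):

    // Sketch of the (narrow, sparse) selection above with plain predicates.
    type Pred = Box<dyn Fn(&str) -> bool>;

    fn combine(narrow: Option<Pred>, sparse: Option<Pred>) -> Pred {
        match (narrow, sparse) {
            (Some(n), Some(s)) => Box::new(move |p: &str| n(p) && s(p)), // intersection
            (Some(n), None) => n,
            (None, Some(s)) => s,
            (None, None) => Box::new(|_| true), // AlwaysMatcher equivalent
        }
    }

    fn main() {
        let narrow: Pred = Box::new(|p: &str| p.starts_with("dir/"));
        let sparse: Pred = Box::new(|p: &str| !p.ends_with(".bin"));
        let m = combine(Some(narrow), Some(sparse));
        assert!(m("dir/a.txt"));
        assert!(!m("dir/a.bin"));
    }
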
@@ -491,11 +541,12 @@ impl DisplayStatusPaths<'_> {
 /// This meant to be used for those that the dirstate cannot resolve, due
 /// to time resolution limits.
 fn unsure_is_modified(
-    repo: &Repo,
+    working_directory_vfs: hg::vfs::Vfs,
+    store_vfs: hg::vfs::Vfs,
     manifest: &Manifest,
     hg_path: &HgPath,
 ) -> Result<bool, HgError> {
-    let vfs = repo.working_directory_vfs();
+    let vfs = working_directory_vfs;
     let fs_path = hg_path_to_path_buf(hg_path).expect("HgPath conversion");
     let fs_metadata = vfs.symlink_metadata(&fs_path)?;
     let is_symlink = fs_metadata.file_type().is_symlink();
@@ -515,7 +566,7 @@ fn unsure_is_modified(
     if entry.flags != fs_flags {
         return Ok(true);
     }
-    let filelog = repo.filelog(hg_path)?;
+    let filelog = hg::filelog::Filelog::open_vfs(&store_vfs, hg_path)?;
     let fs_len = fs_metadata.len();
     let file_node = entry.node_id()?;
     let filelog_entry = filelog.entry_for_node(file_node).map_err(|_| {
@@ -545,3 +596,30 @@ fn unsure_is_modified(
     };
     Ok(p1_contents != &*fs_contents)
 }
+
+fn print_pattern_file_warning(
+    warning: &PatternFileWarning,
+    repo: &Repo,
+) -> Vec<u8> {
+    match warning {
+        PatternFileWarning::InvalidSyntax(path, syntax) => format_bytes!(
+            b"{}: ignoring invalid syntax '{}'\n",
+            get_bytes_from_path(path),
+            &*syntax
+        ),
+        PatternFileWarning::NoSuchFile(path) => {
+            let path = if let Ok(relative) =
+                path.strip_prefix(repo.working_directory_path())
+            {
+                relative
+            } else {
+                &*path
+            };
+            format_bytes!(
+                b"skipping unreadable pattern file '{}': \
+                No such file or directory\n",
+                get_bytes_from_path(path),
+            )
+        }
+    }
+}
@@ -8,6 +8,7 @@ use hg::errors::HgError;
 use hg::exit_codes;
 use hg::repo::RepoError;
 use hg::revlog::revlog::RevlogError;
+use hg::sparse::SparseConfigError;
 use hg::utils::files::get_bytes_from_path;
 use hg::{DirstateError, DirstateMapError, StatusError};
 use std::convert::From;
@@ -19,6 +20,7 @@ pub enum CommandError {
     Abort {
         message: Vec<u8>,
         detailed_exit_code: exit_codes::ExitCode,
+        hint: Option<Vec<u8>>,
     },
 
     /// Exit with a failure exit code but no message.
@@ -49,6 +51,32 @@ impl CommandError {
             // https://www.mercurial-scm.org/wiki/EncodingStrategy#Mixing_output
             message: utf8_to_local(message.as_ref()).into(),
             detailed_exit_code: detailed_exit_code,
+            hint: None,
+        }
+    }
+
+    pub fn abort_with_exit_code_and_hint(
+        message: impl AsRef<str>,
+        detailed_exit_code: exit_codes::ExitCode,
+        hint: Option<impl AsRef<str>>,
+    ) -> Self {
+        CommandError::Abort {
+            message: utf8_to_local(message.as_ref()).into(),
+            detailed_exit_code,
+            hint: hint.map(|h| utf8_to_local(h.as_ref()).into()),
+        }
+    }
+
+    pub fn abort_with_exit_code_bytes(
+        message: impl AsRef<[u8]>,
+        detailed_exit_code: exit_codes::ExitCode,
+    ) -> Self {
+        // TODO: use this everywhere it makes sense instead of the string
+        // version.
+        CommandError::Abort {
+            message: message.as_ref().into(),
+            detailed_exit_code,
+            hint: None,
         }
     }
 
@@ -79,9 +107,12 @@ impl From<HgError> for CommandError {
             HgError::Abort {
                 message,
                 detailed_exit_code,
-            } => {
-                CommandError::abort_with_exit_code(message, detailed_exit_code)
-            }
+                hint,
+            } => CommandError::abort_with_exit_code_and_hint(
+                message,
+                detailed_exit_code,
+                hint,
+            ),
             _ => CommandError::abort(error.to_string()),
         }
     }
@@ -108,13 +139,15 @@ impl From<UiError> for CommandError {
 impl From<RepoError> for CommandError {
     fn from(error: RepoError) -> Self {
         match error {
-            RepoError::NotFound { at } => CommandError::Abort {
-                message: format_bytes!(
-                    b"abort: repository {} not found",
-                    get_bytes_from_path(at)
-                ),
-                detailed_exit_code: exit_codes::ABORT,
-            },
+            RepoError::NotFound { at } => {
+                CommandError::abort_with_exit_code_bytes(
+                    format_bytes!(
+                        b"abort: repository {} not found",
+                        get_bytes_from_path(at)
+                    ),
+                    exit_codes::ABORT,
+                )
+            }
             RepoError::ConfigParseError(error) => error.into(),
             RepoError::Other(error) => error.into(),
         }
124 impl<'a> From<&'a NoRepoInCwdError> for CommandError {
157 impl<'a> From<&'a NoRepoInCwdError> for CommandError {
125 fn from(error: &'a NoRepoInCwdError) -> Self {
158 fn from(error: &'a NoRepoInCwdError) -> Self {
126 let NoRepoInCwdError { cwd } = error;
159 let NoRepoInCwdError { cwd } = error;
127 CommandError::Abort {
160 CommandError::abort_with_exit_code_bytes(
128 message: format_bytes!(
161 format_bytes!(
129 b"abort: no repository found in '{}' (.hg not found)!",
162 b"abort: no repository found in '{}' (.hg not found)!",
130 get_bytes_from_path(cwd)
163 get_bytes_from_path(cwd)
131 ),
164 ),
132 detailed_exit_code: exit_codes::ABORT,
165 exit_codes::ABORT,
133 }
166 )
134 }
167 }
135 }
168 }
136
169
@@ -155,15 +188,15 @@ impl From<ConfigParseError> for CommandError {
         } else {
             Vec::new()
         };
-        CommandError::Abort {
-            message: format_bytes!(
+        CommandError::abort_with_exit_code_bytes(
+            format_bytes!(
                 b"config error at {}{}: {}",
                 origin,
                 line_message,
                 message
             ),
-            detailed_exit_code: exit_codes::CONFIG_ERROR_ABORT,
-        }
+            exit_codes::CONFIG_ERROR_ABORT,
+        )
     }
 }
 
@@ -212,3 +245,46 @@ impl From<DirstateV2ParseError> for CommandError {
         HgError::from(error).into()
     }
 }
+
+impl From<SparseConfigError> for CommandError {
+    fn from(e: SparseConfigError) -> Self {
+        match e {
+            SparseConfigError::IncludesAfterExcludes { context } => {
+                Self::abort_with_exit_code_bytes(
+                    format_bytes!(
+                        b"{} config cannot have includes after excludes",
+                        context
+                    ),
+                    exit_codes::CONFIG_PARSE_ERROR_ABORT,
+                )
+            }
+            SparseConfigError::EntryOutsideSection { context, line } => {
+                Self::abort_with_exit_code_bytes(
+                    format_bytes!(
+                        b"{} config entry outside of section: {}",
+                        context,
+                        &line,
+                    ),
+                    exit_codes::CONFIG_PARSE_ERROR_ABORT,
+                )
+            }
+            SparseConfigError::InvalidNarrowPrefix(prefix) => {
+                Self::abort_with_exit_code_bytes(
+                    format_bytes!(
+                        b"invalid prefix on narrow pattern: {}",
+                        &prefix
+                    ),
+                    exit_codes::ABORT,
+                )
+            }
+            SparseConfigError::IncludesInNarrow => Self::abort(
+                "including other spec files using '%include' \
+                is not supported in narrowspec",
+            ),
+            SparseConfigError::HgError(e) => Self::from(e),
+            SparseConfigError::PatternError(e) => {
+                Self::unsupported(format!("{}", e))
+            }
+        }
+    }
+}
@@ -6,11 +6,12 @@ use clap::AppSettings;
 use clap::Arg;
 use clap::ArgMatches;
 use format_bytes::{format_bytes, join};
-use hg::config::{Config, ConfigSource};
+use hg::config::{Config, ConfigSource, PlainInfo};
 use hg::repo::{Repo, RepoError};
 use hg::utils::files::{get_bytes_from_os_str, get_path_from_bytes};
 use hg::utils::SliceExt;
 use hg::{exit_codes, requirements};
+use std::borrow::Cow;
 use std::collections::HashSet;
 use std::ffi::OsString;
 use std::os::unix::prelude::CommandExt;
@@ -300,6 +301,24 @@ fn rhg_main(argv: Vec<OsString>) -> ! {
         }
     };
 
+    let exit =
+        |ui: &Ui, config: &Config, result: Result<(), CommandError>| -> ! {
+            exit(
+                &argv,
+                &initial_current_dir,
+                ui,
+                OnUnsupported::from_config(config),
+                result,
+                // TODO: show a warning or combine with original error if
+                // `get_bool` returns an error
+                non_repo_config
+                    .get_bool(b"ui", b"detailed-exit-code")
+                    .unwrap_or(false),
+            )
+        };
+    let early_exit = |config: &Config, error: CommandError| -> ! {
+        exit(&Ui::new_infallible(config), &config, Err(error))
+    };
     let repo_result = match Repo::find(&non_repo_config, repo_path.to_owned())
     {
         Ok(repo) => Ok(repo),
@@ -307,18 +326,7 @@ fn rhg_main(argv: Vec<OsString>) -> ! {
             // Not finding a repo is not fatal yet, if `-R` was not given
             Err(NoRepoInCwdError { cwd: at })
         }
-        Err(error) => exit(
-            &argv,
-            &initial_current_dir,
-            &Ui::new_infallible(&non_repo_config),
-            OnUnsupported::from_config(&non_repo_config),
-            Err(error.into()),
-            // TODO: show a warning or combine with original error if
-            // `get_bool` returns an error
-            non_repo_config
-                .get_bool(b"ui", b"detailed-exit-code")
-                .unwrap_or(false),
-        ),
+        Err(error) => early_exit(&non_repo_config, error.into()),
     };
 
     let config = if let Ok(repo) = &repo_result {
@@ -326,20 +334,20 @@ fn rhg_main(argv: Vec<OsString>) -> ! {
     } else {
         &non_repo_config
     };
-    let ui = Ui::new(&config).unwrap_or_else(|error| {
-        exit(
-            &argv,
-            &initial_current_dir,
-            &Ui::new_infallible(&config),
-            OnUnsupported::from_config(&config),
-            Err(error.into()),
-            config
-                .get_bool(b"ui", b"detailed-exit-code")
-                .unwrap_or(false),
-        )
-    });
-    let on_unsupported = OnUnsupported::from_config(config);
 
+    let mut config_cow = Cow::Borrowed(config);
+    config_cow.to_mut().apply_plain(PlainInfo::from_env());
+    if !ui::plain(Some("tweakdefaults"))
+        && config_cow
+            .as_ref()
+            .get_bool(b"ui", b"tweakdefaults")
+            .unwrap_or_else(|error| early_exit(&config, error.into()))
+    {
+        config_cow.to_mut().tweakdefaults()
+    };
+    let config = config_cow.as_ref();
+    let ui = Ui::new(&config)
+        .unwrap_or_else(|error| early_exit(&config, error.into()));
     let result = main_with_result(
         argv.iter().map(|s| s.to_owned()).collect(),
         &process_start_time,
@@ -347,18 +355,7 @@ fn rhg_main(argv: Vec<OsString>) -> ! {
         repo_result.as_ref(),
         config,
     );
-    exit(
-        &argv,
-        &initial_current_dir,
-        &ui,
-        on_unsupported,
-        result,
-        // TODO: show a warning or combine with original error if `get_bool`
-        // returns an error
-        config
-            .get_bool(b"ui", b"detailed-exit-code")
-            .unwrap_or(false),
-    )
+    exit(&ui, &config, result)
 }
 
 fn main() -> ! {
@@ -372,8 +369,7 @@ fn exit_code(
     match result {
         Ok(()) => exit_codes::OK,
         Err(CommandError::Abort {
-            message: _,
-            detailed_exit_code,
+            detailed_exit_code, ..
         }) => {
             if use_detailed_exit_code {
                 *detailed_exit_code
@@ -480,15 +476,15 @@ fn exit_no_fallback(
     match &result {
         Ok(_) => {}
         Err(CommandError::Unsuccessful) => {}
-        Err(CommandError::Abort {
-            message,
-            detailed_exit_code: _,
-        }) => {
+        Err(CommandError::Abort { message, hint, .. }) => {
+            // Ignore errors when writing to stderr, we’re already exiting
+            // with failure code so there’s not much more we can do.
             if !message.is_empty() {
-                // Ignore errors when writing to stderr, we’re already exiting
-                // with failure code so there’s not much more we can do.
                 let _ = ui.write_stderr(&format_bytes!(b"{}\n", message));
             }
+            if let Some(hint) = hint {
+                let _ = ui.write_stderr(&format_bytes!(b"({})\n", hint));
+            }
         }
         Err(CommandError::UnsupportedFeature { message }) => {
             match on_unsupported {
@@ -546,6 +542,7 @@ subcommands! {
     debugdata
     debugrequirements
     debugignorerhg
+    debugrhgsparse
     files
     root
     config
@@ -677,8 +674,15 @@ impl OnUnsupported {
 /// The `*` extension is an edge-case for config sub-options that apply to all
 /// extensions. For now, only `:required` exists, but that may change in the
 /// future.
-const SUPPORTED_EXTENSIONS: &[&[u8]] =
-    &[b"blackbox", b"share", b"sparse", b"narrow", b"*"];
+const SUPPORTED_EXTENSIONS: &[&[u8]] = &[
+    b"blackbox",
+    b"share",
+    b"sparse",
+    b"narrow",
+    b"*",
+    b"strip",
+    b"rebase",
+];
 
 fn check_extensions(config: &Config) -> Result<(), CommandError> {
     if let Some(b"*") = config.get(b"rhg", b"ignored-extensions") {
@@ -687,13 +691,18 @@ fn check_extensions(config: &Config) -> Result<(), CommandError> {
     }
 
     let enabled: HashSet<&[u8]> = config
-        .get_section_keys(b"extensions")
-        .into_iter()
-        .map(|extension| {
+        .iter_section(b"extensions")
+        .filter_map(|(extension, value)| {
+            if value == b"!" {
+                // Filter out disabled extensions
+                return None;
+            }
             // Ignore extension suboptions. Only `required` exists for now.
             // `rhg` either supports an extension or doesn't, so it doesn't
             // make sense to consider the loading of an extension.
-            extension.split_2(b':').unwrap_or((extension, b"")).0
+            let actual_extension =
+                extension.split_2(b':').unwrap_or((extension, b"")).0;
+            Some(actual_extension)
         })
         .collect();
 
@@ -3,10 +3,9 @@ use crate::color::Effect;
 use format_bytes::format_bytes;
 use format_bytes::write_bytes;
 use hg::config::Config;
+use hg::config::PlainInfo;
 use hg::errors::HgError;
-use hg::utils::files::get_bytes_from_os_string;
 use std::borrow::Cow;
-use std::env;
 use std::io;
 use std::io::{ErrorKind, Write};
 
@@ -127,35 +126,15 @@ impl Ui {
         }
         stdout.flush()
     }
-
-    /// Return whether plain mode is active.
-    ///
-    /// Plain mode means that all configuration variables which affect
-    /// the behavior and output of Mercurial should be
-    /// ignored. Additionally, the output should be stable,
-    /// reproducible and suitable for use in scripts or applications.
-    ///
-    /// The only way to trigger plain mode is by setting either the
-    /// `HGPLAIN' or `HGPLAINEXCEPT' environment variables.
-    ///
-    /// The return value can either be
-    /// - False if HGPLAIN is not set, or feature is in HGPLAINEXCEPT
-    /// - False if feature is disabled by default and not included in HGPLAIN
-    /// - True otherwise
-    pub fn plain(&self, feature: Option<&str>) -> bool {
-        plain(feature)
-    }
 }
 
+// TODO: pass the PlainInfo to call sites directly and
+// delete this function
 pub fn plain(opt_feature: Option<&str>) -> bool {
-    if let Some(except) = env::var_os("HGPLAINEXCEPT") {
-        opt_feature.map_or(true, |feature| {
-            get_bytes_from_os_string(except)
-                .split(|&byte| byte == b',')
-                .all(|exception| exception != feature.as_bytes())
-        })
-    } else {
-        env::var_os("HGPLAIN").is_some()
-    }
+    let plain_info = PlainInfo::from_env();
+    match opt_feature {
+        None => plain_info.is_plain(),
+        Some(feature) => plain_info.is_feature_plain(feature),
+    }
 }
 
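
The rewritten `plain()` delegates to `PlainInfo`, but the observable HGPLAIN/HGPLAINEXCEPT rules are the ones the removed branch implemented: HGPLAINEXCEPT exempts the listed features, otherwise plain mode is on whenever HGPLAIN is set. A standalone sketch of those rules:

    use std::env;

    // Simplified HGPLAIN/HGPLAINEXCEPT semantics, matching the removed
    // implementation above.
    fn plain(feature: Option<&str>) -> bool {
        if let Some(except) = env::var_os("HGPLAINEXCEPT") {
            let except = except.to_string_lossy().into_owned();
            feature.map_or(true, |f| except.split(',').all(|e| e != f))
        } else {
            env::var_os("HGPLAIN").is_some()
        }
    }

    fn main() {
        env::set_var("HGPLAINEXCEPT", "tweakdefaults");
        assert!(!plain(Some("tweakdefaults"))); // exempted feature
        assert!(plain(Some("color"))); // still plain
    }
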
@@ -666,30 +666,55 @@ class buildhgextindex(Command):
 
 class buildhgexe(build_ext):
     description = 'compile hg.exe from mercurial/exewrapper.c'
-    user_options = build_ext.user_options + [
-        (
-            'long-paths-support',
-            None,
-            'enable support for long paths on '
-            'Windows (off by default and '
-            'experimental)',
-        ),
-    ]
 
-    LONG_PATHS_MANIFEST = """
+    LONG_PATHS_MANIFEST = """\
 <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
 <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
-    <application>
-        <windowsSettings
-        xmlns:ws2="http://schemas.microsoft.com/SMI/2016/WindowsSettings">
-            <ws2:longPathAware>true</ws2:longPathAware>
-        </windowsSettings>
-    </application>
-</assembly>"""
+  <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+    <security>
+      <requestedPrivileges>
+        <requestedExecutionLevel
+          level="asInvoker"
+          uiAccess="false"
+        />
+      </requestedPrivileges>
+    </security>
+  </trustInfo>
+  <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
+    <application>
+      <!-- Windows Vista -->
+      <supportedOS Id="{e2011457-1546-43c5-a5fe-008deee3d3f0}"/>
+      <!-- Windows 7 -->
+      <supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>
+      <!-- Windows 8 -->
+      <supportedOS Id="{4a2f28e3-53b9-4441-ba9c-d69d4a4a6e38}"/>
+      <!-- Windows 8.1 -->
+      <supportedOS Id="{1f676c76-80e1-4239-95bb-83d0f6d0da78}"/>
+      <!-- Windows 10 and Windows 11 -->
+      <supportedOS Id="{8e0f7a12-bfb3-4fe8-b9a5-48fd50a15a9a}"/>
+    </application>
+  </compatibility>
+  <application xmlns="urn:schemas-microsoft-com:asm.v3">
+    <windowsSettings
+        xmlns:ws2="http://schemas.microsoft.com/SMI/2016/WindowsSettings">
+      <ws2:longPathAware>true</ws2:longPathAware>
+    </windowsSettings>
+  </application>
+  <dependency>
+    <dependentAssembly>
+      <assemblyIdentity type="win32"
+                        name="Microsoft.Windows.Common-Controls"
+                        version="6.0.0.0"
+                        processorArchitecture="*"
+                        publicKeyToken="6595b64144ccf1df"
+                        language="*" />
+    </dependentAssembly>
+  </dependency>
+</assembly>
+"""
 
     def initialize_options(self):
         build_ext.initialize_options(self)
-        self.long_paths_support = False
 
     def build_extensions(self):
         if os.name != 'nt':
@@ -700,8 +725,8 @@ class buildhgexe(build_ext):
 
         pythonlib = None
 
-        dir = os.path.dirname(self.get_ext_fullpath('dummy'))
-        self.hgtarget = os.path.join(dir, 'hg')
+        dirname = os.path.dirname(self.get_ext_fullpath('dummy'))
+        self.hgtarget = os.path.join(dirname, 'hg')
 
         if getattr(sys, 'dllhandle', None):
             # Different Python installs can have different Python library
@@ -774,22 +799,11 @@ class buildhgexe(build_ext):
         self.compiler.link_executable(
             objects, self.hgtarget, libraries=[], output_dir=self.build_temp
         )
-        if self.long_paths_support:
-            self.addlongpathsmanifest()
+
+        self.addlongpathsmanifest()
 
     def addlongpathsmanifest(self):
-        r"""Add manifest pieces so that hg.exe understands long paths
-
-        This is an EXPERIMENTAL feature, use with care.
-        To enable long paths support, one needs to do two things:
-        - build Mercurial with --long-paths-support option
-        - change HKLM\SYSTEM\CurrentControlSet\Control\FileSystem\
-          LongPathsEnabled to have value 1.
-
-        Please ignore 'warning 81010002: Unrecognized Element "longPathAware"';
-        it happens because Mercurial uses mt.exe circa 2008, which is not
-        yet aware of long paths support in the manifest (I think so at least).
-        This does not stop mt.exe from embedding/merging the XML properly.
+        """Add manifest pieces so that hg.exe understands long paths
 
         Why resource #1 should be used for .exe manifests? I don't know and
         wasn't able to find an explanation for mortals. But it seems to work.
@@ -797,21 +811,18 @@ class buildhgexe(build_ext):
         exefname = self.compiler.executable_filename(self.hgtarget)
         fdauto, manfname = tempfile.mkstemp(suffix='.hg.exe.manifest')
         os.close(fdauto)
-        with open(manfname, 'w') as f:
+        with open(manfname, 'w', encoding="UTF-8") as f:
             f.write(self.LONG_PATHS_MANIFEST)
         log.info("long paths manifest is written to '%s'" % manfname)
-        inputresource = '-inputresource:%s;#1' % exefname
         outputresource = '-outputresource:%s;#1' % exefname
         log.info("running mt.exe to update hg.exe's manifest in-place")
-        # supplying both -manifest and -inputresource to mt.exe makes
-        # it merge the embedded and supplied manifests in the -outputresource
+
         self.spawn(
             [
-                'mt.exe',
+                self.compiler.mt,
                 '-nologo',
                 '-manifest',
                 manfname,
-                inputresource,
                 outputresource,
             ]
         )
@@ -48,7 +48,7 @@ import argparse
 import collections
 import contextlib
 import difflib
-import distutils.version as version
+
 import errno
 import functools
 import json
@@ -72,6 +72,13 @@ import unittest
 import uuid
 import xml.dom.minidom as minidom
 
+try:
+    # PEP 632 recommend the use of `packaging.version` to replace the
+    # deprecated `distutil.version`. So lets do it.
+    import packaging.version as version
+except ImportError:
+    import distutils.version as version
+
 if sys.version_info < (3, 5, 0):
     print(
         '%s is only supported on Python 3.5+, not %s'
@@ -3437,6 +3444,7 @@ class TestRunner:
         if self.options.list_tests:
             result = runner.listtests(suite)
         else:
+            install_start_time = time.monotonic()
             self._usecorrectpython()
             if self._installdir:
                 self._installhg()
@@ -3450,6 +3458,11 @@ class TestRunner:
             elif self.options.pyoxidized:
                 self._build_pyoxidized()
             self._use_correct_mercurial()
+            install_end_time = time.monotonic()
+            if self._installdir:
+                msg = 'installed Mercurial in %.2f seconds'
+                msg %= install_end_time - install_start_time
+                log(msg)
 
             log(
                 'running %d tests using %d parallel processes'
@@ -784,7 +784,6 @@ user adds irrelevant but consistent info
   $ hg log -q -r 'bisect(pruned)'
   0:33b1f9bc8bc5
   1:4ca5088da217
-  2:051e12f87bf1
   8:dab8161ac8fc
   11:82ca6f06eccd
   12:9f259202bbe7
@@ -718,7 +718,7 @@ Unbundle incremental bundles into fresh 
   $ hg init empty
   $ hg -R test bundle --base null -r 0 ../0.hg
   1 changesets found
-  $ hg -R test bundle --base 0 -r 1 ../1.hg
+  $ hg -R test bundle --exact -r 1 ../1.hg
   1 changesets found
   $ hg -R empty unbundle -u ../0.hg ../1.hg
   adding changesets
@@ -432,6 +432,20 @@ check server log:
   YYYY/MM/DD HH:MM:SS (PID)> log -R cached
   YYYY/MM/DD HH:MM:SS (PID)> loaded repo into cache: $TESTTMP/cached (in ...s)
 
+Test that -R is interpreted relative to --cwd.
+
+  $ hg init repo1
+  $ mkdir -p a/b
+  $ hg init a/b/repo2
+  $ printf "[alias]\ntest=repo1\n" >> repo1/.hg/hgrc
+  $ printf "[alias]\ntest=repo2\n" >> a/b/repo2/.hg/hgrc
+  $ cd a
+  $ chg --cwd .. -R repo1 show alias.test
+  repo1
+  $ chg --cwd . -R b/repo2 show alias.test
+  repo2
+  $ cd ..
+
 Test that chg works (sets to the user's actual LC_CTYPE) even when python
 "coerces" the locale (py3.7+)
 
@@ -261,7 +261,7 @@ Show all commands + options
   bookmarks: force, rev, delete, rename, inactive, list, template
   branch: force, clean, rev
   branches: active, closed, rev, template
-  bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure
+  bundle: exact, force, rev, branch, base, all, type, ssh, remotecmd, insecure
   cat: output, rev, decode, include, exclude, template
   clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
   commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
@@ -96,6 +96,7 @@ perfstatus
    perf::branchmapupdate
                  benchmark branchmap update from for <base> revs to <target>
                  revs
+   perf::bundle  benchmark the creation of a bundle from a repository
    perf::bundleread
                  Benchmark reading of bundle files.
    perf::cca     (no help text available)
@@ -105,6 +106,9 @@ perfstatus
                  (no help text available)
    perf::ctxfiles
                  (no help text available)
+   perf::delta-find
+                 benchmark the process of finding a valid delta for a revlog
+                 revision
    perf::diffwd  Profile diff of working directory changes
    perf::dirfoldmap
                  benchmap a 'dirstate._map.dirfoldmap.get()' request
@@ -187,6 +191,8 @@ perfstatus
    perf::tags    (no help text available)
    perf::templating
                  test the rendering time of a given template
+   perf::unbundle
+                 benchmark application of a bundle in a repository.
    perf::unidiff
                  benchmark a unified diff between revisions
    perf::volatilesets
@@ -385,6 +391,11 @@ Test pre-run feature
   searching for changes
   searching for changes
   searching for changes
+  $ hg perf::bundle 'last(all(), 5)'
+  $ hg bundle --exact --rev 'last(all(), 5)' last-5.hg
+  4 changesets found
+  $ hg perf::unbundle last-5.hg
+
 
 test profile-benchmark option
 ------------------------------
@@ -659,7 +659,7 @@ Server sends partial bundle2 header magi
 
   $ hg clone http://localhost:$HGPORT/ clone
   requesting all changes
-  abort: HTTP request error (incomplete response) (py3 !)
+  abort: HTTP request error (incomplete response*) (glob)
   (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
   [255]
 
@@ -703,7 +703,7 @@ Server sends incomplete bundle2 stream p
 
   $ hg clone http://localhost:$HGPORT/ clone
   requesting all changes
-  abort: HTTP request error (incomplete response) (py3 !)
+  abort: HTTP request error (incomplete response*) (glob)
   (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
   [255]
 
@@ -904,7 +904,7 @@ Server stops after bundle2 part payload 
   adding changesets
   transaction abort!
   rollback completed
-  abort: HTTP request error (incomplete response) (py3 !)
+  abort: HTTP request error (incomplete response*) (glob)
   (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
   [255]
 
@@ -1021,7 +1021,7 @@ Server stops sending after 0 length payl
   adding file changes
   transaction abort!
   rollback completed
-  abort: HTTP request error (incomplete response) (py3 !)
+  abort: HTTP request error (incomplete response*) (glob)
   (this may be an intermittent network failure; if the error persists, consider contacting the network or server operator)
   [255]
 
@@ -2157,6 +2157,8 @@ test -u/-k for problematic encoding
   ...  '''.encode('utf-8')) and None
   $ sh < setup.sh
 
+#if no-rhg
+
 test in problematic encoding
   >>> with open('test.sh', 'wb') as f:
   ...     f.write(u'''
@@ -2179,6 +2181,8 @@ test in problematic encoding
   3
   1
 
+#endif
+
   $ cd ..
 
 test hg log on non-existent files and on directories
@@ -38,7 +38,7 @@ qnew on repo w/svn subrepo
   A .hgsub
   $ hg qnew -m0 0.diff
   $ cd sub
-  $ echo a > a
+  $ echo foo > a
   $ svn add a
   A         a
   $ svn st
@@ -4,7 +4,7 @@ Test features and behaviors related to t
 
   $ cat << EOF >> $HGRCPATH
   > [format]
-  > internal-phase=yes
+  > exp-archived-phase=yes
   > [extensions]
   > strip=
   > [experimental]
@@ -879,7 +879,7 @@ Test for the "internal" phase
 
 Check we deny its usage on older repository
 
-  $ hg init no-internal-phase --config format.internal-phase=no
+  $ hg init no-internal-phase --config format.use-internal-phase=no
   $ cd no-internal-phase
   $ hg debugrequires | grep internal-phase
   [1]
900
900
901 Check it works fine with repository that supports it.
901 Check it works fine with repository that supports it.
902
902
903 $ hg init internal-phase --config format.internal-phase=yes
903 $ hg init internal-phase --config format.use-internal-phase=yes
904 $ cd internal-phase
904 $ cd internal-phase
905 $ hg debugrequires | grep internal-phase
905 $ hg debugrequires | grep internal-phase
906 internal-phase
906 internal-phase-2
907 $ mkcommit A
907 $ mkcommit A
908 test-debug-phase: new rev 0: x -> 1
908 test-debug-phase: new rev 0: x -> 1
909 test-hook-close-phase: 4a2df7238c3b48766b5e22fafbb8a2f506ec8256: -> draft
909 test-hook-close-phase: 4a2df7238c3b48766b5e22fafbb8a2f506ec8256: -> draft
@@ -951,21 +951,28 @@ Test for archived phase
 
 Commit an archived changesets
 
+  $ cd ..
+  $ hg clone --quiet --pull internal-phase archived-phase \
+  > --config format.exp-archived-phase=yes \
+  > --config extensions.phasereport='!' \
+  > --config hooks.txnclose-phase.test=
+
+  $ cd archived-phase
+
   $ echo B > B
   $ hg add B
   $ hg status
   A B
   $ hg --config "phases.new-commit=archived" commit -m "my test archived commit"
-  test-debug-phase: new rev 2: x -> 32
+  test-debug-phase: new rev 1: x -> 32
   test-hook-close-phase: 8df5997c3361518f733d1ae67cd3adb9b0eaf125: -> archived
 
 The changeset is a working parent descendant.
 Per the usual visibility rules, it is made visible.
 
   $ hg log -G -l 3
-  @  changeset:   2:8df5997c3361
+  @  changeset:   1:8df5997c3361
   |  tag:         tip
-  |  parent:      0:4a2df7238c3b
   |  user:        test
   |  date:        Thu Jan 01 00:00:00 1970 +0000
   |  summary:     my test archived commit
@@ -387,6 +387,8 @@ Testing output for the --check (-c) flag
 
   $ touch a
   $ hg -q commit -A -l - << EOF
+  > commit 2
+  >
   > .. asf::
   >
   >     First paragraph under this admonition.
@@ -395,7 +397,7 @@ Testing output for the --check (-c) flag
 Suggest similar admonition in place of the invalid one.
 
   $ hg releasenotes -r . -c
-  Invalid admonition 'asf' present in changeset 4026fe9e1c20
+  Invalid admonition 'asf' present in changeset 99fa3c800c5e
 
   $ touch b
   $ hg -q commit -A -l - << EOF
@@ -405,7 +407,7 @@ Suggest similar admonition in place of t
   > EOF
 
   $ hg releasenotes -r . -c
-  Invalid admonition 'fixes' present in changeset 0e7130d2705c
+  Invalid admonition 'fixes' present in changeset 4737b1b5afd1
   (did you mean fix?)
 
   $ cd ..
@@ -2974,6 +2974,25 @@ test sorting by multiple keys including 
   1 b11 m12 u111 112 7200
   0 b12 m111 u112 111 10800
 
+random sort
+
+  $ hg log --rev 'sort(all(), "random")' | wc -l
+  \s*8 (re)
+  $ hg log --rev 'sort(all(), "-random")' | wc -l
+  \s*8 (re)
+  $ hg log --rev 'sort(all(), "random", random.seed=celeste)'
+  6 b111 t2 tu 130 0
+  7 b111 t3 tu 130 0
+  4 b111 m112 u111 110 14400
+  3 b112 m111 u11 120 0
+  5 b111 t1 tu 130 0
+  0 b12 m111 u112 111 10800
+  1 b11 m12 u111 112 7200
+  2 b111 m11 u12 111 3600
+  $ hg log --rev 'first(sort(all(), "random", random.seed=celeste))'
+  6 b111 t2 tu 130 0
+
+
 topographical sorting can't be combined with other sort keys, and you can't
 use the topo.firstbranch option when topo sort is not active:
 
@@ -1481,6 +1481,20 @@ prepare repository that has "default" br
   $ hg init namedbranch
   $ cd namedbranch
 
+  $ log 'roots(.)'
+  -1
+  $ log 'roots(. or wdir())'
+  -1
+  $ log 'roots(wdir())'
+  2147483647
+  $ log 'sort(., -topo)'
+  -1
+  $ log 'sort(. or wdir(), -topo)'
+  -1
+  2147483647
+  $ log 'sort(wdir(), -topo)'
+  2147483647
+
   $ echo default0 >> a
   $ hg ci -Aqm0
   $ echo default1 >> a
@@ -1498,6 +1512,17 @@ prepare repository that has "default" br
   $ echo default5 >> a
   $ hg ci -m5
 
+  $ log 'roots(. or wdir())'
+  5
+  $ log 'roots(wdir())'
+  2147483647
+  $ log 'sort(. or wdir() or .^, -topo)'
+  4
+  5
+  2147483647
+  $ log 'sort(wdir(), -topo)'
+  2147483647
+
 "null" revision belongs to "default" branch (issue4683)
 
   $ log 'branch(null)'
@@ -85,15 +85,12 @@ hidden by narrow, so we just fall back t
   dir1/x
   dir1/y
 
-Hg status needs to do some filtering based on narrow spec, so we don't
-support it in rhg for narrow clones yet.
+Hg status needs to do some filtering based on narrow spec
 
   $ mkdir dir2
   $ touch dir2/q
   $ "$real_hg" status
   $ $NO_FALLBACK rhg --config rhg.status=true status
-  unsupported feature: rhg status is not supported for sparse checkouts or narrow clones yet
-  [252]
 
 Adding "orphaned" index files:
 
@@ -24,7 +24,7 @@
 
   $ cat <<EOF >> $HGRCPATH
   > [format]
-  > internal-phase = yes
+  > use-internal-phase = yes
   > EOF
 
 #endif
@@ -253,12 +253,12 @@ apply it and make sure our state is as e
 (this also tests that same timestamp prevents backups from being
 removed, even though there are more than 'maxbackups' backups)
 
-  $ f -t .hg/shelve-backup/default.patch
-  .hg/shelve-backup/default.patch: file
-  $ touch -t 200001010000 .hg/shelve-backup/default.patch
-  $ f -t .hg/shelve-backup/default-1.patch
-  .hg/shelve-backup/default-1.patch: file
-  $ touch -t 200001010000 .hg/shelve-backup/default-1.patch
+  $ f -t .hg/shelve-backup/default.shelve
+  .hg/shelve-backup/default.shelve: file
+  $ touch -t 200001010000 .hg/shelve-backup/default.shelve
+  $ f -t .hg/shelve-backup/default-1.shelve
+  .hg/shelve-backup/default-1.shelve: file
+  $ touch -t 200001010000 .hg/shelve-backup/default-1.shelve
 
   $ hg unshelve
   unshelving change 'default-01'
@@ -1544,4 +1544,87 b' produced by `hg shelve`.'
   $ hg update -q --clean .
   $ hg patch -p1 test_patch.patch
   applying test_patch.patch
+
+  $ hg strip -q -r .
 #endif
+
+Check the comment of the last commit for consistency
+
+  $ hg log -r . --template '{desc}\n'
+  add C to bars
+
+-- if phasebased, shelve works without patch and bundle
+
+  $ hg update -q --clean .
+  $ rm -r .hg/shelve*
+  $ echo import antigravity >> somefile.py
+  $ hg add somefile.py
+  $ hg shelve -q
+#if phasebased
+  $ rm .hg/shelved/default.hg
+  $ rm .hg/shelved/default.patch
+#endif
+
+shelve --list --patch should work even with no patch file.
+
+  $ hg shelve --list --patch
+  default (*s ago) * changes to: add C to bars (glob)
+
+  diff --git a/somefile.py b/somefile.py
+  new file mode 100644
+  --- /dev/null
+  +++ b/somefile.py
+  @@ -0,0 +1,1 @@
+  +import antigravity
+
+  $ hg unshelve
+  unshelving change 'default'
+
+#if phasebased
+  $ ls .hg/shelve-backup
+  default.shelve
+#endif
+
+#if stripbased
+  $ ls .hg/shelve-backup
+  default.hg
+  default.patch
+  default.shelve
+#endif
+
+
+-- allow for phase-based shelves to be disabled
+
+  $ hg update -q --clean .
+  $ hg strip -q --hidden -r 0
+  $ rm -r .hg/shelve*
+
+#if phasebased
+  $ cat <<EOF >> $HGRCPATH
+  > [shelve]
+  > store = strip
+  > EOF
+#endif
+
+  $ echo import this >> somefile.py
+  $ hg add somefile.py
+  $ hg shelve -q
+  $ hg log --hidden
+  $ ls .hg/shelved
+  default.hg
+  default.patch
+  default.shelve
+  $ hg unshelve -q
+
+Override the disabling, re-enabling phase-based shelves
+
+  $ hg shelve --config shelve.store=internal -q
+
+#if phasebased
+  $ hg log --hidden --template '{user}\n'
+  shelve@localhost
+#endif
+
+#if stripbased
+  $ hg log --hidden --template '{user}\n'
+#endif
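
The new block exercises the two shelve storage backends: phase-based shelves
(the default when internal phases are enabled) keep everything in a single
.shelve file, while strip-based shelves additionally write a .hg bundle and a
.patch file, hence the differing ls output under #if phasebased and
#if stripbased. As the test shows, the backend can be forced either way
through the shelve.store option, for instance:

  [shelve]
  store = internal
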
@@ -26,7 +26,7 b''
 
   $ cat <<EOF >> $HGRCPATH
   > [format]
-  > internal-phase = yes
+  > use-internal-phase = yes
   > EOF
 
 #endif
@@ -637,9 +637,16 b' using ui.statuscopies setting'
   M a
     b
   R b
+  $ hg st --config ui.statuscopies=true --no-copies
+  M a
+  R b
   $ hg st --config ui.statuscopies=false
   M a
   R b
+  $ hg st --config ui.statuscopies=false --copies
+  M a
+    b
+  R b
   $ hg st --config ui.tweakdefaults=yes
   M a
     b
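
The added cases pin down precedence between the ui.statuscopies option and
the command-line toggles: --no-copies hides copy sources even when the option
is enabled, and --copies shows them even when it is disabled, the flag always
winning over the configuration. In everyday use the toggles look like:

  $ hg status --copies
  $ hg status --no-copies
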
@@ -591,7 +591,7 b' well.'
   $ cd "$WCROOT"
   $ svn up > /dev/null
   $ mkdir trunk/subdir branches
-  $ echo a > trunk/subdir/a
+  $ echo foo > trunk/subdir/a
   $ svn add trunk/subdir branches
   A trunk/subdir
   A trunk/subdir/a
@@ -1718,4 +1718,19 b' read config options:'
   $ hg log -T "{config('templateconfig', 'knob', if(true, 'foo', 'bar'))}\n"
   foo
 
+reverse filter:
+
+  $ hg log -T "{'abc\ndef\nghi'|splitlines|reverse}\n"
+  ghi def abc
+
+  $ hg log -T "{'abc'|reverse}\n"
+  hg: parse error: not reversible
+  (incompatible use of template filter 'reverse')
+  [10]
+
+  $ hg log -T "{date|reverse}\n"
+  hg: parse error: not reversible
+  (template filter 'reverse' is not compatible with keyword 'date')
+  [10]
+
   $ cd ..
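
These cases cover the new reverse template filter: it reverses a list value,
such as the one produced by splitlines, and raises a parse error with exit
code 10 for plain strings and for keywords like date that are not list-like.
One plausible use, assuming any list-valued template expression is accepted,
is printing a commit description bottom-up:

  $ hg log -r . -T "{desc|splitlines|reverse}\n"
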
@@ -633,6 +633,10 b' Test that statuses are reported properly'
 #
 # To mark files as resolved: hg resolve --mark FILE
 
+  $ hg status -T '{status} {path} - {relpath(path)}\n'
+  M foo - foo
+  a - a
+
   $ hg status -Tjson
   [
     {