branching: merge stable into default
Raphaël Gomès
r49098:96aa3a68 merge default

--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,188 +1,188 @@
stages:
    - tests
    - phabricator

image: registry.heptapod.net/mercurial/ci-images/mercurial-core:$HG_CI_IMAGE_TAG

variables:
    PYTHON: python
    TEST_HGMODULEPOLICY: "allow"
    HG_CI_IMAGE_TAG: "latest"
    TEST_HGTESTS_ALLOW_NETIO: "0"

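# Note: these are pipeline-wide defaults; individual jobs below override
# them. TEST_HGMODULEPOLICY feeds HGMODULEPOLICY in the run-tests invocation
# and selects which module implementation gets exercised ("c", "py",
# "rust+c", or the permissive default "allow").
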
.all_template: &all
    when: always

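# `&all` defines a YAML anchor; the templates and jobs below pull these keys
# in with the `<<: *all` merge key, so every job inherits `when: always`
# unless it overrides it (the Windows jobs switch to `when: manual`).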
.runtests_template: &runtests
    <<: *all
    stage: tests
    # The runner made a clone as root.
    # We make a new clone owned by the user used to run the step.
    before_script:
        - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no
        - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
        - cd /tmp/mercurial-ci/
        - ls -1 tests/test-check-*.* > /tmp/check-tests.txt
        - black --version
        - clang-format --version
    script:
        - echo "python used, $PYTHON"
        - echo "$RUNTEST_ARGS"
        - HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS

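# The two checks-* jobs run only the test-check-* linting tests collected
# into /tmp/check-tests.txt by the template above; the test-* jobs later
# pass that same file to --blacklist so the checks are not run twice.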
checks-py2:
    <<: *runtests
    variables:
        RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt"

checks-py3:
    <<: *runtests
    variables:
        RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt"
        PYTHON: python3

rust-cargo-test-py2: &rust_cargo_test
    <<: *all
    stage: tests
    script:
        - echo "python used, $PYTHON"
        - make rust-tests

rust-cargo-test-py3:
    stage: tests
    <<: *rust_cargo_test
    variables:
        PYTHON: python3

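# This job posts a refresh comment on the corresponding Phabricator review
# stack once the tests stage has passed, with a louder banner for patches
# targeted at the stable branch.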
phabricator-refresh:
    stage: phabricator
    variables:
        DEFAULT_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)"
        STABLE_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)\n⚠ This patch is intended for stable ⚠\n{image https://media.giphy.com/media/nYI8SmmChYXK0/source.gif}"
    script:
        - |
          if [ `hg branch` == "stable" ]; then
              ./contrib/phab-refresh-stack.sh --comment "$STABLE_COMMENT";
          else
              ./contrib/phab-refresh-stack.sh --comment "$DEFAULT_COMMENT";
          fi

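# The test-* jobs below run the full suite (minus the blacklisted checks)
# under the different module policies: C extensions, pure Python, Rust
# extensions, the rhg executable, and the chg command server.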
test-py2:
    <<: *runtests
    variables:
        RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt"
        TEST_HGMODULEPOLICY: "c"
        TEST_HGTESTS_ALLOW_NETIO: "1"

test-py3:
    <<: *runtests
    variables:
        RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt"
        PYTHON: python3
        TEST_HGMODULEPOLICY: "c"
        TEST_HGTESTS_ALLOW_NETIO: "1"

test-py2-pure:
    <<: *runtests
    variables:
        RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt"
        TEST_HGMODULEPOLICY: "py"

test-py3-pure:
    <<: *runtests
    variables:
        RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt"
        PYTHON: python3
        TEST_HGMODULEPOLICY: "py"

test-py2-rust:
    <<: *runtests
    variables:
        HGWITHRUSTEXT: cpython
        RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt"
        TEST_HGMODULEPOLICY: "rust+c"

test-py3-rust:
    <<: *runtests
    variables:
        HGWITHRUSTEXT: cpython
        RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt"
        PYTHON: python3
        TEST_HGMODULEPOLICY: "rust+c"

test-py3-rhg:
    <<: *runtests
    variables:
        HGWITHRUSTEXT: cpython
        RUNTEST_ARGS: "--rust --rhg --blacklist /tmp/check-tests.txt"
        PYTHON: python3
        TEST_HGMODULEPOLICY: "rust+c"

test-py2-chg:
    <<: *runtests
    variables:
        RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg"
        TEST_HGMODULEPOLICY: "c"

test-py3-chg:
    <<: *runtests
    variables:
        PYTHON: python3
        RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg"
        TEST_HGMODULEPOLICY: "c"

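# check-pytype-py3 overrides before_script: it builds the local C extensions
# (`make local`) and installs a pinned pytype before running the single
# test-check-pytype.t test, with a long slow-test timeout (3600s).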
check-pytype-py3:
    extends: .runtests_template
    before_script:
        - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no
        - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
        - cd /tmp/mercurial-ci/
        - make local PYTHON=$PYTHON
        - $PYTHON -m pip install --user -U pytype==2021.04.15
    variables:
        RUNTEST_ARGS: " --allow-slow-tests tests/test-check-pytype.t"
-       HGTEST_TIMEOUT: "3600"
+       HGTEST_SLOWTIMEOUT: "3600"
        PYTHON: python3
        TEST_HGMODULEPOLICY: "c"

# `sh.exe --login` sets a couple of extra environment variables that are
# defined in the MinGW shell, but switches CWD to /home/$username. The
# previous value is stored in OLDPWD. Of the added variables, MSYSTEM is
# crucial to running run-tests.py: it is needed to make run-tests.py
# generate a `python3` script that satisfies the various shebang lines and
# delegates to `py -3`.
.window_runtests_template: &windows_runtests
    <<: *all
    when: manual  # we don't have any Windows runners anymore at the moment
    stage: tests
    before_script:
        - C:/MinGW/msys/1.0/bin/sh.exe --login -c 'cd "$OLDPWD" && ls -1 tests/test-check-*.* > C:/Temp/check-tests.txt'
        # TODO: find/install cvs, bzr, perforce, gpg, sqlite3

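    # Each sh.exe invocation below is a fresh login shell, so any command
    # that needs the checkout must `cd "$OLDPWD"` back out of the MinGW
    # home directory first (see the comment above this template).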
    script:
        - echo "Entering script section"
        - echo "python used, $Env:PYTHON"
        - Invoke-Expression "$Env:PYTHON -V"
        - Invoke-Expression "$Env:PYTHON -m black --version"
        - echo "$Env:RUNTEST_ARGS"
        - echo "$Env:TMP"
        - echo "$Env:TEMP"

        - C:/MinGW/msys/1.0/bin/sh.exe --login -c 'cd "$OLDPWD" && HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" $PYTHON tests/run-tests.py --color=always $RUNTEST_ARGS'

windows-py3:
    <<: *windows_runtests
    tags:
        - windows
    variables:
        TEST_HGMODULEPOLICY: "c"
        RUNTEST_ARGS: "--blacklist C:/Temp/check-tests.txt"
        PYTHON: py -3

windows-py3-pyox:
    <<: *windows_runtests
    tags:
        - windows
    variables:
        TEST_HGMODULEPOLICY: "c"
        RUNTEST_ARGS: "--blacklist C:/Temp/check-tests.txt --pyoxidized"
        PYTHON: py -3

--- a/mercurial/merge.py
+++ b/mercurial/merge.py
@@ -1,2430 +1,2434 @@
# merge.py - directory-level update/merge handling for Mercurial
#
# Copyright 2006, 2007 Olivia Mackall <olivia@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import collections
import errno
import struct

from .i18n import _
from .node import nullrev
from .thirdparty import attr
from .utils import stringutil
from .dirstateutils import timestamp
from . import (
    copies,
    encoding,
    error,
    filemerge,
    match as matchmod,
    mergestate as mergestatemod,
    obsutil,
    pathutil,
    pycompat,
    scmutil,
    subrepoutil,
    util,
    worker,
)

_pack = struct.pack
_unpack = struct.unpack


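# This module still straddles Python 2 and 3: note the absolute_import
# future, the pycompat helpers, and the b'' byte-string literals used for
# config keys and user-visible messages throughout.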
def _getcheckunknownconfig(repo, section, name):
    config = repo.ui.config(section, name)
    valid = [b'abort', b'ignore', b'warn']
    if config not in valid:
        validstr = b', '.join([b"'" + v + b"'" for v in valid])
        raise error.ConfigError(
            _(b"%s.%s not valid ('%s' is none of %s)")
            % (section, name, config, validstr)
        )
    return config


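# An "unknown" file here is one that exists in the working directory but is
# not tracked in the dirstate; _checkunknownfile() reports True only when
# such a file would be clobbered by incoming content that differs from it.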
def _checkunknownfile(repo, wctx, mctx, f, f2=None):
    if wctx.isinmemory():
        # Nothing to do in IMM because nothing in the "working copy" can be an
        # unknown file.
        #
        # Note that we should bail out here, not in ``_checkunknownfiles()``,
        # because that function does other useful work.
        return False

    if f2 is None:
        f2 = f
    return (
        repo.wvfs.audit.check(f)
        and repo.wvfs.isfileorlink(f)
        and repo.dirstate.normalize(f) not in repo.dirstate
        and mctx[f2].cmp(wctx[f])
    )


class _unknowndirschecker(object):
    """
    Look for any unknown files or directories that may have a path conflict
    with a file. If any path prefix of the file exists as a file or link,
    then it conflicts. If the file itself is a directory that contains any
    file that is not tracked, then it conflicts.

    Returns the shortest path at which a conflict occurs, or None if there is
    no conflict.
    """

    def __init__(self):
        # A set of paths known to be good. This prevents repeated checking of
        # dirs. It will be updated with any new dirs that are checked and found
        # to be safe.
        self._unknowndircache = set()

        # A set of paths that are known to be absent. This prevents repeated
        # checking of subdirectories that are known not to exist. It will be
        # updated with any new dirs that are checked and found to be absent.
        self._missingdircache = set()

    def __call__(self, repo, wctx, f):
        if wctx.isinmemory():
            # Nothing to do in IMM for the same reason as ``_checkunknownfile``.
            return False

        # Check for path prefixes that exist as unknown files.
        for p in reversed(list(pathutil.finddirs(f))):
            if p in self._missingdircache:
                return
            if p in self._unknowndircache:
                continue
            if repo.wvfs.audit.check(p):
                if (
                    repo.wvfs.isfileorlink(p)
                    and repo.dirstate.normalize(p) not in repo.dirstate
                ):
                    return p
                if not repo.wvfs.lexists(p):
                    self._missingdircache.add(p)
                    return
                self._unknowndircache.add(p)

        # Check if the file conflicts with a directory containing unknown files.
        if repo.wvfs.audit.check(f) and repo.wvfs.isdir(f):
            # Does the directory contain any files that are not in the dirstate?
            for p, dirs, files in repo.wvfs.walk(f):
                for fn in files:
                    relf = util.pconvert(repo.wvfs.reljoin(p, fn))
                    relf = repo.dirstate.normalize(relf, isknown=True)
                    if relf not in repo.dirstate:
                        return f
        return None


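# How conflicting unknown files are handled is driven by two config knobs,
# merge.checkunknown and merge.checkignored (each one of 'abort', 'ignore'
# or 'warn', validated by _getcheckunknownconfig() above), plus
# experimental.merge.checkpathconflicts, which gates path conflict detection.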
def _checkunknownfiles(repo, wctx, mctx, force, mresult, mergeforce):
    """
    Considers any actions that care about the presence of conflicting unknown
    files. For some actions, the result is to abort; for others, it is to
    choose a different action.
    """
    fileconflicts = set()
    pathconflicts = set()
    warnconflicts = set()
    abortconflicts = set()
    unknownconfig = _getcheckunknownconfig(repo, b'merge', b'checkunknown')
    ignoredconfig = _getcheckunknownconfig(repo, b'merge', b'checkignored')
    pathconfig = repo.ui.configbool(
        b'experimental', b'merge.checkpathconflicts'
    )
    if not force:

        def collectconflicts(conflicts, config):
            if config == b'abort':
                abortconflicts.update(conflicts)
            elif config == b'warn':
                warnconflicts.update(conflicts)

        checkunknowndirs = _unknowndirschecker()
        for f in mresult.files(
            (
                mergestatemod.ACTION_CREATED,
                mergestatemod.ACTION_DELETED_CHANGED,
            )
        ):
            if _checkunknownfile(repo, wctx, mctx, f):
                fileconflicts.add(f)
            elif pathconfig and f not in wctx:
                path = checkunknowndirs(repo, wctx, f)
                if path is not None:
                    pathconflicts.add(path)
        for f, args, msg in mresult.getactions(
            [mergestatemod.ACTION_LOCAL_DIR_RENAME_GET]
        ):
            if _checkunknownfile(repo, wctx, mctx, f, args[0]):
                fileconflicts.add(f)

        allconflicts = fileconflicts | pathconflicts
        ignoredconflicts = {c for c in allconflicts if repo.dirstate._ignore(c)}
        unknownconflicts = allconflicts - ignoredconflicts
        collectconflicts(ignoredconflicts, ignoredconfig)
        collectconflicts(unknownconflicts, unknownconfig)
    else:
        for f, args, msg in list(
            mresult.getactions([mergestatemod.ACTION_CREATED_MERGE])
        ):
            fl2, anc = args
            different = _checkunknownfile(repo, wctx, mctx, f)
            if repo.dirstate._ignore(f):
                config = ignoredconfig
            else:
                config = unknownconfig

            # The behavior when force is True is described by this table:
            #   config  different  mergeforce  |    action    backup
            #     *         n          *       |      get        n
            #     *         y          y       |     merge       -
            #    abort      y          n       |     merge       -   (1)
            #    warn       y          n       |  warn + get     y
            #   ignore      y          n       |      get        y
            #
            # (1) this is probably the wrong behavior here -- we should
            #     probably abort, but some actions like rebases currently
            #     don't like an abort happening in the middle of
            #     merge.update.
            if not different:
                mresult.addfile(
                    f,
                    mergestatemod.ACTION_GET,
                    (fl2, False),
                    b'remote created',
                )
            elif mergeforce or config == b'abort':
                mresult.addfile(
                    f,
                    mergestatemod.ACTION_MERGE,
                    (f, f, None, False, anc),
                    b'remote differs from untracked local',
                )
            elif config == b'abort':
                abortconflicts.add(f)
            else:
                if config == b'warn':
                    warnconflicts.add(f)
                mresult.addfile(
                    f,
                    mergestatemod.ACTION_GET,
                    (fl2, True),
                    b'remote created',
                )

    for f in sorted(abortconflicts):
        warn = repo.ui.warn
        if f in pathconflicts:
            if repo.wvfs.isfileorlink(f):
                warn(_(b"%s: untracked file conflicts with directory\n") % f)
            else:
                warn(_(b"%s: untracked directory conflicts with file\n") % f)
        else:
            warn(_(b"%s: untracked file differs\n") % f)
    if abortconflicts:
        raise error.StateError(
            _(
                b"untracked files in working directory "
                b"differ from files in requested revision"
            )
        )

    for f in sorted(warnconflicts):
        if repo.wvfs.isfileorlink(f):
            repo.ui.warn(_(b"%s: replacing untracked file\n") % f)
        else:
            repo.ui.warn(_(b"%s: replacing untracked files in directory\n") % f)

    for f, args, msg in list(
        mresult.getactions([mergestatemod.ACTION_CREATED])
    ):
        backup = (
            f in fileconflicts
            or f in pathconflicts
            or any(p in pathconflicts for p in pathutil.finddirs(f))
        )
        (flags,) = args
        mresult.addfile(f, mergestatemod.ACTION_GET, (flags, backup), msg)


def _forgetremoved(wctx, mctx, branchmerge, mresult):
    """
    Forget removed files

    If we're jumping between revisions (as opposed to merging), and if
    neither the working directory nor the target rev has the file,
    then we need to remove it from the dirstate, to prevent the
    dirstate from listing the file when it is no longer in the
    manifest.

    If we're merging, and the other revision has removed a file
    that is not present in the working directory, we need to mark it
    as removed.
    """

    m = mergestatemod.ACTION_FORGET
    if branchmerge:
        m = mergestatemod.ACTION_REMOVE
    for f in wctx.deleted():
        if f not in mctx:
            mresult.addfile(f, m, None, b"forget deleted")

    if not branchmerge:
        for f in wctx.removed():
            if f not in mctx:
                mresult.addfile(
                    f,
                    mergestatemod.ACTION_FORGET,
                    None,
                    b"forget removed",
                )


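# On case-insensitive filesystems two tracked paths that fold to the same
# name (e.g. README and readme) cannot coexist, so _checkcollision() builds
# a provisional post-merge manifest and rejects such collisions up front.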
def _checkcollision(repo, wmf, mresult):
    """
    Check for case-folding collisions.
    """
    # If the repo is narrowed, filter out files outside the narrowspec.
    narrowmatch = repo.narrowmatch()
    if not narrowmatch.always():
        pmmf = set(wmf.walk(narrowmatch))
        if mresult:
            for f in list(mresult.files()):
                if not narrowmatch(f):
                    mresult.removefile(f)
    else:
        # build provisional merged manifest up
        pmmf = set(wmf)

    if mresult:
        # KEEP and EXEC are no-op
        for f in mresult.files(
            (
                mergestatemod.ACTION_ADD,
                mergestatemod.ACTION_ADD_MODIFIED,
                mergestatemod.ACTION_FORGET,
                mergestatemod.ACTION_GET,
                mergestatemod.ACTION_CHANGED_DELETED,
                mergestatemod.ACTION_DELETED_CHANGED,
            )
        ):
            pmmf.add(f)
        for f in mresult.files((mergestatemod.ACTION_REMOVE,)):
            pmmf.discard(f)
        for f, args, msg in mresult.getactions(
            [mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL]
        ):
            f2, flags = args
            pmmf.discard(f2)
            pmmf.add(f)
        for f in mresult.files((mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,)):
            pmmf.add(f)
        for f, args, msg in mresult.getactions([mergestatemod.ACTION_MERGE]):
            f1, f2, fa, move, anc = args
            if move:
                pmmf.discard(f1)
            pmmf.add(f)

    # check case-folding collision in provisional merged manifest
    foldmap = {}
    for f in pmmf:
        fold = util.normcase(f)
        if fold in foldmap:
            raise error.StateError(
                _(b"case-folding collision between %s and %s")
                % (f, foldmap[fold])
            )
        foldmap[fold] = f

    # check case-folding of directories
    foldprefix = unfoldprefix = lastfull = b''
    for fold, f in sorted(foldmap.items()):
        if fold.startswith(foldprefix) and not f.startswith(unfoldprefix):
            # the folded prefix matches but actual casing is different
            raise error.StateError(
                _(b"case-folding collision between %s and directory of %s")
                % (lastfull, f)
            )
        foldprefix = fold + b'/'
        unfoldprefix = f + b'/'
        lastfull = f


def _filesindirs(repo, manifest, dirs):
    """
    Generator that yields pairs of all the files in the manifest that are found
    inside the directories listed in dirs, and which directory they are found
    in.
    """
    for f in manifest:
        for p in pathutil.finddirs(f):
            if p in dirs:
                yield f, p
                break


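# A path conflict arises when one side of the merge has a file at a path the
# other side uses as a directory, e.g. a local file 'a' versus a remote file
# 'a/b'. Conflicting local files are renamed out of the way with
# util.safename() rather than aborting the merge.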
def checkpathconflicts(repo, wctx, mctx, mresult):
    """
    Check if any actions introduce path conflicts in the repository, updating
    actions to record or handle the path conflict accordingly.
    """
    mf = wctx.manifest()

    # The set of local files that conflict with a remote directory.
    localconflicts = set()

    # The set of directories that conflict with a remote file, and so may cause
    # conflicts if they still contain any files after the merge.
    remoteconflicts = set()

    # The set of directories that appear as both a file and a directory in the
    # remote manifest. These indicate an invalid remote manifest, which
    # can't be updated to cleanly.
    invalidconflicts = set()

    # The set of directories that contain files that are being created.
    createdfiledirs = set()

    # The set of files deleted by all the actions.
    deletedfiles = set()

    for f in mresult.files(
        (
            mergestatemod.ACTION_CREATED,
            mergestatemod.ACTION_DELETED_CHANGED,
            mergestatemod.ACTION_MERGE,
            mergestatemod.ACTION_CREATED_MERGE,
        )
    ):
        # This action may create a new local file.
        createdfiledirs.update(pathutil.finddirs(f))
        if mf.hasdir(f):
            # The file aliases a local directory. This might be ok if all
            # the files in the local directory are being deleted. This
            # will be checked once we know what all the deleted files are.
            remoteconflicts.add(f)
    # Track the names of all deleted files.
    for f in mresult.files((mergestatemod.ACTION_REMOVE,)):
        deletedfiles.add(f)
    for (f, args, msg) in mresult.getactions((mergestatemod.ACTION_MERGE,)):
        f1, f2, fa, move, anc = args
        if move:
            deletedfiles.add(f1)
    for (f, args, msg) in mresult.getactions(
        (mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL,)
    ):
        f2, flags = args
        deletedfiles.add(f2)

    # Check all directories that contain created files for path conflicts.
    for p in createdfiledirs:
        if p in mf:
            if p in mctx:
                # A file is in a directory which aliases both a local
                # and a remote file. This is an internal inconsistency
                # within the remote manifest.
                invalidconflicts.add(p)
            else:
                # A file is in a directory which aliases a local file.
                # We will need to rename the local file.
                localconflicts.add(p)
        pd = mresult.getfile(p)
        if pd and pd[0] in (
            mergestatemod.ACTION_CREATED,
            mergestatemod.ACTION_DELETED_CHANGED,
            mergestatemod.ACTION_MERGE,
            mergestatemod.ACTION_CREATED_MERGE,
        ):
            # The file is in a directory which aliases a remote file.
            # This is an internal inconsistency within the remote
            # manifest.
            invalidconflicts.add(p)

    # Rename all local conflicting files that have not been deleted.
    for p in localconflicts:
        if p not in deletedfiles:
            ctxname = bytes(wctx).rstrip(b'+')
            pnew = util.safename(p, ctxname, wctx, set(mresult.files()))
            porig = wctx[p].copysource() or p
            mresult.addfile(
                pnew,
                mergestatemod.ACTION_PATH_CONFLICT_RESOLVE,
                (p, porig),
                b'local path conflict',
            )
            mresult.addfile(
                p,
                mergestatemod.ACTION_PATH_CONFLICT,
                (pnew, b'l'),
                b'path conflict',
            )

    if remoteconflicts:
        # Check if all files in the conflicting directories have been removed.
        ctxname = bytes(mctx).rstrip(b'+')
        for f, p in _filesindirs(repo, mf, remoteconflicts):
            if f not in deletedfiles:
                m, args, msg = mresult.getfile(p)
                pnew = util.safename(p, ctxname, wctx, set(mresult.files()))
                if m in (
                    mergestatemod.ACTION_DELETED_CHANGED,
                    mergestatemod.ACTION_MERGE,
                ):
                    # Action was merge, just update target.
                    mresult.addfile(pnew, m, args, msg)
                else:
                    # Action was create, change to renamed get action.
                    fl = args[0]
                    mresult.addfile(
                        pnew,
                        mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,
                        (p, fl),
                        b'remote path conflict',
                    )
                mresult.addfile(
                    p,
                    mergestatemod.ACTION_PATH_CONFLICT,
                    (pnew, mergestatemod.ACTION_REMOVE),
                    b'path conflict',
                )
                remoteconflicts.remove(p)
                break

    if invalidconflicts:
        for p in invalidconflicts:
            repo.ui.warn(_(b"%s: is both a file and a directory\n") % p)
        raise error.StateError(
            _(b"destination manifest contains path conflicts")
        )


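# Narrow clones track only the files matched by the narrowspec; actions on
# files outside it are either dropped (plain updates and no-op merge
# actions) or turned into an Abort, since merging such files is not yet
# supported.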
def _filternarrowactions(narrowmatch, branchmerge, mresult):
    """
    Filters out actions that can be ignored because the repo is narrowed.

    Raise an exception if the merge cannot be completed because the repo is
    narrowed.
    """
    # TODO: handle with nonconflicttypes
    nonconflicttypes = {
        mergestatemod.ACTION_ADD,
        mergestatemod.ACTION_ADD_MODIFIED,
        mergestatemod.ACTION_CREATED,
        mergestatemod.ACTION_CREATED_MERGE,
        mergestatemod.ACTION_FORGET,
        mergestatemod.ACTION_GET,
        mergestatemod.ACTION_REMOVE,
        mergestatemod.ACTION_EXEC,
    }
    # We mutate the items in the dict during iteration, so iterate
    # over a copy.
    for f, action in mresult.filemap():
        if narrowmatch(f):
            pass
        elif not branchmerge:
            mresult.removefile(f)  # just updating, ignore changes outside clone
        elif action[0] in mergestatemod.NO_OP_ACTIONS:
            mresult.removefile(f)  # merge does not affect file
        elif action[0] in nonconflicttypes:
            raise error.Abort(
                _(
                    b'merge affects file \'%s\' outside narrow, '
                    b'which is not yet supported'
                )
                % f,
                hint=_(b'merging in the other direction may work'),
            )
        else:
            raise error.Abort(
                _(b'conflict in file \'%s\' is outside narrow clone') % f
            )


class mergeresult(object):
    """An object representing the result of merging manifests.

    It has information about what actions need to be performed on the
    dirstate, mapping of divergent renames, and other such cases."""

    def __init__(self):
        """
        filemapping: dict of filename as keys and action related info as values
        diverge: mapping of source name -> list of dest name for
                 divergent renames
        renamedelete: mapping of source name -> list of destinations for files
                      deleted on one side and renamed on other.
        commitinfo: dict containing data which should be used on commit
                    contains a filename -> info mapping
        actionmapping: dict of action names as keys and values are dict of
                       filename as key and related data as values
        """
        self._filemapping = {}
        self._diverge = {}
        self._renamedelete = {}
        self._commitinfo = collections.defaultdict(dict)
        self._actionmapping = collections.defaultdict(dict)

    def updatevalues(self, diverge, renamedelete):
        self._diverge = diverge
        self._renamedelete = renamedelete

    def addfile(self, filename, action, data, message):
        """adds a new file to the mergeresult object

        filename: file which we are adding
        action: one of mergestatemod.ACTION_*
        data: a tuple of information like fctx and ctx related to this merge
        message: a message about the merge
        """
        # if the file already existed, we need to delete its old
        # entry from _actionmapping too
        if filename in self._filemapping:
            a, d, m = self._filemapping[filename]
            del self._actionmapping[a][filename]

        self._filemapping[filename] = (action, data, message)
        self._actionmapping[action][filename] = (data, message)

    def getfile(self, filename, default_return=None):
        """returns (action, args, msg) about this file

        returns default_return if the file is not present"""
        if filename in self._filemapping:
            return self._filemapping[filename]
        return default_return

    def files(self, actions=None):
        """returns files on which the provided actions need to be performed

        If actions is None, all files are returned
        """
        # TODO: think whether we should return renamedelete and
        # diverge filenames also
        if actions is None:
            for f in self._filemapping:
                yield f

        else:
            for a in actions:
                for f in self._actionmapping[a]:
                    yield f

    def removefile(self, filename):
        """removes a file from the mergeresult object as the file might
        not be merged anymore"""
        action, data, message = self._filemapping[filename]
        del self._filemapping[filename]
        del self._actionmapping[action][filename]

    def getactions(self, actions, sort=False):
        """get list of files which are marked with these actions
        if sort is true, files for each action are sorted and then added

        Returns a list of tuples of the form (filename, data, message)
        """
        for a in actions:
            if sort:
                for f in sorted(self._actionmapping[a]):
                    args, msg = self._actionmapping[a][f]
                    yield f, args, msg
            else:
                for f, (args, msg) in pycompat.iteritems(
                    self._actionmapping[a]
                ):
                    yield f, args, msg

    def len(self, actions=None):
        """returns the number of files which need actions

        if actions is passed, only the total number of files for those
        actions is returned"""

        if actions is None:
            return len(self._filemapping)

        return sum(len(self._actionmapping[a]) for a in actions)

    def filemap(self, sort=False):
        if sort:
            for key, val in sorted(pycompat.iteritems(self._filemapping)):
                yield key, val
        else:
            for key, val in pycompat.iteritems(self._filemapping):
                yield key, val

    def addcommitinfo(self, filename, key, value):
        """adds key-value information about filename which will be required
        while committing this merge"""
        self._commitinfo[filename][key] = value

    @property
    def diverge(self):
        return self._diverge

    @property
    def renamedelete(self):
        return self._renamedelete

    @property
    def commitinfo(self):
        return self._commitinfo

    @property
    def actionsdict(self):
        """returns a dictionary of actions to be performed with action as key
        and a list of files and related arguments as values"""
        res = collections.defaultdict(list)
        for a, d in pycompat.iteritems(self._actionmapping):
            for f, (args, msg) in pycompat.iteritems(d):
                res[a].append((f, args, msg))
        return res

    def setactions(self, actions):
        self._filemapping = actions
        self._actionmapping = collections.defaultdict(dict)
        for f, (act, data, msg) in pycompat.iteritems(self._filemapping):
            self._actionmapping[act][f] = data, msg

    def hasconflicts(self):
        """tells whether this merge resulted in some actions which can
        result in conflicts or not"""
        for a in self._actionmapping.keys():
            if (
                a
                not in (
                    mergestatemod.ACTION_GET,
                    mergestatemod.ACTION_EXEC,
                    mergestatemod.ACTION_REMOVE,
                    mergestatemod.ACTION_PATH_CONFLICT_RESOLVE,
                )
                and self._actionmapping[a]
                and a not in mergestatemod.NO_OP_ACTIONS
            ):
                return True

        return False


715 def manifestmerge(
715 def manifestmerge(
716 repo,
716 repo,
717 wctx,
717 wctx,
718 p2,
718 p2,
719 pa,
719 pa,
720 branchmerge,
720 branchmerge,
721 force,
721 force,
722 matcher,
722 matcher,
723 acceptremote,
723 acceptremote,
724 followcopies,
724 followcopies,
725 forcefulldiff=False,
725 forcefulldiff=False,
726 ):
726 ):
727 """
727 """
728 Merge wctx and p2 with ancestor pa and generate merge action list
728 Merge wctx and p2 with ancestor pa and generate merge action list
729
729
730 branchmerge and force are as passed in to update
730 branchmerge and force are as passed in to update
731 matcher = matcher to filter file lists
731 matcher = matcher to filter file lists
732 acceptremote = accept the incoming changes without prompting
732 acceptremote = accept the incoming changes without prompting
733
733
734 Returns an object of mergeresult class
734 Returns an object of mergeresult class
735 """
735 """
    mresult = mergeresult()
    if matcher is not None and matcher.always():
        matcher = None

    # manifests fetched in order are going to be faster, so prime the caches
    [
        x.manifest()
        for x in sorted(wctx.parents() + [p2, pa], key=scmutil.intrev)
    ]

    branch_copies1 = copies.branch_copies()
    branch_copies2 = copies.branch_copies()
    diverge = {}
    # information from merge which is needed at commit time
    # for example choosing filelog of which parent to commit
    # TODO: use specific constants in future for this mapping
    if followcopies:
        branch_copies1, branch_copies2, diverge = copies.mergecopies(
            repo, wctx, p2, pa
        )

    boolbm = pycompat.bytestr(bool(branchmerge))
    boolf = pycompat.bytestr(bool(force))
    boolm = pycompat.bytestr(bool(matcher))
    repo.ui.note(_(b"resolving manifests\n"))
    repo.ui.debug(
        b" branchmerge: %s, force: %s, partial: %s\n" % (boolbm, boolf, boolm)
    )
    repo.ui.debug(b" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))

    m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
    copied1 = set(branch_copies1.copy.values())
    copied1.update(branch_copies1.movewithdir.values())
    copied2 = set(branch_copies2.copy.values())
    copied2.update(branch_copies2.movewithdir.values())

    if b'.hgsubstate' in m1 and wctx.rev() is None:
        # Check whether sub state is modified, and overwrite the manifest
        # to flag the change. If wctx is a committed revision, we shouldn't
        # care for the dirty state of the working directory.
        if any(wctx.sub(s).dirty() for s in wctx.substate):
            m1[b'.hgsubstate'] = repo.nodeconstants.modifiednodeid

    # Don't use m2-vs-ma optimization if:
    # - ma is the same as m1 or m2, which we're just going to diff again later
    # - The caller specifically asks for a full diff, which is useful during
    #   bid merge.
    # - we are tracking salvaged files specifically hence should process all
    #   files
    if (
        pa not in ([wctx, p2] + wctx.parents())
        and not forcefulldiff
        and not (
            repo.ui.configbool(b'experimental', b'merge-track-salvaged')
            or repo.filecopiesmode == b'changeset-sidedata'
        )
    ):
        # Identify which files are relevant to the merge, so we can limit the
        # total m1-vs-m2 diff to just those files. This has significant
        # performance benefits in large repositories.
        relevantfiles = set(ma.diff(m2).keys())

        # For copied and moved files, we need to add the source file too.
        for copykey, copyvalue in pycompat.iteritems(branch_copies1.copy):
            if copyvalue in relevantfiles:
                relevantfiles.add(copykey)
        for movedirkey in branch_copies1.movewithdir:
            relevantfiles.add(movedirkey)
        filesmatcher = scmutil.matchfiles(repo, relevantfiles)
        matcher = matchmod.intersectmatchers(matcher, filesmatcher)

    diff = m1.diff(m2, match=matcher)

    for f, ((n1, fl1), (n2, fl2)) in pycompat.iteritems(diff):
        if n1 and n2:  # file exists on both local and remote side
            if f not in ma:
                # TODO: what if they're renamed from different sources?
                fa = branch_copies1.copy.get(
                    f, None
                ) or branch_copies2.copy.get(f, None)
                args, msg = None, None
                if fa is not None:
                    args = (f, f, fa, False, pa.node())
                    msg = b'both renamed from %s' % fa
                else:
                    args = (f, f, None, False, pa.node())
                    msg = b'both created'
                mresult.addfile(f, mergestatemod.ACTION_MERGE, args, msg)
            elif f in branch_copies1.copy:
                fa = branch_copies1.copy[f]
                mresult.addfile(
                    f,
                    mergestatemod.ACTION_MERGE,
                    (f, fa, fa, False, pa.node()),
                    b'local replaced from %s' % fa,
                )
            elif f in branch_copies2.copy:
                fa = branch_copies2.copy[f]
                mresult.addfile(
                    f,
                    mergestatemod.ACTION_MERGE,
                    (fa, f, fa, False, pa.node()),
                    b'other replaced from %s' % fa,
                )
            else:
                a = ma[f]
                fla = ma.flags(f)
                nol = b'l' not in fl1 + fl2 + fla
                if n2 == a and fl2 == fla:
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_KEEP,
                        (),
                        b'remote unchanged',
                    )
                elif n1 == a and fl1 == fla:  # local unchanged - use remote
                    if n1 == n2:  # optimization: keep local content
                        mresult.addfile(
                            f,
                            mergestatemod.ACTION_EXEC,
                            (fl2,),
                            b'update permissions',
                        )
                    else:
                        mresult.addfile(
                            f,
                            mergestatemod.ACTION_GET,
                            (fl2, False),
                            b'remote is newer',
                        )
                        if branchmerge:
                            mresult.addcommitinfo(
                                f, b'filenode-source', b'other'
                            )
                elif nol and n2 == a:  # remote only changed 'x'
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_EXEC,
                        (fl2,),
                        b'update permissions',
                    )
                elif nol and n1 == a:  # local only changed 'x'
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_GET,
                        (fl1, False),
                        b'remote is newer',
                    )
                    if branchmerge:
                        mresult.addcommitinfo(f, b'filenode-source', b'other')
                else:  # both changed something
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_MERGE,
                        (f, f, f, False, pa.node()),
                        b'versions differ',
                    )
        elif n1:  # file exists only on local side
            if f in copied2:
                pass  # we'll deal with it on m2 side
            elif (
                f in branch_copies1.movewithdir
            ):  # directory rename, move local
                f2 = branch_copies1.movewithdir[f]
                if f2 in m2:
                    mresult.addfile(
                        f2,
                        mergestatemod.ACTION_MERGE,
                        (f, f2, None, True, pa.node()),
                        b'remote directory rename, both created',
                    )
                else:
                    mresult.addfile(
                        f2,
                        mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL,
                        (f, fl1),
                        b'remote directory rename - move from %s' % f,
                    )
            elif f in branch_copies1.copy:
                f2 = branch_copies1.copy[f]
                mresult.addfile(
                    f,
                    mergestatemod.ACTION_MERGE,
                    (f, f2, f2, False, pa.node()),
                    b'local copied/moved from %s' % f2,
                )
            elif f in ma:  # present in ancestor, deleted on remote side
                if n1 != ma[f]:
                    if acceptremote:
                        mresult.addfile(
                            f,
                            mergestatemod.ACTION_REMOVE,
                            None,
                            b'remote delete',
                        )
                    else:
                        mresult.addfile(
                            f,
                            mergestatemod.ACTION_CHANGED_DELETED,
                            (f, None, f, False, pa.node()),
                            b'prompt changed/deleted',
                        )
                        if branchmerge:
                            mresult.addcommitinfo(
                                f, b'merge-removal-candidate', b'yes'
                            )
                elif n1 == repo.nodeconstants.addednodeid:
                    # This file was locally added. We should forget it instead
                    # of deleting it.
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_FORGET,
                        None,
                        b'remote deleted',
                    )
                else:
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_REMOVE,
                        None,
                        b'other deleted',
                    )
                    if branchmerge:
                        # the file must be absent after merging; however the
                        # user might make the file reappear using revert, and
                        # if they do, we force create a new node
                        mresult.addcommitinfo(
                            f, b'merge-removal-candidate', b'yes'
                        )

            else:  # file not in ancestor, not in remote
                mresult.addfile(
                    f,
                    mergestatemod.ACTION_KEEP_NEW,
                    None,
                    b'ancestor missing, remote missing',
                )

        elif n2:  # file exists only on remote side
            if f in copied1:
                pass  # we'll deal with it on m1 side
            elif f in branch_copies2.movewithdir:
                f2 = branch_copies2.movewithdir[f]
                if f2 in m1:
                    mresult.addfile(
                        f2,
                        mergestatemod.ACTION_MERGE,
                        (f2, f, None, False, pa.node()),
                        b'local directory rename, both created',
                    )
                else:
                    mresult.addfile(
                        f2,
                        mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,
                        (f, fl2),
                        b'local directory rename - get from %s' % f,
                    )
            elif f in branch_copies2.copy:
                f2 = branch_copies2.copy[f]
                msg, args = None, None
                if f2 in m2:
                    args = (f2, f, f2, False, pa.node())
                    msg = b'remote copied from %s' % f2
                else:
                    args = (f2, f, f2, True, pa.node())
                    msg = b'remote moved from %s' % f2
                mresult.addfile(f, mergestatemod.ACTION_MERGE, args, msg)
            elif f not in ma:
                # local unknown, remote created: the logic is described by the
                # following table:
                #
                # force  branchmerge  different  |  action
                #   n         *           *      |   create
                #   y         n           *      |   create
                #   y         y           n      |   create
                #   y         y           y      |   merge
                #
                # Checking whether the files are different is expensive, so we
                # don't do that when we can avoid it.
                if not force:
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_CREATED,
                        (fl2,),
                        b'remote created',
                    )
                elif not branchmerge:
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_CREATED,
                        (fl2,),
                        b'remote created',
                    )
                else:
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_CREATED_MERGE,
                        (fl2, pa.node()),
                        b'remote created, get or merge',
                    )
            elif n2 != ma[f]:
                df = None
                for d in branch_copies1.dirmove:
                    if f.startswith(d):
                        # new file added in a directory that was moved
                        df = branch_copies1.dirmove[d] + f[len(d) :]
                        break
                if df is not None and df in m1:
                    mresult.addfile(
                        df,
                        mergestatemod.ACTION_MERGE,
                        (df, f, f, False, pa.node()),
                        b'local directory rename - respect move '
                        b'from %s' % f,
                    )
                elif acceptremote:
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_CREATED,
                        (fl2,),
                        b'remote recreating',
                    )
                else:
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_DELETED_CHANGED,
                        (None, f, f, False, pa.node()),
                        b'prompt deleted/changed',
                    )
                    if branchmerge:
                        mresult.addcommitinfo(
                            f, b'merge-removal-candidate', b'yes'
                        )
            else:
                mresult.addfile(
                    f,
                    mergestatemod.ACTION_KEEP_ABSENT,
                    None,
                    b'local not present, remote unchanged',
                )
                if branchmerge:
                    # the file must be absent after merging; however the user
                    # might make the file reappear using revert, and if they
                    # do, we force create a new node
                    mresult.addcommitinfo(f, b'merge-removal-candidate', b'yes')

    if repo.ui.configbool(b'experimental', b'merge.checkpathconflicts'):
        # If we are merging, look for path conflicts.
        checkpathconflicts(repo, wctx, p2, mresult)

    narrowmatch = repo.narrowmatch()
    if not narrowmatch.always():
        # Updates "actions" in place
        _filternarrowactions(narrowmatch, branchmerge, mresult)

    renamedelete = branch_copies1.renamedelete
    renamedelete.update(branch_copies2.renamedelete)

    mresult.updatevalues(diverge, renamedelete)
    return mresult


def _resolvetrivial(repo, wctx, mctx, ancestor, mresult):
    """Resolves false conflicts where the nodeid changed but the content
    remained the same."""
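    # e.g. if the remote "touched" f (new filenode) but its content still
    # equals the ancestor's, the apparent deleted/changed conflict is dropped
    # below and the local deletion simply wins.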
    # We force a copy of actions.items() because we're going to mutate
    # actions as we resolve trivial conflicts.
    for f in list(mresult.files((mergestatemod.ACTION_CHANGED_DELETED,))):
        if f in ancestor and not wctx[f].cmp(ancestor[f]):
            # local did change but ended up with same content
            mresult.addfile(
                f, mergestatemod.ACTION_REMOVE, None, b'prompt same'
            )

    for f in list(mresult.files((mergestatemod.ACTION_DELETED_CHANGED,))):
        if f in ancestor and not mctx[f].cmp(ancestor[f]):
            # remote did change but ended up with same content
            mresult.removefile(f)  # don't get = keep local deleted


def calculateupdates(
    repo,
    wctx,
    mctx,
    ancestors,
    branchmerge,
    force,
    acceptremote,
    followcopies,
    matcher=None,
    mergeforce=False,
):
1130 """
1130 """
1131 Calculate the actions needed to merge mctx into wctx using ancestors
1131 Calculate the actions needed to merge mctx into wctx using ancestors
1132
1132
1133 Uses manifestmerge() to merge manifest and get list of actions required to
1133 Uses manifestmerge() to merge manifest and get list of actions required to
1134 perform for merging two manifests. If there are multiple ancestors, uses bid
1134 perform for merging two manifests. If there are multiple ancestors, uses bid
1135 merge if enabled.
1135 merge if enabled.
1136
1136
1137 Also filters out actions which are unrequired if repository is sparse.
1137 Also filters out actions which are unrequired if repository is sparse.
1138
1138
1139 Returns mergeresult object same as manifestmerge().
1139 Returns mergeresult object same as manifestmerge().
1140 """
1140 """
    # Avoid cycle.
    from . import sparse

    mresult = None
    if len(ancestors) == 1:  # default
        mresult = manifestmerge(
            repo,
            wctx,
            mctx,
            ancestors[0],
            branchmerge,
            force,
            matcher,
            acceptremote,
            followcopies,
        )
        _checkunknownfiles(repo, wctx, mctx, force, mresult, mergeforce)

    else:  # only when merge.preferancestor=* - the default
        repo.ui.note(
            _(b"note: merging %s and %s using bids from ancestors %s\n")
            % (
                wctx,
                mctx,
                _(b' and ').join(pycompat.bytestr(anc) for anc in ancestors),
            )
        )

        # mapping filename to bids (action method to list of actions)
        # {FILENAME1 : BID1, FILENAME2 : BID2}
        # BID is another dictionary which contains
        # mapping of following form:
        # {ACTION_X : [info, ..], ACTION_Y : [info, ..]}
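        # For example (illustrative values only), two ancestors bidding
        # differently on one file could leave:
        #     fbids == {b'foo': {ACTION_MERGE: [bid1], ACTION_KEEP: [bid2]}}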
        fbids = {}
        mresult = mergeresult()
        diverge, renamedelete = None, None
        for ancestor in ancestors:
            repo.ui.note(_(b'\ncalculating bids for ancestor %s\n') % ancestor)
            mresult1 = manifestmerge(
                repo,
                wctx,
                mctx,
                ancestor,
                branchmerge,
                force,
                matcher,
                acceptremote,
                followcopies,
                forcefulldiff=True,
            )
            _checkunknownfiles(repo, wctx, mctx, force, mresult1, mergeforce)

            # Track the shortest set of warnings on the theory that bid
            # merge will correctly incorporate more information
            if diverge is None or len(mresult1.diverge) < len(diverge):
                diverge = mresult1.diverge
            if renamedelete is None or len(mresult1.renamedelete) < len(
                renamedelete
            ):
                renamedelete = mresult1.renamedelete

            # blindly update the final mergeresult commitinfo with what we get
            # from the mergeresult object for each ancestor
            # TODO: some commitinfo depends on what bid merge chooses and hence
            # we will need to make commitinfo also depend on bid merge logic
            mresult._commitinfo.update(mresult1._commitinfo)

            for f, a in mresult1.filemap(sort=True):
                m, args, msg = a
                repo.ui.debug(b' %s: %s -> %s\n' % (f, msg, m))
                if f in fbids:
                    d = fbids[f]
                    if m in d:
                        d[m].append(a)
                    else:
                        d[m] = [a]
                else:
                    fbids[f] = {m: [a]}

        # Call for bids
        # Pick the best bid for each file
        repo.ui.note(
            _(b'\nauction for merging merge bids (%d ancestors)\n')
            % len(ancestors)
        )
        for f, bids in sorted(fbids.items()):
            if repo.ui.debugflag:
                repo.ui.debug(b" list of bids for %s:\n" % f)
                for m, l in sorted(bids.items()):
                    for _f, args, msg in l:
                        repo.ui.debug(b' %s -> %s\n' % (msg, m))
            # bids is a mapping from action method to list of actions
            # Consensus?
            if len(bids) == 1:  # all bids are the same kind of method
                m, l = list(bids.items())[0]
                if all(a == l[0] for a in l[1:]):  # len(bids) is > 1
                    repo.ui.note(_(b" %s: consensus for %s\n") % (f, m))
                    mresult.addfile(f, *l[0])
                    continue
            # If keep is an option, just do it.
            if mergestatemod.ACTION_KEEP in bids:
                repo.ui.note(_(b" %s: picking 'keep' action\n") % f)
                mresult.addfile(f, *bids[mergestatemod.ACTION_KEEP][0])
                continue
            # If keep absent is an option, just do that
            if mergestatemod.ACTION_KEEP_ABSENT in bids:
                repo.ui.note(_(b" %s: picking 'keep absent' action\n") % f)
                mresult.addfile(f, *bids[mergestatemod.ACTION_KEEP_ABSENT][0])
                continue
            # ACTION_KEEP_NEW and ACTION_CHANGED_DELETED are conflicting
            # actions: one says the file is new while the other says the file
            # was present earlier too and has a change/delete conflict.
            # Let's fall back to the conflicting ACTION_CHANGED_DELETED and
            # let the user do the right thing.
            if (
                mergestatemod.ACTION_CHANGED_DELETED in bids
                and mergestatemod.ACTION_KEEP_NEW in bids
            ):
                repo.ui.note(_(b" %s: picking 'changed/deleted' action\n") % f)
                mresult.addfile(
                    f, *bids[mergestatemod.ACTION_CHANGED_DELETED][0]
                )
                continue
            # If keep new is an option, let's just do that
            if mergestatemod.ACTION_KEEP_NEW in bids:
                repo.ui.note(_(b" %s: picking 'keep new' action\n") % f)
                mresult.addfile(f, *bids[mergestatemod.ACTION_KEEP_NEW][0])
                continue
            # ACTION_GET and ACTION_DELETED_CHANGED are conflicting actions:
            # one states the file is newer/created on the remote side and the
            # other states that the file is deleted locally and changed on the
            # remote side. Let's fall back and rely on a conflicting action to
            # let the user do the right thing.
            if (
                mergestatemod.ACTION_DELETED_CHANGED in bids
                and mergestatemod.ACTION_GET in bids
            ):
                repo.ui.note(_(b" %s: picking 'delete/changed' action\n") % f)
                mresult.addfile(
                    f, *bids[mergestatemod.ACTION_DELETED_CHANGED][0]
                )
                continue
            # If there are gets and they all agree [how could they not?], do it.
            if mergestatemod.ACTION_GET in bids:
                ga0 = bids[mergestatemod.ACTION_GET][0]
                if all(a == ga0 for a in bids[mergestatemod.ACTION_GET][1:]):
                    repo.ui.note(_(b" %s: picking 'get' action\n") % f)
                    mresult.addfile(f, *ga0)
                    continue
            # TODO: Consider other simple actions such as mode changes
            # Handle inefficient democrazy.
            repo.ui.note(_(b' %s: multiple bids for merge action:\n') % f)
            for m, l in sorted(bids.items()):
                for _f, args, msg in l:
                    repo.ui.note(b' %s -> %s\n' % (msg, m))
            # Pick random action. TODO: Instead, prompt user when resolving
            m, l = list(bids.items())[0]
            repo.ui.warn(
                _(b' %s: ambiguous merge - picked %s action\n') % (f, m)
            )
            mresult.addfile(f, *l[0])
            continue
        repo.ui.note(_(b'end of auction\n\n'))
        mresult.updatevalues(diverge, renamedelete)

    if wctx.rev() is None:
        _forgetremoved(wctx, mctx, branchmerge, mresult)

    sparse.filterupdatesactions(repo, wctx, mctx, branchmerge, mresult)
    _resolvetrivial(repo, wctx, mctx, ancestors[0], mresult)

    return mresult


def _getcwd():
    try:
        return encoding.getcwd()
    except OSError as err:
        if err.errno == errno.ENOENT:
            return None
        raise


def batchremove(repo, wctx, actions):
    """apply removes to the working directory

    yields tuples for progress updates
    """
    verbose = repo.ui.verbose
    cwd = _getcwd()
    i = 0
    for f, args, msg in actions:
        repo.ui.debug(b" %s: %s -> r\n" % (f, msg))
        if verbose:
            repo.ui.note(_(b"removing %s\n") % f)
        wctx[f].audit()
        try:
            wctx[f].remove(ignoremissing=True)
        except OSError as inst:
            repo.ui.warn(
                _(b"update failed to remove %s: %s!\n")
                % (f, stringutil.forcebytestr(inst.strerror))
            )
        if i == 100:
            yield i, f
            i = 0
        i += 1
    if i > 0:
        yield i, f

    if cwd and not _getcwd():
        # cwd was removed in the course of removing files; print a helpful
        # warning.
        repo.ui.warn(
            _(
                b"current directory was removed\n"
                b"(consider changing to repo root: %s)\n"
            )
            % repo.root
        )


def batchget(repo, mctx, wctx, wantfiledata, actions):
    """apply gets to the working directory

    mctx is the context to get from

    Yields arbitrarily many (False, tuple) for progress updates, followed by
    exactly one (True, filedata). When wantfiledata is false, filedata is an
    empty dict. When wantfiledata is true, filedata[f] is a triple (mode, size,
    mtime) of the file f written for each action.
    """
    filedata = {}
    verbose = repo.ui.verbose
    fctx = mctx.filectx
    ui = repo.ui
    i = 0
    with repo.wvfs.backgroundclosing(ui, expectedcount=len(actions)):
        for f, (flags, backup), msg in actions:
            repo.ui.debug(b" %s: %s -> g\n" % (f, msg))
            if verbose:
                repo.ui.note(_(b"getting %s\n") % f)

            if backup:
                # If a file or directory exists with the same name, back that
                # up. Otherwise, look to see if there is a file that conflicts
                # with a directory this file is in, and if so, back that up.
                conflicting = f
                if not repo.wvfs.lexists(f):
                    for p in pathutil.finddirs(f):
                        if repo.wvfs.isfileorlink(p):
                            conflicting = p
                            break
                if repo.wvfs.lexists(conflicting):
                    orig = scmutil.backuppath(ui, repo, conflicting)
                    util.rename(repo.wjoin(conflicting), orig)
            wfctx = wctx[f]
            wfctx.clearunknown()
            atomictemp = ui.configbool(b"experimental", b"update.atomic-file")
            size = wfctx.write(
                fctx(f).data(),
                flags,
                backgroundclose=True,
                atomictemp=atomictemp,
            )
            if wantfiledata:
                s = wfctx.lstat()
                mode = s.st_mode
                mtime = timestamp.mtime_of(s)
                # for dirstate.update_file's parentfiledata argument:
                filedata[f] = (mode, size, mtime)
            if i == 100:
                yield False, (i, f)
                i = 0
            i += 1
    if i > 0:
        yield False, (i, f)
    yield True, filedata


def _prefetchfiles(repo, ctx, mresult):
    """Invoke ``scmutil.prefetchfiles()`` for the files relevant to the dict
    of merge actions. ``ctx`` is the context being merged in."""

    # Skipping 'a', 'am', 'f', 'r', 'dm', 'e', 'k', 'p' and 'pr', because they
    # don't touch the context to be merged in. 'cd' is skipped, because
    # changed/deleted never resolves to something from the remote side.
    files = mresult.files(
        [
            mergestatemod.ACTION_GET,
            mergestatemod.ACTION_DELETED_CHANGED,
            mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,
            mergestatemod.ACTION_MERGE,
        ]
    )

    prefetch = scmutil.prefetchfiles
    matchfiles = scmutil.matchfiles
    prefetch(
        repo,
        [
            (
                ctx.rev(),
                matchfiles(repo, files),
            )
        ],
    )


@attr.s(frozen=True)
class updateresult(object):
    updatedcount = attr.ib()
    mergedcount = attr.ib()
    removedcount = attr.ib()
    unresolvedcount = attr.ib()

    def isempty(self):
        return not (
            self.updatedcount
            or self.mergedcount
            or self.removedcount
            or self.unresolvedcount
        )


def applyupdates(
    repo,
    mresult,
    wctx,
    mctx,
    overwrite,
    wantfiledata,
    labels=None,
):
1475 """apply the merge action list to the working directory
1475 """apply the merge action list to the working directory
1476
1476
1477 mresult is a mergeresult object representing result of the merge
1477 mresult is a mergeresult object representing result of the merge
1478 wctx is the working copy context
1478 wctx is the working copy context
1479 mctx is the context to be merged into the working copy
1479 mctx is the context to be merged into the working copy
1480
1480
1481 Return a tuple of (counts, filedata), where counts is a tuple
1481 Return a tuple of (counts, filedata), where counts is a tuple
1482 (updated, merged, removed, unresolved) that describes how many
1482 (updated, merged, removed, unresolved) that describes how many
1483 files were affected by the update, and filedata is as described in
1483 files were affected by the update, and filedata is as described in
1484 batchget.
1484 batchget.
1485 """
1485 """

    _prefetchfiles(repo, mctx, mresult)

    updated, merged, removed = 0, 0, 0
    ms = wctx.mergestate(clean=True)
    ms.start(wctx.p1().node(), mctx.node(), labels)

    for f, op in pycompat.iteritems(mresult.commitinfo):
        # the other side of the filenode was chosen while merging; store this
        # in mergestate so that it can be reused on commit
        ms.addcommitinfo(f, op)

    numupdates = mresult.len() - mresult.len(mergestatemod.NO_OP_ACTIONS)
    progress = repo.ui.makeprogress(
        _(b'updating'), unit=_(b'files'), total=numupdates
    )

    if b'.hgsubstate' in mresult._actionmapping[mergestatemod.ACTION_REMOVE]:
        subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)

    # record path conflicts
    for f, args, msg in mresult.getactions(
        [mergestatemod.ACTION_PATH_CONFLICT], sort=True
    ):
        f1, fo = args
        s = repo.ui.status
        s(
            _(
                b"%s: path conflict - a file or link has the same name as a "
                b"directory\n"
            )
            % f
        )
        if fo == b'l':
            s(_(b"the local file has been renamed to %s\n") % f1)
        else:
            s(_(b"the remote file has been renamed to %s\n") % f1)
        s(_(b"resolve manually then use 'hg resolve --mark %s'\n") % f)
        ms.addpathconflict(f, f1, fo)
        progress.increment(item=f)

    # When merging in-memory, we can't support worker processes, so set the
    # per-item cost at 0 in that case.
    cost = 0 if wctx.isinmemory() else 0.001

    # remove in parallel (must come before resolving path conflicts and getting)
    prog = worker.worker(
        repo.ui,
        cost,
        batchremove,
        (repo, wctx),
        list(mresult.getactions([mergestatemod.ACTION_REMOVE], sort=True)),
    )
    for i, item in prog:
        progress.increment(step=i, item=item)
    removed = mresult.len((mergestatemod.ACTION_REMOVE,))

    # resolve path conflicts (must come before getting)
    for f, args, msg in mresult.getactions(
        [mergestatemod.ACTION_PATH_CONFLICT_RESOLVE], sort=True
    ):
        repo.ui.debug(b" %s: %s -> pr\n" % (f, msg))
        (f0, origf0) = args
        if wctx[f0].lexists():
            repo.ui.note(_(b"moving %s to %s\n") % (f0, f))
            wctx[f].audit()
            wctx[f].write(wctx.filectx(f0).data(), wctx.filectx(f0).flags())
            wctx[f0].remove()
        progress.increment(item=f)

    # get in parallel.
    threadsafe = repo.ui.configbool(
        b'experimental', b'worker.wdir-get-thread-safe'
    )
    prog = worker.worker(
        repo.ui,
        cost,
        batchget,
        (repo, mctx, wctx, wantfiledata),
        list(mresult.getactions([mergestatemod.ACTION_GET], sort=True)),
        threadsafe=threadsafe,
        hasretval=True,
    )
    getfiledata = {}
    for final, res in prog:
        if final:
            getfiledata = res
        else:
            i, item = res
            progress.increment(step=i, item=item)

    if b'.hgsubstate' in mresult._actionmapping[mergestatemod.ACTION_GET]:
        subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)

    # forget (manifest only, just log it) (must come first)
    for f, args, msg in mresult.getactions(
        (mergestatemod.ACTION_FORGET,), sort=True
    ):
        repo.ui.debug(b" %s: %s -> f\n" % (f, msg))
        progress.increment(item=f)

    # re-add (manifest only, just log it)
    for f, args, msg in mresult.getactions(
        (mergestatemod.ACTION_ADD,), sort=True
    ):
        repo.ui.debug(b" %s: %s -> a\n" % (f, msg))
        progress.increment(item=f)

    # re-add/mark as modified (manifest only, just log it)
    for f, args, msg in mresult.getactions(
        (mergestatemod.ACTION_ADD_MODIFIED,), sort=True
    ):
        repo.ui.debug(b" %s: %s -> am\n" % (f, msg))
        progress.increment(item=f)

    # keep (noop, just log it)
    for a in mergestatemod.NO_OP_ACTIONS:
        for f, args, msg in mresult.getactions((a,), sort=True):
            repo.ui.debug(b" %s: %s -> %s\n" % (f, msg, a))
            # no progress

    # directory rename, move local
    for f, args, msg in mresult.getactions(
        (mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL,), sort=True
    ):
        repo.ui.debug(b" %s: %s -> dm\n" % (f, msg))
        progress.increment(item=f)
        f0, flags = args
        repo.ui.note(_(b"moving %s to %s\n") % (f0, f))
        wctx[f].audit()
        wctx[f].write(wctx.filectx(f0).data(), flags)
        wctx[f0].remove()

    # local directory rename, get
    for f, args, msg in mresult.getactions(
        (mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,), sort=True
    ):
        repo.ui.debug(b" %s: %s -> dg\n" % (f, msg))
        progress.increment(item=f)
        f0, flags = args
        repo.ui.note(_(b"getting %s to %s\n") % (f0, f))
        wctx[f].write(mctx.filectx(f0).data(), flags)

    # exec
    for f, args, msg in mresult.getactions(
        (mergestatemod.ACTION_EXEC,), sort=True
    ):
        repo.ui.debug(b" %s: %s -> e\n" % (f, msg))
        progress.increment(item=f)
        (flags,) = args
        wctx[f].audit()
        wctx[f].setflags(b'l' in flags, b'x' in flags)

    moves = []

    # 'cd' and 'dc' actions are treated like other merge conflicts
    mergeactions = list(
        mresult.getactions(
            [
                mergestatemod.ACTION_CHANGED_DELETED,
                mergestatemod.ACTION_DELETED_CHANGED,
                mergestatemod.ACTION_MERGE,
            ],
            sort=True,
        )
    )
    for f, args, msg in mergeactions:
        f1, f2, fa, move, anc = args
        if f == b'.hgsubstate':  # merged internally
            continue
        if f1 is None:
            fcl = filemerge.absentfilectx(wctx, fa)
        else:
            repo.ui.debug(b" preserving %s for resolve of %s\n" % (f1, f))
            fcl = wctx[f1]
        if f2 is None:
            fco = filemerge.absentfilectx(mctx, fa)
        else:
            fco = mctx[f2]
        actx = repo[anc]
        if fa in actx:
            fca = actx[fa]
        else:
            # TODO: move to absentfilectx
            fca = repo.filectx(f1, fileid=nullrev)
        ms.add(fcl, fco, fca, f)
        if f1 != f and move:
            moves.append(f1)

    # remove renamed files after safely stored
    for f in moves:
        if wctx[f].lexists():
            repo.ui.debug(b"removing %s\n" % f)
            wctx[f].audit()
            wctx[f].remove()

    # these actions update the file
    updated = mresult.len(
        (
            mergestatemod.ACTION_GET,
            mergestatemod.ACTION_EXEC,
            mergestatemod.ACTION_LOCAL_DIR_RENAME_GET,
            mergestatemod.ACTION_DIR_RENAME_MOVE_LOCAL,
        )
    )

    try:
        # premerge
        tocomplete = []
        for f, args, msg in mergeactions:
            repo.ui.debug(b" %s: %s -> m (premerge)\n" % (f, msg))
            ms.addcommitinfo(f, {b'merged': b'yes'})
            progress.increment(item=f)
            if f == b'.hgsubstate':  # subrepo states need updating
                subrepoutil.submerge(
                    repo, wctx, mctx, wctx.ancestor(mctx), overwrite, labels
                )
                continue
            wctx[f].audit()
            complete, r = ms.preresolve(f, wctx)
            if not complete:
                numupdates += 1
                tocomplete.append((f, args, msg))

        # merge
        for f, args, msg in tocomplete:
            repo.ui.debug(b" %s: %s -> m (merge)\n" % (f, msg))
            ms.addcommitinfo(f, {b'merged': b'yes'})
            progress.increment(item=f, total=numupdates)
            ms.resolve(f, wctx)

    except error.InterventionRequired:
        # If the user has merge.on-failure=halt, catch the error and close the
        # merge state "properly".
        pass
    finally:
        ms.commit()

    unresolved = ms.unresolvedcount()

    msupdated, msmerged, msremoved = ms.counts()
    updated += msupdated
    merged += msmerged
    removed += msremoved

    extraactions = ms.actions()

    progress.complete()
    return (
        updateresult(updated, merged, removed, unresolved),
        getfiledata,
        extraactions,
    )


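# --- Editorial example, not part of this changeset -----------------------
# applyupdates() returns (updateresult, getfiledata, extraactions).  A
# minimal sketch of how a caller can unpack it; the argument values are
# placeholders (see the real call site in _update() below):
def _example_apply(repo, mresult, wctx, mctx, labels=None):
    stats, getfiledata, extraactions = applyupdates(
        repo, mresult, wctx, mctx, False, True, labels=labels
    )
    if stats.unresolvedcount:
        repo.ui.warn(b"unresolved conflicts found; run 'hg resolve'\n")
    return stats
# --------------------------------------------------------------------------
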
def _advertisefsmonitor(repo, num_gets, p1node):
    # Advertise fsmonitor when its presence could be useful.
    #
    # We only advertise when performing an update from an empty working
    # directory. This typically only occurs during initial clone.
    #
    # We give users a mechanism to disable the warning in case it is
    # annoying.
    #
    # We only allow on Linux and MacOS because that's where fsmonitor is
    # considered stable.
    fsmonitorwarning = repo.ui.configbool(b'fsmonitor', b'warn_when_unused')
    fsmonitorthreshold = repo.ui.configint(
        b'fsmonitor', b'warn_update_file_count'
    )
    # avoid cycle dirstate -> sparse -> merge -> dirstate
    from . import dirstate

    if dirstate.rustmod is not None:
        # When using rust status, fsmonitor becomes necessary at higher sizes
        fsmonitorthreshold = repo.ui.configint(
            b'fsmonitor',
            b'warn_update_file_count_rust',
        )

    try:
        # avoid cycle: extensions -> cmdutil -> merge
        from . import extensions

        extensions.find(b'fsmonitor')
        fsmonitorenabled = repo.ui.config(b'fsmonitor', b'mode') != b'off'
        # We intentionally don't look at whether fsmonitor has disabled
        # itself because a) fsmonitor may have already printed a warning
        # b) we only care about the config state here.
    except KeyError:
        fsmonitorenabled = False

    if (
        fsmonitorwarning
        and not fsmonitorenabled
        and p1node == repo.nullid
        and num_gets >= fsmonitorthreshold
        and pycompat.sysplatform.startswith((b'linux', b'darwin'))
    ):
        repo.ui.warn(
            _(
                b'(warning: large working directory being used without '
                b'fsmonitor enabled; enable fsmonitor to improve performance; '
                b'see "hg help -e fsmonitor")\n'
            )
        )
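
# --- Editorial note, not part of this changeset ---------------------------
# The knobs read above map to user configuration; an illustrative hgrc
# (the option names come from the code above, the values are examples):
#
#   [fsmonitor]
#   warn_when_unused = no            ; silence the advisory entirely
#   warn_update_file_count = 100000  ; raise the plain-Python threshold
# ---------------------------------------------------------------------------
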


UPDATECHECK_ABORT = b'abort'  # handled at higher layers
UPDATECHECK_NONE = b'none'
UPDATECHECK_LINEAR = b'linear'
UPDATECHECK_NO_CONFLICT = b'noconflict'


def _update(
    repo,
    node,
    branchmerge,
    force,
    ancestor=None,
    mergeancestor=False,
    labels=None,
    matcher=None,
    mergeforce=False,
    updatedirstate=True,
    updatecheck=None,
    wc=None,
):
    """
    Perform a merge between the working directory and the given node

    node = the node to update to
    branchmerge = whether to merge between branches
    force = whether to force branch merging or file overwriting
    matcher = a matcher to filter file lists (dirstate not updated)
    mergeancestor = whether it is merging with an ancestor. If true,
      we should accept the incoming changes for any prompts that occur.
      If false, merging with an ancestor (fast-forward) is only allowed
      between different named branches. This flag is used by rebase extension
      as a temporary fix and should be avoided in general.
    labels = labels to use for base, local and other
    mergeforce = whether the merge was run with 'merge --force' (deprecated): if
      this is True, then 'force' should be True as well.

    The table below shows all the behaviors of the update command given the
    -c/--check and -C/--clean or no options, whether the working directory is
    dirty, whether a revision is specified, and the relationship of the parent
    rev to the target rev (linear or not). Match from top first. The -n
    option doesn't exist on the command line, but represents the
    experimental.updatecheck=noconflict option.

    This logic is tested by test-update-branches.t.

    -c  -C  -n  -m  dirty  rev  linear  |  result
     y   y   *   *    *     *     *     |    (1)
     y   *   y   *    *     *     *     |    (1)
     y   *   *   y    *     *     *     |    (1)
     *   y   y   *    *     *     *     |    (1)
     *   y   *   y    *     *     *     |    (1)
     *   *   y   y    *     *     *     |    (1)
     *   *   *   *    *     n     n     |     x
     *   *   *   *    n     *     *     |    ok
     n   n   n   n    y     *     y     |   merge
     n   n   n   n    y     y     n     |    (2)
     n   n   n   y    y     *     *     |   merge
     n   n   y   n    y     *     *     |   merge if no conflict
     n   y   n   n    y     *     *     |   discard
     y   n   n   n    y     *     *     |    (3)

    x = can't happen
    * = don't-care
    1 = incompatible options (checked in commands.py)
    2 = abort: uncommitted changes (commit or update --clean to discard changes)
    3 = abort: uncommitted changes (checked in commands.py)

    The merge is performed inside ``wc``, a workingctx-like object. It defaults
    to repo[None] if None is passed.

    Return the same tuple as applyupdates().
    """
    # Avoid cycle.
    from . import sparse

    # This function used to find the default destination if node was None, but
    # that's now in destutil.py.
    assert node is not None
    if not branchmerge and not force:
        # TODO: remove the default once all callers that pass branchmerge=False
        # and force=False pass a value for updatecheck. We may want to allow
        # updatecheck='abort' to better support some of these callers.
        if updatecheck is None:
            updatecheck = UPDATECHECK_LINEAR
        if updatecheck not in (
            UPDATECHECK_NONE,
            UPDATECHECK_LINEAR,
            UPDATECHECK_NO_CONFLICT,
        ):
            raise ValueError(
                r'Invalid updatecheck %r (can accept %r)'
                % (
                    updatecheck,
                    (
                        UPDATECHECK_NONE,
                        UPDATECHECK_LINEAR,
                        UPDATECHECK_NO_CONFLICT,
                    ),
                )
            )
    if wc is not None and wc.isinmemory():
        maybe_wlock = util.nullcontextmanager()
    else:
        maybe_wlock = repo.wlock()
    with maybe_wlock:
        if wc is None:
            wc = repo[None]
        pl = wc.parents()
        p1 = pl[0]
        p2 = repo[node]
        if ancestor is not None:
            pas = [repo[ancestor]]
        else:
            if repo.ui.configlist(b'merge', b'preferancestor') == [b'*']:
                cahs = repo.changelog.commonancestorsheads(p1.node(), p2.node())
                pas = [repo[anc] for anc in (sorted(cahs) or [repo.nullid])]
            else:
                pas = [p1.ancestor(p2, warn=branchmerge)]

        fp1, fp2, xp1, xp2 = p1.node(), p2.node(), bytes(p1), bytes(p2)

        overwrite = force and not branchmerge
        ### check phase
        if not overwrite:
            if len(pl) > 1:
                raise error.StateError(_(b"outstanding uncommitted merge"))
            ms = wc.mergestate()
            if ms.unresolvedcount():
                raise error.StateError(
                    _(b"outstanding merge conflicts"),
                    hint=_(b"use 'hg resolve' to resolve"),
                )
        if branchmerge:
            if pas == [p2]:
                raise error.Abort(
                    _(
                        b"merging with a working directory ancestor"
                        b" has no effect"
                    )
                )
            elif pas == [p1]:
                if not mergeancestor and wc.branch() == p2.branch():
                    raise error.Abort(
                        _(b"nothing to merge"),
                        hint=_(b"use 'hg update' or check 'hg heads'"),
                    )
            if not force and (wc.files() or wc.deleted()):
                raise error.StateError(
                    _(b"uncommitted changes"),
                    hint=_(b"use 'hg status' to list changes"),
                )
            if not wc.isinmemory():
                for s in sorted(wc.substate):
                    wc.sub(s).bailifchanged()

        elif not overwrite:
            if p1 == p2:  # no-op update
                # call the hooks and exit early
                repo.hook(b'preupdate', throw=True, parent1=xp2, parent2=b'')
                repo.hook(b'update', parent1=xp2, parent2=b'', error=0)
                return updateresult(0, 0, 0, 0)

            if updatecheck == UPDATECHECK_LINEAR and pas not in (
                [p1],
                [p2],
            ):  # nonlinear
                dirty = wc.dirty(missing=True)
                if dirty:
                    # The branching here is a bit strange, to make sure we do
                    # the minimal number of calls to obsutil.foreground.
                    foreground = obsutil.foreground(repo, [p1.node()])
                    # note: the <node> variable contains a random identifier
                    if repo[node].node() in foreground:
                        pass  # allow updating to successors
                    else:
                        msg = _(b"uncommitted changes")
                        hint = _(b"commit or update --clean to discard changes")
                        raise error.UpdateAbort(msg, hint=hint)
                else:
                    # Allow jumping branches if clean and specific rev given
                    pass

        if overwrite:
            pas = [wc]
        elif not branchmerge:
            pas = [p1]

        # deprecated config: merge.followcopies
        followcopies = repo.ui.configbool(b'merge', b'followcopies')
        if overwrite:
            followcopies = False
        elif not pas[0]:
            followcopies = False
        if not branchmerge and not wc.dirty(missing=True):
            followcopies = False

        ### calculate phase
        mresult = calculateupdates(
            repo,
            wc,
            p2,
            pas,
            branchmerge,
            force,
            mergeancestor,
            followcopies,
            matcher=matcher,
            mergeforce=mergeforce,
        )

        if updatecheck == UPDATECHECK_NO_CONFLICT:
            if mresult.hasconflicts():
                msg = _(b"conflicting changes")
                hint = _(b"commit or update --clean to discard changes")
                raise error.StateError(msg, hint=hint)

        # Prompt and create actions. Most of this is in the resolve phase
        # already, but we can't handle .hgsubstate in filemerge or
        # subrepoutil.submerge yet so we have to keep prompting for it.
        vals = mresult.getfile(b'.hgsubstate')
        if vals:
            f = b'.hgsubstate'
            m, args, msg = vals
            prompts = filemerge.partextras(labels)
            prompts[b'f'] = f
            if m == mergestatemod.ACTION_CHANGED_DELETED:
                if repo.ui.promptchoice(
                    _(
                        b"local%(l)s changed %(f)s which other%(o)s deleted\n"
                        b"use (c)hanged version or (d)elete?"
                        b"$$ &Changed $$ &Delete"
                    )
                    % prompts,
                    0,
                ):
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_REMOVE,
                        None,
                        b'prompt delete',
                    )
                elif f in p1:
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_ADD_MODIFIED,
                        None,
                        b'prompt keep',
                    )
                else:
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_ADD,
                        None,
                        b'prompt keep',
                    )
            elif m == mergestatemod.ACTION_DELETED_CHANGED:
                f1, f2, fa, move, anc = args
                flags = p2[f2].flags()
                if (
                    repo.ui.promptchoice(
                        _(
                            b"other%(o)s changed %(f)s which local%(l)s deleted\n"
                            b"use (c)hanged version or leave (d)eleted?"
                            b"$$ &Changed $$ &Deleted"
                        )
                        % prompts,
                        0,
                    )
                    == 0
                ):
                    mresult.addfile(
                        f,
                        mergestatemod.ACTION_GET,
                        (flags, False),
                        b'prompt recreating',
                    )
                else:
                    mresult.removefile(f)

        if not util.fscasesensitive(repo.path):
            # check collision between files only in p2 for clean update
            if not branchmerge and (
                force or not wc.dirty(missing=True, branch=False)
            ):
                _checkcollision(repo, p2.manifest(), None)
            else:
                _checkcollision(repo, wc.manifest(), mresult)

        # divergent renames
        for f, fl in sorted(pycompat.iteritems(mresult.diverge)):
            repo.ui.warn(
                _(
                    b"note: possible conflict - %s was renamed "
                    b"multiple times to:\n"
                )
                % f
            )
            for nf in sorted(fl):
                repo.ui.warn(b" %s\n" % nf)

        # rename and delete
        for f, fl in sorted(pycompat.iteritems(mresult.renamedelete)):
            repo.ui.warn(
                _(
                    b"note: possible conflict - %s was deleted "
                    b"and renamed to:\n"
                )
                % f
            )
            for nf in sorted(fl):
                repo.ui.warn(b" %s\n" % nf)

        ### apply phase
        if not branchmerge:  # just jump to the new rev
            fp1, fp2, xp1, xp2 = fp2, repo.nullid, xp2, b''
        # If we're doing a partial update, we need to skip updating
        # the dirstate.
        always = matcher is None or matcher.always()
        updatedirstate = updatedirstate and always and not wc.isinmemory()
        if updatedirstate:
            repo.hook(b'preupdate', throw=True, parent1=xp1, parent2=xp2)
            # note that we're in the middle of an update
            repo.vfs.write(b'updatestate', p2.hex())

        _advertisefsmonitor(
            repo, mresult.len((mergestatemod.ACTION_GET,)), p1.node()
        )

        wantfiledata = updatedirstate and not branchmerge
        stats, getfiledata, extraactions = applyupdates(
            repo,
            mresult,
            wc,
            p2,
            overwrite,
            wantfiledata,
            labels=labels,
        )

        if updatedirstate:
            if extraactions:
                for k, acts in pycompat.iteritems(extraactions):
                    for a in acts:
                        mresult.addfile(a[0], k, *a[1:])
                    if k == mergestatemod.ACTION_GET and wantfiledata:
                        # no filedata until mergestate is updated to provide it
                        for a in acts:
                            getfiledata[a[0]] = None

            assert len(getfiledata) == (
                mresult.len((mergestatemod.ACTION_GET,)) if wantfiledata else 0
            )
            with repo.dirstate.parentchange():
                repo.setparents(fp1, fp2)
                mergestatemod.recordupdates(
                    repo, mresult.actionsdict, branchmerge, getfiledata
                )
                # update completed, clear state
                util.unlink(repo.vfs.join(b'updatestate'))

                if not branchmerge:
                    repo.dirstate.setbranch(p2.branch())

        # If we're updating to a location, clean up any stale temporary includes
        # (ex: this happens during hg rebase --abort).
        if not branchmerge:
            sparse.prunetemporaryincludes(repo)

        if updatedirstate:
            repo.hook(
                b'update', parent1=xp1, parent2=xp2, error=stats.unresolvedcount
            )
        return stats


def merge(ctx, labels=None, force=False, wc=None):
    """Merge another topological branch into the working copy.

    force = whether the merge was run with 'merge --force' (deprecated)
    """

    return _update(
        ctx.repo(),
        ctx.rev(),
        labels=labels,
        branchmerge=True,
        force=force,
        mergeforce=force,
        wc=wc,
    )
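
# --- Editorial example, not part of this changeset -----------------------
# Driving merge() from extension code; the labels are illustrative:
def _example_merge_with(repo, rev):
    stats = merge(repo[rev], labels=[b'working copy', b'merge rev'])
    if stats.unresolvedcount:
        raise error.StateError(_(b"unresolved conflicts (see 'hg resolve')"))
    return stats
# --------------------------------------------------------------------------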


def update(ctx, updatecheck=None, wc=None):
    """Do a regular update to the given commit, aborting if there are conflicts.

    The 'updatecheck' argument can be used to control what to do in case of
    conflicts.

    Note: This is a new, higher-level update() than the one that used to exist
    in this module. That function is now called _update(). You can hopefully
    migrate your callers to use this new update(), or clean_update(), merge(),
    revert_to(), or graft().
    """
    return _update(
        ctx.repo(),
        ctx.rev(),
        branchmerge=False,
        force=False,
        labels=[b'working copy', b'destination'],
        updatecheck=updatecheck,
        wc=wc,
    )
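
# --- Editorial example, not part of this changeset -----------------------
# update() with an explicit conflict policy: UPDATECHECK_NO_CONFLICT makes
# the update abort instead of merging dirty files, mirroring the
# experimental.updatecheck=noconflict behavior documented in _update():
def _example_safe_update(repo, rev):
    return update(repo[rev], updatecheck=UPDATECHECK_NO_CONFLICT)
# --------------------------------------------------------------------------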


def clean_update(ctx, wc=None):
    """Do a clean update to the given commit.

    This involves updating to the commit and discarding any changes in the
    working copy.
    """
    return _update(ctx.repo(), ctx.rev(), branchmerge=False, force=True, wc=wc)
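
# --- Editorial example, not part of this changeset -----------------------
# clean_update() is the programmatic analogue of 'hg update --clean REV':
def _example_discard_and_update(repo, rev):
    return clean_update(repo[rev])
# --------------------------------------------------------------------------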


def revert_to(ctx, matcher=None, wc=None):
    """Revert the working copy to the given commit.

    The working copy will keep its current parent(s) but its content will
    be the same as in the given commit.
    """

    return _update(
        ctx.repo(),
        ctx.rev(),
        branchmerge=False,
        force=True,
        updatedirstate=False,
        matcher=matcher,
        wc=wc,
    )
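
# --- Editorial example, not part of this changeset -----------------------
# revert_to() changes file contents while keeping the working copy parents,
# which is the engine behind 'hg revert --all --rev REV'.  scmutil.revsingle
# (imported locally for the sketch) resolves a user-supplied revision spec:
def _example_revert_all_to(repo, revspec):
    from mercurial import scmutil

    return revert_to(scmutil.revsingle(repo, revspec))
# --------------------------------------------------------------------------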


def graft(
    repo,
    ctx,
    base=None,
    labels=None,
    keepparent=False,
    keepconflictparent=False,
    wctx=None,
):
    """Do a graft-like merge.

    This is a merge where the merge ancestor is chosen such that one
    or more changesets are grafted onto the current changeset. In
    addition to the merge, this fixes up the dirstate to include only
    a single parent (if keepparent is False) and tries to duplicate any
    renames/copies appropriately.

    ctx - changeset to rebase
    base - merge base, or ctx.p1() if not specified
    labels - merge labels, e.g. ['local', 'graft']
    keepparent - keep second parent if any
    keepconflictparent - if unresolved, keep parent used for the merge

    """
    # If we're grafting a descendant onto an ancestor, be sure to pass
    # mergeancestor=True to update. This does two things: 1) allows the merge if
    # the destination is the same as the parent of the ctx (so we can use graft
    # to copy commits), and 2) informs update that the incoming changes are
    # newer than the destination so it doesn't prompt about "remote changed foo
    # which local deleted".
    # We also pass mergeancestor=True when base is the same revision as p1. 2)
    # doesn't matter as there can't possibly be conflicts, but 1) is necessary.
    wctx = wctx or repo[None]
    pctx = wctx.p1()
    base = base or ctx.p1()
    mergeancestor = (
        repo.changelog.isancestor(pctx.node(), ctx.node())
        or pctx.rev() == base.rev()
    )

    stats = _update(
        repo,
        ctx.node(),
        True,
        True,
        base.node(),
        mergeancestor=mergeancestor,
        labels=labels,
        wc=wctx,
    )

    if keepconflictparent and stats.unresolvedcount:
        pother = ctx.node()
    else:
        pother = repo.nullid
        parents = ctx.parents()
        if keepparent and len(parents) == 2 and base in parents:
            parents.remove(base)
            pother = parents[0].node()
    # Never set both parents equal to each other
    if pother == pctx.node():
        pother = repo.nullid

    if wctx.isinmemory():
        wctx.setparents(pctx.node(), pother)
        # fix up dirstate for copies and renames
        copies.graftcopies(wctx, ctx, base)
    else:
        with repo.dirstate.parentchange():
            repo.setparents(pctx.node(), pother)
            repo.dirstate.write(repo.currenttransaction())
            # fix up dirstate for copies and renames
            copies.graftcopies(wctx, ctx, base)
    return stats
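
# --- Editorial example, not part of this changeset -----------------------
# Grafting a single changeset onto the working parent, roughly what the
# graft command does per revision (labels as suggested in the docstring):
def _example_graft_one(repo, rev):
    stats = graft(repo, repo[rev], labels=[b'local', b'graft'])
    if stats.unresolvedcount:
        repo.ui.status(b"fix conflicts, then resume the graft\n")
    return stats
# --------------------------------------------------------------------------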


def back_out(ctx, parent=None, wc=None):
    if parent is None:
        if ctx.p2() is not None:
            raise error.ProgrammingError(
                b"must specify parent of merge commit to back out"
            )
        parent = ctx.p1()
    return _update(
        ctx.repo(),
        parent,
        branchmerge=True,
        force=True,
        ancestor=ctx.node(),
        mergeancestor=False,
    )
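
# --- Editorial example, not part of this changeset -----------------------
# back_out() merges against the parent of `ctx` while using `ctx` itself as
# the ancestor, which reverse-applies that changeset in the working copy;
# merge changesets need an explicit parent= to back out towards:
def _example_back_out(repo, rev):
    return back_out(repo[rev])  # non-merge changeset, per the check above
# --------------------------------------------------------------------------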


def purge(
    repo,
    matcher,
    unknown=True,
    ignored=False,
    removeemptydirs=True,
    removefiles=True,
    abortonerror=False,
    noop=False,
    confirm=False,
):
    """Purge the working directory of untracked files.

    ``matcher`` is a matcher configured to scan the working directory -
    potentially a subset.

    ``unknown`` controls whether unknown files should be purged.

    ``ignored`` controls whether ignored files should be purged.

    ``removeemptydirs`` controls whether empty directories should be removed.

    ``removefiles`` controls whether files are removed.

    ``abortonerror`` causes an exception to be raised if an error occurs
    deleting a file or directory.

    ``noop`` controls whether files are actually removed; when true, nothing
    is removed and this is a dry run.

    ``confirm`` asks for confirmation before actually removing anything.

    Returns an iterable of relative paths in the working directory that were
    or would be removed.
    """

    def remove(removefn, path):
        try:
            removefn(path)
        except OSError:
            m = _(b'%s cannot be removed') % path
            if abortonerror:
                raise error.Abort(m)
            else:
                repo.ui.warn(_(b'warning: %s\n') % m)

    # There's no API to copy a matcher. So mutate the passed matcher and
    # restore it when we're done.
    oldtraversedir = matcher.traversedir

    res = []

    try:
        if removeemptydirs:
            directories = []
            matcher.traversedir = directories.append

        status = repo.status(match=matcher, ignored=ignored, unknown=unknown)

        if confirm:
            nb_ignored = len(status.ignored)
            nb_unknown = len(status.unknown)
            if nb_unknown and nb_ignored:
                msg = _(b"permanently delete %d unknown and %d ignored files?")
                msg %= (nb_unknown, nb_ignored)
            elif nb_unknown:
                msg = _(b"permanently delete %d unknown files?")
                msg %= nb_unknown
            elif nb_ignored:
                msg = _(b"permanently delete %d ignored files?")
                msg %= nb_ignored
            elif removeemptydirs:
                dir_count = 0
                for f in directories:
                    if matcher(f) and not repo.wvfs.listdir(f):
                        dir_count += 1
                if dir_count:
                    msg = _(
                        b"permanently delete at least %d empty directories?"
                    )
                    msg %= dir_count
                else:
                    # XXX we might be missing directories here
                    return res
            msg += b" (yN)$$ &Yes $$ &No"
            if repo.ui.promptchoice(msg, default=1) == 1:
                raise error.CanceledError(_(b'removal cancelled'))

        if removefiles:
            for f in sorted(status.unknown + status.ignored):
                if not noop:
                    repo.ui.note(_(b'removing file %s\n') % f)
                    remove(repo.wvfs.unlink, f)
                res.append(f)

        if removeemptydirs:
            for f in sorted(directories, reverse=True):
                if matcher(f) and not repo.wvfs.listdir(f):
                    if not noop:
                        repo.ui.note(_(b'removing directory %s\n') % f)
                        remove(repo.wvfs.rmdir, f)
                    res.append(f)

        return res

    finally:
        matcher.traversedir = oldtraversedir
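
# --- Editorial example, not part of this changeset -----------------------
# A dry-run purge over the whole working directory, in the spirit of
# 'hg purge --print'; scmutil.match (imported locally for the sketch)
# builds the matcher this function expects:
def _example_purge_dry_run(repo):
    from mercurial import scmutil

    m = scmutil.match(repo[None], [], {})
    for path in purge(repo, m, unknown=True, ignored=False, noop=True):
        repo.ui.write(b"would remove %s\n" % path)
# --------------------------------------------------------------------------
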
@@ -1,886 +1,888 @@
# censor code related to censoring revision
# coding: utf8
#
# Copyright 2021 Pierre-Yves David <pierre-yves.david@octobus.net>
# Copyright 2015 Google, Inc <martinvonz@google.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

import binascii
import contextlib
import os
import struct

from ..node import (
    nullrev,
)
from .constants import (
    COMP_MODE_PLAIN,
    ENTRY_DATA_COMPRESSED_LENGTH,
    ENTRY_DATA_COMPRESSION_MODE,
    ENTRY_DATA_OFFSET,
    ENTRY_DATA_UNCOMPRESSED_LENGTH,
    ENTRY_DELTA_BASE,
    ENTRY_LINK_REV,
    ENTRY_NODE_ID,
    ENTRY_PARENT_1,
    ENTRY_PARENT_2,
    ENTRY_SIDEDATA_COMPRESSED_LENGTH,
    ENTRY_SIDEDATA_COMPRESSION_MODE,
    ENTRY_SIDEDATA_OFFSET,
    REVIDX_ISCENSORED,
    REVLOGV0,
    REVLOGV1,
)
from ..i18n import _

from .. import (
    error,
    mdiff,
    pycompat,
    revlogutils,
    util,
)
from ..utils import (
    storageutil,
)
from . import (
    constants,
    deltas,
)


def v1_censor(rl, tr, censornode, tombstone=b''):
    """censors a revision in a "version 1" revlog"""
    assert rl._format_version == constants.REVLOGV1, rl._format_version

    # avoid cycle
    from .. import revlog

    censorrev = rl.rev(censornode)
    tombstone = storageutil.packmeta({b'censored': tombstone}, b'')

    # Rewriting the revlog in place is hard. Our strategy for censoring is
    # to create a new revlog, copy all revisions to it, then replace the
    # revlogs on transaction close.
    #
    # This is a bit dangerous. We could easily have a mismatch of state.
    newrl = revlog.revlog(
        rl.opener,
        target=rl.target,
        radix=rl.radix,
        postfix=b'tmpcensored',
        censorable=True,
    )
    newrl._format_version = rl._format_version
    newrl._format_flags = rl._format_flags
    newrl._generaldelta = rl._generaldelta
    newrl._parse_index = rl._parse_index

    for rev in rl.revs():
        node = rl.node(rev)
        p1, p2 = rl.parents(node)

        if rev == censorrev:
            newrl.addrawrevision(
                tombstone,
                tr,
                rl.linkrev(censorrev),
                p1,
                p2,
                censornode,
                constants.REVIDX_ISCENSORED,
            )

            if newrl.deltaparent(rev) != nullrev:
                m = _(b'censored revision stored as delta; cannot censor')
                h = _(
                    b'censoring of revlogs is not fully implemented;'
                    b' please report this bug'
                )
                raise error.Abort(m, hint=h)
            continue

        if rl.iscensored(rev):
            if rl.deltaparent(rev) != nullrev:
                m = _(
                    b'cannot censor due to censored '
                    b'revision having delta stored'
                )
                raise error.Abort(m)
            rawtext = rl._chunk(rev)
        else:
            rawtext = rl.rawdata(rev)

        newrl.addrawrevision(
            rawtext, tr, rl.linkrev(rev), p1, p2, node, rl.flags(rev)
        )

    tr.addbackup(rl._indexfile, location=b'store')
    if not rl._inline:
        tr.addbackup(rl._datafile, location=b'store')

    rl.opener.rename(newrl._indexfile, rl._indexfile)
    if not rl._inline:
        rl.opener.rename(newrl._datafile, rl._datafile)

    rl.clearcaches()
    rl._loadindex()
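
# --- Editorial example, not part of this changeset -----------------------
# Calling the v1 path by hand; the filelog/transaction plumbing shown here
# is an assumption for illustration (the censor extension normally goes
# through higher-level helpers rather than this module directly):
def _example_censor_v1(repo, path, filenode, tombstone=b''):
    with repo.lock(), repo.transaction(b'censor') as tr:
        rl = repo.file(path)._revlog  # the underlying "version 1" revlog
        v1_censor(rl, tr, filenode, tombstone)
# --------------------------------------------------------------------------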


def v2_censor(revlog, tr, censornode, tombstone=b''):
    """censors a revision in a "version 2" revlog"""
    assert revlog._format_version != REVLOGV0, revlog._format_version
    assert revlog._format_version != REVLOGV1, revlog._format_version

    censor_revs = {revlog.rev(censornode)}
    _rewrite_v2(revlog, tr, censor_revs, tombstone)
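
# --- Editorial example, not part of this changeset -----------------------
# v2_censor() simply wraps _rewrite_v2() with a one-element revision set,
# so rewriting several censored nodes in one pass is a natural extension
# (`nodes` is a hypothetical iterable of nodes to censor):
def _example_censor_many(revlog, tr, nodes, tombstone=b''):
    censor_revs = {revlog.rev(n) for n in nodes}
    _rewrite_v2(revlog, tr, censor_revs, tombstone)
# --------------------------------------------------------------------------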


def _rewrite_v2(revlog, tr, censor_revs, tombstone=b''):
    """rewrite a revlog to censor some of its content

    General principle

    We create new revlog files (index/data/sidedata) to copy the content of
    the existing data without the censored data.

    We need to recompute a new delta for any revision that used the censored
    revision as its delta base. As the cumulative size of the new deltas may
    be large, we store them in a temporary file until they are stored in
    their final destination.

    All data before the censored data can be blindly copied. The rest needs
    to be copied as we go and the associated index entry needs adjustment.
156 """
156 """
157 assert revlog._format_version != REVLOGV0, revlog._format_version
157 assert revlog._format_version != REVLOGV0, revlog._format_version
158 assert revlog._format_version != REVLOGV1, revlog._format_version
158 assert revlog._format_version != REVLOGV1, revlog._format_version
159
159
160 old_index = revlog.index
160 old_index = revlog.index
161 docket = revlog._docket
161 docket = revlog._docket
162
162
163 tombstone = storageutil.packmeta({b'censored': tombstone}, b'')
163 tombstone = storageutil.packmeta({b'censored': tombstone}, b'')
164
164
165 first_excl_rev = min(censor_revs)
165 first_excl_rev = min(censor_revs)
166
166
167 first_excl_entry = revlog.index[first_excl_rev]
167 first_excl_entry = revlog.index[first_excl_rev]
168 index_cutoff = revlog.index.entry_size * first_excl_rev
168 index_cutoff = revlog.index.entry_size * first_excl_rev
169 data_cutoff = first_excl_entry[ENTRY_DATA_OFFSET] >> 16
169 data_cutoff = first_excl_entry[ENTRY_DATA_OFFSET] >> 16
170 sidedata_cutoff = revlog.sidedata_cut_off(first_excl_rev)
170 sidedata_cutoff = revlog.sidedata_cut_off(first_excl_rev)
171
171
172 with pycompat.unnamedtempfile(mode=b"w+b") as tmp_storage:
172 with pycompat.unnamedtempfile(mode=b"w+b") as tmp_storage:
173 # rev → (new_base, data_start, data_end, compression_mode)
173 # rev → (new_base, data_start, data_end, compression_mode)
174 rewritten_entries = _precompute_rewritten_delta(
174 rewritten_entries = _precompute_rewritten_delta(
175 revlog,
175 revlog,
176 old_index,
176 old_index,
177 censor_revs,
177 censor_revs,
178 tmp_storage,
178 tmp_storage,
179 )
179 )
180
180
181 all_files = _setup_new_files(
181 all_files = _setup_new_files(
182 revlog,
182 revlog,
183 index_cutoff,
183 index_cutoff,
184 data_cutoff,
184 data_cutoff,
185 sidedata_cutoff,
185 sidedata_cutoff,
186 )
186 )
187
187
        # we don't need to open the old index file since its content already
        # exists in a usable form in `old_index`.
        with all_files() as open_files:
            (
                old_data_file,
                old_sidedata_file,
                new_index_file,
                new_data_file,
                new_sidedata_file,
            ) = open_files

            # writing the censored revision

            # Writing all subsequent revisions
            for rev in range(first_excl_rev, len(old_index)):
                if rev in censor_revs:
                    _rewrite_censor(
                        revlog,
                        old_index,
                        open_files,
                        rev,
                        tombstone,
                    )
                else:
                    _rewrite_simple(
                        revlog,
                        old_index,
                        open_files,
                        rev,
                        rewritten_entries,
                        tmp_storage,
                    )
            docket.write(transaction=None, stripping=True)


def _precompute_rewritten_delta(
    revlog,
    old_index,
    excluded_revs,
    tmp_storage,
):
229 """Compute new delta for revisions whose delta is based on revision that
229 """Compute new delta for revisions whose delta is based on revision that
230 will not survive as is.
230 will not survive as is.

    Return a mapping: {rev → (new_base, data_start, data_end, compression_mode)}
    """
    dc = deltas.deltacomputer(revlog)
    rewritten_entries = {}
    first_excl_rev = min(excluded_revs)
    with revlog._segmentfile._open_read() as dfh:
        for rev in range(first_excl_rev, len(old_index)):
            if rev in excluded_revs:
240 # this revision will be rewritten as a tombstone, so there is
240 # this revision will be rewritten as a tombstone, so there is
241 # no need to compute a delta for it.
241 # no need to compute a delta for it.
242 continue
242 continue
243 entry = old_index[rev]
243 entry = old_index[rev]
244 if entry[ENTRY_DELTA_BASE] not in excluded_revs:
244 if entry[ENTRY_DELTA_BASE] not in excluded_revs:
245 continue
245 continue
246 # This is a revision that uses the censored revision as the base
246 # This is a revision that uses the censored revision as the base
247 # for its delta. We need to compute a new delta for it.
247 # for its delta. We need to compute a new delta for it.
248 if entry[ENTRY_DATA_UNCOMPRESSED_LENGTH] == 0:
248 if entry[ENTRY_DATA_UNCOMPRESSED_LENGTH] == 0:
249 # this revision is empty, we can delta against nullrev
249 # this revision is empty, we can delta against nullrev
250 rewritten_entries[rev] = (nullrev, 0, 0, COMP_MODE_PLAIN)
250 rewritten_entries[rev] = (nullrev, 0, 0, COMP_MODE_PLAIN)
251 else:
251 else:
252
252
253 text = revlog.rawdata(rev, _df=dfh)
253 text = revlog.rawdata(rev, _df=dfh)
254 info = revlogutils.revisioninfo(
254 info = revlogutils.revisioninfo(
255 node=entry[ENTRY_NODE_ID],
255 node=entry[ENTRY_NODE_ID],
256 p1=revlog.node(entry[ENTRY_PARENT_1]),
256 p1=revlog.node(entry[ENTRY_PARENT_1]),
257 p2=revlog.node(entry[ENTRY_PARENT_2]),
257 p2=revlog.node(entry[ENTRY_PARENT_2]),
258 btext=[text],
258 btext=[text],
259 textlen=len(text),
259 textlen=len(text),
260 cachedelta=None,
260 cachedelta=None,
261 flags=entry[ENTRY_DATA_OFFSET] & 0xFFFF,
261 flags=entry[ENTRY_DATA_OFFSET] & 0xFFFF,
262 )
262 )
263 d = dc.finddeltainfo(
263 d = dc.finddeltainfo(
264 info, dfh, excluded_bases=excluded_revs, target_rev=rev
264 info, dfh, excluded_bases=excluded_revs, target_rev=rev
265 )
265 )
266 default_comp = revlog._docket.default_compression_header
266 default_comp = revlog._docket.default_compression_header
267 comp_mode, d = deltas.delta_compression(default_comp, d)
267 comp_mode, d = deltas.delta_compression(default_comp, d)
268 # using `tell` is a bit lazy, but we are not here for speed
268 # using `tell` is a bit lazy, but we are not here for speed
269 start = tmp_storage.tell()
269 start = tmp_storage.tell()
270 tmp_storage.write(d.data[1])
270 tmp_storage.write(d.data[1])
271 end = tmp_storage.tell()
271 end = tmp_storage.tell()
272 rewritten_entries[rev] = (d.base, start, end, comp_mode)
272 rewritten_entries[rev] = (d.base, start, end, comp_mode)
273 return rewritten_entries
273 return rewritten_entries
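The function stages each recomputed delta in an anonymous temporary file and records only `(new_base, start, end, comp_mode)`; the bytes are sliced back out later in `_rewrite_simple`. A minimal sketch of that write-then-slice pattern, independent of Mercurial:

```python
import tempfile

# Stage several blobs in one scratch file and remember their extents,
# mirroring how rewritten deltas are kept until they are replayed.
with tempfile.TemporaryFile() as tmp:
    extents = {}
    for rev, blob in enumerate([b'delta-0', b'delta-1']):
        start = tmp.tell()
        tmp.write(blob)
        extents[rev] = (start, tmp.tell())

    start, end = extents[1]
    tmp.seek(start)
    assert tmp.read(end - start) == b'delta-1'
```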
274
274
275
275
276 def _setup_new_files(
276 def _setup_new_files(
277 revlog,
277 revlog,
278 index_cutoff,
278 index_cutoff,
279 data_cutoff,
279 data_cutoff,
280 sidedata_cutoff,
280 sidedata_cutoff,
281 ):
281 ):
282 """
282 """
283
283
284 return a context manager to open all the relevant files:
284 return a context manager to open all the relevant files:
285 - old_data_file,
285 - old_data_file,
286 - old_sidedata_file,
286 - old_sidedata_file,
287 - new_index_file,
287 - new_index_file,
288 - new_data_file,
288 - new_data_file,
289 - new_sidedata_file,
289 - new_sidedata_file,
290
290
291 The old_index_file is not here because it is accessed through the
291 The old_index_file is not here because it is accessed through the
292 `old_index` object of the caller function.
292 `old_index` object of the caller function.
293 """
293 """
294 docket = revlog._docket
294 docket = revlog._docket
295 old_index_filepath = revlog.opener.join(docket.index_filepath())
295 old_index_filepath = revlog.opener.join(docket.index_filepath())
296 old_data_filepath = revlog.opener.join(docket.data_filepath())
296 old_data_filepath = revlog.opener.join(docket.data_filepath())
297 old_sidedata_filepath = revlog.opener.join(docket.sidedata_filepath())
297 old_sidedata_filepath = revlog.opener.join(docket.sidedata_filepath())
298
298
299 new_index_filepath = revlog.opener.join(docket.new_index_file())
299 new_index_filepath = revlog.opener.join(docket.new_index_file())
300 new_data_filepath = revlog.opener.join(docket.new_data_file())
300 new_data_filepath = revlog.opener.join(docket.new_data_file())
301 new_sidedata_filepath = revlog.opener.join(docket.new_sidedata_file())
301 new_sidedata_filepath = revlog.opener.join(docket.new_sidedata_file())
302
302
303 util.copyfile(old_index_filepath, new_index_filepath, nb_bytes=index_cutoff)
303 util.copyfile(old_index_filepath, new_index_filepath, nb_bytes=index_cutoff)
304 util.copyfile(old_data_filepath, new_data_filepath, nb_bytes=data_cutoff)
304 util.copyfile(old_data_filepath, new_data_filepath, nb_bytes=data_cutoff)
305 util.copyfile(
305 util.copyfile(
306 old_sidedata_filepath,
306 old_sidedata_filepath,
307 new_sidedata_filepath,
307 new_sidedata_filepath,
308 nb_bytes=sidedata_cutoff,
308 nb_bytes=sidedata_cutoff,
309 )
309 )
310 revlog.opener.register_file(docket.index_filepath())
310 revlog.opener.register_file(docket.index_filepath())
311 revlog.opener.register_file(docket.data_filepath())
311 revlog.opener.register_file(docket.data_filepath())
312 revlog.opener.register_file(docket.sidedata_filepath())
312 revlog.opener.register_file(docket.sidedata_filepath())
313
313
314 docket.index_end = index_cutoff
314 docket.index_end = index_cutoff
315 docket.data_end = data_cutoff
315 docket.data_end = data_cutoff
316 docket.sidedata_end = sidedata_cutoff
316 docket.sidedata_end = sidedata_cutoff
317
317
318 # reload the revlog internal information
318 # reload the revlog internal information
319 revlog.clearcaches()
319 revlog.clearcaches()
320 revlog._loadindex(docket=docket)
320 revlog._loadindex(docket=docket)
321
321
322 @contextlib.contextmanager
322 @contextlib.contextmanager
323 def all_files_opener():
323 def all_files_opener():
324 # hide the opening in a helper function to please check-code, black
324 # hide the opening in a helper function to please check-code, black
325 # and various Python versions at the same time
325 # and various Python versions at the same time
326 with open(old_data_filepath, 'rb') as old_data_file:
326 with open(old_data_filepath, 'rb') as old_data_file:
327 with open(old_sidedata_filepath, 'rb') as old_sidedata_file:
327 with open(old_sidedata_filepath, 'rb') as old_sidedata_file:
328 with open(new_index_filepath, 'r+b') as new_index_file:
328 with open(new_index_filepath, 'r+b') as new_index_file:
329 with open(new_data_filepath, 'r+b') as new_data_file:
329 with open(new_data_filepath, 'r+b') as new_data_file:
330 with open(
330 with open(
331 new_sidedata_filepath, 'r+b'
331 new_sidedata_filepath, 'r+b'
332 ) as new_sidedata_file:
332 ) as new_sidedata_file:
333 new_index_file.seek(0, os.SEEK_END)
333 new_index_file.seek(0, os.SEEK_END)
334 assert new_index_file.tell() == index_cutoff
334 assert new_index_file.tell() == index_cutoff
335 new_data_file.seek(0, os.SEEK_END)
335 new_data_file.seek(0, os.SEEK_END)
336 assert new_data_file.tell() == data_cutoff
336 assert new_data_file.tell() == data_cutoff
337 new_sidedata_file.seek(0, os.SEEK_END)
337 new_sidedata_file.seek(0, os.SEEK_END)
338 assert new_sidedata_file.tell() == sidedata_cutoff
338 assert new_sidedata_file.tell() == sidedata_cutoff
339 yield (
339 yield (
340 old_data_file,
340 old_data_file,
341 old_sidedata_file,
341 old_sidedata_file,
342 new_index_file,
342 new_index_file,
343 new_data_file,
343 new_data_file,
344 new_sidedata_file,
344 new_sidedata_file,
345 )
345 )
346
346
347 return all_files_opener
347 return all_files_opener
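The nested `with open(...)` pyramid above exists only to keep the checkers and older Python versions happy. Assuming `contextlib.ExitStack` were acceptable here, an equivalent opener could be flattened as below (a sketch reusing the path variables computed above, not the code the patch actually uses):

```python
import contextlib

@contextlib.contextmanager
def all_files_opener_alt():
    # Open the same five files without one `with` level per file.
    with contextlib.ExitStack() as stack:
        yield (
            stack.enter_context(open(old_data_filepath, 'rb')),
            stack.enter_context(open(old_sidedata_filepath, 'rb')),
            stack.enter_context(open(new_index_filepath, 'r+b')),
            stack.enter_context(open(new_data_filepath, 'r+b')),
            stack.enter_context(open(new_sidedata_filepath, 'r+b')),
        )
```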
348
348
349
349
350 def _rewrite_simple(
350 def _rewrite_simple(
351 revlog,
351 revlog,
352 old_index,
352 old_index,
353 all_files,
353 all_files,
354 rev,
354 rev,
355 rewritten_entries,
355 rewritten_entries,
356 tmp_storage,
356 tmp_storage,
357 ):
357 ):
358 """append a normal revision to the index after the rewritten one(s)"""
358 """append a normal revision to the index after the rewritten one(s)"""
359 (
359 (
360 old_data_file,
360 old_data_file,
361 old_sidedata_file,
361 old_sidedata_file,
362 new_index_file,
362 new_index_file,
363 new_data_file,
363 new_data_file,
364 new_sidedata_file,
364 new_sidedata_file,
365 ) = all_files
365 ) = all_files
366 entry = old_index[rev]
366 entry = old_index[rev]
367 flags = entry[ENTRY_DATA_OFFSET] & 0xFFFF
367 flags = entry[ENTRY_DATA_OFFSET] & 0xFFFF
368 old_data_offset = entry[ENTRY_DATA_OFFSET] >> 16
368 old_data_offset = entry[ENTRY_DATA_OFFSET] >> 16
369
369
370 if rev not in rewritten_entries:
370 if rev not in rewritten_entries:
371 old_data_file.seek(old_data_offset)
371 old_data_file.seek(old_data_offset)
372 new_data_size = entry[ENTRY_DATA_COMPRESSED_LENGTH]
372 new_data_size = entry[ENTRY_DATA_COMPRESSED_LENGTH]
373 new_data = old_data_file.read(new_data_size)
373 new_data = old_data_file.read(new_data_size)
374 data_delta_base = entry[ENTRY_DELTA_BASE]
374 data_delta_base = entry[ENTRY_DELTA_BASE]
375 d_comp_mode = entry[ENTRY_DATA_COMPRESSION_MODE]
375 d_comp_mode = entry[ENTRY_DATA_COMPRESSION_MODE]
376 else:
376 else:
377 (
377 (
378 data_delta_base,
378 data_delta_base,
379 start,
379 start,
380 end,
380 end,
381 d_comp_mode,
381 d_comp_mode,
382 ) = rewritten_entries[rev]
382 ) = rewritten_entries[rev]
383 new_data_size = end - start
383 new_data_size = end - start
384 tmp_storage.seek(start)
384 tmp_storage.seek(start)
385 new_data = tmp_storage.read(new_data_size)
385 new_data = tmp_storage.read(new_data_size)
386
386
387 # It might be faster to group contiguous read/write operations;
387 # It might be faster to group contiguous read/write operations;
388 # however, censoring is not an operation focused on stellar
388 # however, censoring is not an operation focused on stellar
389 # performance, so this optimisation has not been written yet.
389 # performance, so this optimisation has not been written yet.
391 new_data_offset = new_data_file.tell()
391 new_data_offset = new_data_file.tell()
392 new_data_file.write(new_data)
392 new_data_file.write(new_data)
393
393
394 sidedata_size = entry[ENTRY_SIDEDATA_COMPRESSED_LENGTH]
394 sidedata_size = entry[ENTRY_SIDEDATA_COMPRESSED_LENGTH]
395 new_sidedata_offset = new_sidedata_file.tell()
395 new_sidedata_offset = new_sidedata_file.tell()
396 if 0 < sidedata_size:
396 if 0 < sidedata_size:
397 old_sidedata_offset = entry[ENTRY_SIDEDATA_OFFSET]
397 old_sidedata_offset = entry[ENTRY_SIDEDATA_OFFSET]
398 old_sidedata_file.seek(old_sidedata_offset)
398 old_sidedata_file.seek(old_sidedata_offset)
399 new_sidedata = old_sidedata_file.read(sidedata_size)
399 new_sidedata = old_sidedata_file.read(sidedata_size)
400 new_sidedata_file.write(new_sidedata)
400 new_sidedata_file.write(new_sidedata)
401
401
402 data_uncompressed_length = entry[ENTRY_DATA_UNCOMPRESSED_LENGTH]
402 data_uncompressed_length = entry[ENTRY_DATA_UNCOMPRESSED_LENGTH]
403 sd_com_mode = entry[ENTRY_SIDEDATA_COMPRESSION_MODE]
403 sd_com_mode = entry[ENTRY_SIDEDATA_COMPRESSION_MODE]
404 assert data_delta_base <= rev, (data_delta_base, rev)
404 assert data_delta_base <= rev, (data_delta_base, rev)
405
405
406 new_entry = revlogutils.entry(
406 new_entry = revlogutils.entry(
407 flags=flags,
407 flags=flags,
408 data_offset=new_data_offset,
408 data_offset=new_data_offset,
409 data_compressed_length=new_data_size,
409 data_compressed_length=new_data_size,
410 data_uncompressed_length=data_uncompressed_length,
410 data_uncompressed_length=data_uncompressed_length,
411 data_delta_base=data_delta_base,
411 data_delta_base=data_delta_base,
412 link_rev=entry[ENTRY_LINK_REV],
412 link_rev=entry[ENTRY_LINK_REV],
413 parent_rev_1=entry[ENTRY_PARENT_1],
413 parent_rev_1=entry[ENTRY_PARENT_1],
414 parent_rev_2=entry[ENTRY_PARENT_2],
414 parent_rev_2=entry[ENTRY_PARENT_2],
415 node_id=entry[ENTRY_NODE_ID],
415 node_id=entry[ENTRY_NODE_ID],
416 sidedata_offset=new_sidedata_offset,
416 sidedata_offset=new_sidedata_offset,
417 sidedata_compressed_length=sidedata_size,
417 sidedata_compressed_length=sidedata_size,
418 data_compression_mode=d_comp_mode,
418 data_compression_mode=d_comp_mode,
419 sidedata_compression_mode=sd_com_mode,
419 sidedata_compression_mode=sd_com_mode,
420 )
420 )
421 revlog.index.append(new_entry)
421 revlog.index.append(new_entry)
422 entry_bin = revlog.index.entry_binary(rev)
422 entry_bin = revlog.index.entry_binary(rev)
423 new_index_file.write(entry_bin)
423 new_index_file.write(entry_bin)
424
424
425 revlog._docket.index_end = new_index_file.tell()
425 revlog._docket.index_end = new_index_file.tell()
426 revlog._docket.data_end = new_data_file.tell()
426 revlog._docket.data_end = new_data_file.tell()
427 revlog._docket.sidedata_end = new_sidedata_file.tell()
427 revlog._docket.sidedata_end = new_sidedata_file.tell()
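Both the data and sidedata branches above perform the same primitive: read a known extent from the old file and append it at the current end of the new one. A standalone sketch of that step (a hypothetical helper, not part of the patch):

```python
def copy_extent(src, dst, offset, length):
    """Append `length` bytes found at `offset` in `src` to `dst`.

    Return the offset at which the bytes landed in `dst`.
    """
    src.seek(offset)
    new_offset = dst.tell()
    dst.write(src.read(length))
    return new_offset
```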
428
428
429
429
430 def _rewrite_censor(
430 def _rewrite_censor(
431 revlog,
431 revlog,
432 old_index,
432 old_index,
433 all_files,
433 all_files,
434 rev,
434 rev,
435 tombstone,
435 tombstone,
436 ):
436 ):
437 """rewrite and append a censored revision"""
437 """rewrite and append a censored revision"""
438 (
438 (
439 old_data_file,
439 old_data_file,
440 old_sidedata_file,
440 old_sidedata_file,
441 new_index_file,
441 new_index_file,
442 new_data_file,
442 new_data_file,
443 new_sidedata_file,
443 new_sidedata_file,
444 ) = all_files
444 ) = all_files
445 entry = old_index[rev]
445 entry = old_index[rev]
446
446
447 # XXX consider trying the default compression too
447 # XXX consider trying the default compression too
448 new_data_size = len(tombstone)
448 new_data_size = len(tombstone)
449 new_data_offset = new_data_file.tell()
449 new_data_offset = new_data_file.tell()
450 new_data_file.write(tombstone)
450 new_data_file.write(tombstone)
451
451
452 # we are not adding any sidedata as they might leak info about the censored version
452 # we are not adding any sidedata as they might leak info about the censored version
453
453
454 link_rev = entry[ENTRY_LINK_REV]
454 link_rev = entry[ENTRY_LINK_REV]
455
455
456 p1 = entry[ENTRY_PARENT_1]
456 p1 = entry[ENTRY_PARENT_1]
457 p2 = entry[ENTRY_PARENT_2]
457 p2 = entry[ENTRY_PARENT_2]
458
458
459 new_entry = revlogutils.entry(
459 new_entry = revlogutils.entry(
460 flags=constants.REVIDX_ISCENSORED,
460 flags=constants.REVIDX_ISCENSORED,
461 data_offset=new_data_offset,
461 data_offset=new_data_offset,
462 data_compressed_length=new_data_size,
462 data_compressed_length=new_data_size,
463 data_uncompressed_length=new_data_size,
463 data_uncompressed_length=new_data_size,
464 data_delta_base=rev,
464 data_delta_base=rev,
465 link_rev=link_rev,
465 link_rev=link_rev,
466 parent_rev_1=p1,
466 parent_rev_1=p1,
467 parent_rev_2=p2,
467 parent_rev_2=p2,
468 node_id=entry[ENTRY_NODE_ID],
468 node_id=entry[ENTRY_NODE_ID],
469 sidedata_offset=0,
469 sidedata_offset=0,
470 sidedata_compressed_length=0,
470 sidedata_compressed_length=0,
471 data_compression_mode=COMP_MODE_PLAIN,
471 data_compression_mode=COMP_MODE_PLAIN,
472 sidedata_compression_mode=COMP_MODE_PLAIN,
472 sidedata_compression_mode=COMP_MODE_PLAIN,
473 )
473 )
474 revlog.index.append(new_entry)
474 revlog.index.append(new_entry)
475 entry_bin = revlog.index.entry_binary(rev)
475 entry_bin = revlog.index.entry_binary(rev)
476 new_index_file.write(entry_bin)
476 new_index_file.write(entry_bin)
477 revlog._docket.index_end = new_index_file.tell()
477 revlog._docket.index_end = new_index_file.tell()
478 revlog._docket.data_end = new_data_file.tell()
478 revlog._docket.data_end = new_data_file.tell()
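The tombstone written here was built earlier with `storageutil.packmeta({b'censored': tombstone}, b'')`. Assuming the standard revlog metadata framing (`\x01\n` markers around `key: value` lines, followed by the revision text), a rough sketch of what that produces:

```python
# Rough equivalent of storageutil.packmeta for a censor tombstone;
# the framing shown here is an assumption about the metadata layout.
def pack_censor_tombstone(reason):
    meta = b'censored: %s\n' % reason
    return b'\x01\n' + meta + b'\x01\n' + b''  # empty revision text

blob = pack_censor_tombstone(b'removed by admin')
assert blob.startswith(b'\x01\n')
```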
479
479
480
480
481 def _get_filename_from_filelog_index(path):
481 def _get_filename_from_filelog_index(path):
482 # Drop the extension and the `data/` prefix
482 # Drop the extension and the `data/` prefix
483 path_part = path.rsplit(b'.', 1)[0].split(b'/', 1)
483 path_part = path.rsplit(b'.', 1)[0].split(b'/', 1)
484 if len(path_part) < 2:
484 if len(path_part) < 2:
485 msg = _(b"cannot recognize filelog from filename: '%s'")
485 msg = _(b"cannot recognize filelog from filename: '%s'")
486 msg %= path
486 msg %= path
487 raise error.Abort(msg)
487 raise error.Abort(msg)
488
488
489 return path_part[1]
489 return path_part[1]
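For example, a store path such as `b'data/foo/bar.txt.i'` loses the trailing `.i` extension and the leading `data/` component:

```python
# Usage example for the helper above:
assert _get_filename_from_filelog_index(b'data/foo/bar.txt.i') == b'foo/bar.txt'
```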
490
490
491
491
492 def _filelog_from_filename(repo, path):
492 def _filelog_from_filename(repo, path):
493 """Returns the filelog for the given `path`. Stolen from `engine.py`"""
493 """Returns the filelog for the given `path`. Stolen from `engine.py`"""
494
494
495 from .. import filelog # avoid cycle
495 from .. import filelog # avoid cycle
496
496
497 fl = filelog.filelog(repo.svfs, path)
497 fl = filelog.filelog(repo.svfs, path)
498 return fl
498 return fl
499
499
500
500
501 def _write_swapped_parents(repo, rl, rev, offset, fp):
501 def _write_swapped_parents(repo, rl, rev, offset, fp):
502 """Swaps p1 and p2 and overwrites the revlog entry for `rev` in `fp`"""
502 """Swaps p1 and p2 and overwrites the revlog entry for `rev` in `fp`"""
503 from ..pure import parsers # avoid cycle
503 from ..pure import parsers # avoid cycle
504
504
505 if repo._currentlock(repo._lockref) is None:
505 if repo._currentlock(repo._lockref) is None:
506 # Let's be paranoid about it
506 # Let's be paranoid about it
507 msg = "repo needs to be locked to rewrite parents"
507 msg = "repo needs to be locked to rewrite parents"
508 raise error.ProgrammingError(msg)
508 raise error.ProgrammingError(msg)
509
509
510 index_format = parsers.IndexObject.index_format
510 index_format = parsers.IndexObject.index_format
511 entry = rl.index[rev]
511 entry = rl.index[rev]
512 new_entry = list(entry)
512 new_entry = list(entry)
513 new_entry[5], new_entry[6] = entry[6], entry[5]
513 new_entry[5], new_entry[6] = entry[6], entry[5]
514 packed = index_format.pack(*new_entry[:8])
514 packed = index_format.pack(*new_entry[:8])
515 fp.seek(offset)
515 fp.seek(offset)
516 fp.write(packed)
516 fp.write(packed)
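Fields 5 and 6 of an index tuple are the two parent revisions, so the swap above simply exchanges p1 and p2 before re-packing the first eight fields in place. A sketch of the same operation on a raw entry, assuming the v1 on-disk layout (`offset+flags, comp. length, uncomp. length, delta base, link rev, p1, p2, node`, padded to 64 bytes):

```python
import struct

# Assumed revlog v1 index entry layout; the trailing 12x pads the
# 20-byte node out to a 64-byte entry.
INDEX_V1 = struct.Struct(b">Qiiiiii20s12x")

def swap_parents(packed_entry):
    fields = list(INDEX_V1.unpack(packed_entry))
    fields[5], fields[6] = fields[6], fields[5]  # exchange p1 and p2
    return INDEX_V1.pack(*fields)
```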
517
517
518
518
519 def _reorder_filelog_parents(repo, fl, to_fix):
519 def _reorder_filelog_parents(repo, fl, to_fix):
520 """
520 """
521 Swaps p1 and p2 for all `to_fix` revisions of filelog `fl` and writes the
521 Swaps p1 and p2 for all `to_fix` revisions of filelog `fl` and writes the
522 new version to disk, overwriting the old one with a rename.
522 new version to disk, overwriting the old one with a rename.
523 """
523 """
524 from ..pure import parsers # avoid cycle
524 from ..pure import parsers # avoid cycle
525
525
526 ui = repo.ui
526 ui = repo.ui
527 assert len(to_fix) > 0
527 assert len(to_fix) > 0
528 rl = fl._revlog
528 rl = fl._revlog
529 if rl._format_version != constants.REVLOGV1:
529 if rl._format_version != constants.REVLOGV1:
530 msg = "expected version 1 revlog, got version '%d'" % rl._format_version
530 msg = "expected version 1 revlog, got version '%d'" % rl._format_version
531 raise error.ProgrammingError(msg)
531 raise error.ProgrammingError(msg)
532
532
533 index_file = rl._indexfile
533 index_file = rl._indexfile
534 new_file_path = index_file + b'.tmp-parents-fix'
534 new_file_path = index_file + b'.tmp-parents-fix'
535 repaired_msg = _(b"repaired revision %d of 'filelog %s'\n")
535 repaired_msg = _(b"repaired revision %d of 'filelog %s'\n")
536
536
537 with ui.uninterruptible():
537 with ui.uninterruptible():
538 try:
538 try:
539 util.copyfile(
539 util.copyfile(
540 rl.opener.join(index_file),
540 rl.opener.join(index_file),
541 rl.opener.join(new_file_path),
541 rl.opener.join(new_file_path),
542 checkambig=rl._checkambig,
542 checkambig=rl._checkambig,
543 )
543 )
544
544
545 with rl.opener(new_file_path, mode=b"r+") as fp:
545 with rl.opener(new_file_path, mode=b"r+") as fp:
546 if rl._inline:
546 if rl._inline:
547 index = parsers.InlinedIndexObject(fp.read())
547 index = parsers.InlinedIndexObject(fp.read())
548 for rev in fl.revs():
548 for rev in fl.revs():
549 if rev in to_fix:
549 if rev in to_fix:
550 offset = index._calculate_index(rev)
550 offset = index._calculate_index(rev)
551 _write_swapped_parents(repo, rl, rev, offset, fp)
551 _write_swapped_parents(repo, rl, rev, offset, fp)
552 ui.write(repaired_msg % (rev, index_file))
552 ui.write(repaired_msg % (rev, index_file))
553 else:
553 else:
554 index_format = parsers.IndexObject.index_format
554 index_format = parsers.IndexObject.index_format
555 for rev in to_fix:
555 for rev in to_fix:
556 offset = rev * index_format.size
556 offset = rev * index_format.size
557 _write_swapped_parents(repo, rl, rev, offset, fp)
557 _write_swapped_parents(repo, rl, rev, offset, fp)
558 ui.write(repaired_msg % (rev, index_file))
558 ui.write(repaired_msg % (rev, index_file))
559
559
560 rl.opener.rename(new_file_path, index_file)
560 rl.opener.rename(new_file_path, index_file)
561 rl.clearcaches()
561 rl.clearcaches()
562 rl._loadindex()
562 rl._loadindex()
563 finally:
563 finally:
564 util.tryunlink(new_file_path)
564 util.tryunlink(new_file_path)
565
565
566
566
567 def _is_revision_affected(fl, filerev, metadata_cache=None):
567 def _is_revision_affected(fl, filerev, metadata_cache=None):
568 full_text = lambda: fl._revlog.rawdata(filerev)
568 full_text = lambda: fl._revlog.rawdata(filerev)
569 parent_revs = lambda: fl._revlog.parentrevs(filerev)
569 parent_revs = lambda: fl._revlog.parentrevs(filerev)
570 return _is_revision_affected_inner(
570 return _is_revision_affected_inner(
571 full_text, parent_revs, filerev, metadata_cache
571 full_text, parent_revs, filerev, metadata_cache
572 )
572 )
573
573
574
574
575 def _is_revision_affected_inner(
575 def _is_revision_affected_inner(
576 full_text,
576 full_text,
577 parents_revs,
577 parents_revs,
578 filerev,
578 filerev,
579 metadata_cache=None,
579 metadata_cache=None,
580 ):
580 ):
581 """Mercurial currently (5.9rc0) uses `p1 == nullrev and p2 != nullrev` as a
581 """Mercurial currently (5.9rc0) uses `p1 == nullrev and p2 != nullrev` as a
582 special meaning compared to the reverse in the context of filelog-based
582 special meaning compared to the reverse in the context of filelog-based
583 copytracing. issue6528 exists because new code assumed that parent ordering
583 copytracing. issue6528 exists because new code assumed that parent ordering
584 didn't matter, so this detects if the revision contains metadata (since
584 didn't matter, so this detects if the revision contains metadata (since
585 it's only used for filelog-based copytracing) and its parents are in the
585 it's only used for filelog-based copytracing) and its parents are in the
586 "wrong" order."""
586 "wrong" order."""
587 try:
587 try:
588 raw_text = full_text()
588 raw_text = full_text()
589 except error.CensoredNodeError:
589 except error.CensoredNodeError:
590 # We don't care about censored nodes as they never carry metadata
590 # We don't care about censored nodes as they never carry metadata
591 return False
591 return False
592 has_meta = raw_text.startswith(b'\x01\n')
592
593 # raw text can be a `memoryview`, which doesn't implement `startswith`
594 has_meta = bytes(raw_text[:2]) == b'\x01\n'
593 if metadata_cache is not None:
595 if metadata_cache is not None:
594 metadata_cache[filerev] = has_meta
596 metadata_cache[filerev] = has_meta
595 if has_meta:
597 if has_meta:
596 (p1, p2) = parents_revs()
598 (p1, p2) = parents_revs()
597 if p1 != nullrev and p2 == nullrev:
599 if p1 != nullrev and p2 == nullrev:
598 return True
600 return True
599 return False
601 return False
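The diff above replaces `startswith` with a two-byte slice comparison because `rawdata` can return a `memoryview`, which has no `startswith` method; slicing and converting to `bytes` works for both types:

```python
# Why the slice compare: memoryview lacks .startswith().
buf = memoryview(b'\x01\ncopy: foo\n\x01\ncontent')
assert bytes(buf[:2]) == b'\x01\n'
assert bytes(memoryview(b'plain text')[:2]) != b'\x01\n'
```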
600
602
601
603
602 def _is_revision_affected_fast(repo, fl, filerev, metadata_cache):
604 def _is_revision_affected_fast(repo, fl, filerev, metadata_cache):
603 rl = fl._revlog
605 rl = fl._revlog
604 is_censored = lambda: rl.iscensored(filerev)
606 is_censored = lambda: rl.iscensored(filerev)
605 delta_base = lambda: rl.deltaparent(filerev)
607 delta_base = lambda: rl.deltaparent(filerev)
606 delta = lambda: rl._chunk(filerev)
608 delta = lambda: rl._chunk(filerev)
607 full_text = lambda: rl.rawdata(filerev)
609 full_text = lambda: rl.rawdata(filerev)
608 parent_revs = lambda: rl.parentrevs(filerev)
610 parent_revs = lambda: rl.parentrevs(filerev)
609 return _is_revision_affected_fast_inner(
611 return _is_revision_affected_fast_inner(
610 is_censored,
612 is_censored,
611 delta_base,
613 delta_base,
612 delta,
614 delta,
613 full_text,
615 full_text,
614 parent_revs,
616 parent_revs,
615 filerev,
617 filerev,
616 metadata_cache,
618 metadata_cache,
617 )
619 )
618
620
619
621
620 def _is_revision_affected_fast_inner(
622 def _is_revision_affected_fast_inner(
621 is_censored,
623 is_censored,
622 delta_base,
624 delta_base,
623 delta,
625 delta,
624 full_text,
626 full_text,
625 parent_revs,
627 parent_revs,
626 filerev,
628 filerev,
627 metadata_cache,
629 metadata_cache,
628 ):
630 ):
629 """Optimization fast-path for `_is_revision_affected`.
631 """Optimization fast-path for `_is_revision_affected`.
630
632
631 `metadata_cache` is a dict of `{rev: has_metadata}` which allows any
633 `metadata_cache` is a dict of `{rev: has_metadata}` which allows any
632 revision to check if its base has metadata, saving computation of the full
634 revision to check if its base has metadata, saving computation of the full
633 text, instead looking at the current delta.
635 text, instead looking at the current delta.
634
636
635 This optimization only works if the revisions are looked at in order."""
637 This optimization only works if the revisions are looked at in order."""
636
638
637 if is_censored():
639 if is_censored():
638 # Censored revisions don't contain metadata, so they cannot be affected
640 # Censored revisions don't contain metadata, so they cannot be affected
639 metadata_cache[filerev] = False
641 metadata_cache[filerev] = False
640 return False
642 return False
641
643
642 p1, p2 = parent_revs()
644 p1, p2 = parent_revs()
643 if p1 == nullrev or p2 != nullrev:
645 if p1 == nullrev or p2 != nullrev:
644 return False
646 return False
645
647
646 delta_parent = delta_base()
648 delta_parent = delta_base()
647 parent_has_metadata = metadata_cache.get(delta_parent)
649 parent_has_metadata = metadata_cache.get(delta_parent)
648 if parent_has_metadata is None:
650 if parent_has_metadata is None:
649 return _is_revision_affected_inner(
651 return _is_revision_affected_inner(
650 full_text,
652 full_text,
651 parent_revs,
653 parent_revs,
652 filerev,
654 filerev,
653 metadata_cache,
655 metadata_cache,
654 )
656 )
655
657
656 chunk = delta()
658 chunk = delta()
657 if not len(chunk):
659 if not len(chunk):
658 # No diff for this revision
660 # No diff for this revision
659 return parent_has_metadata
661 return parent_has_metadata
660
662
661 header_length = 12
663 header_length = 12
662 if len(chunk) < header_length:
664 if len(chunk) < header_length:
663 raise error.Abort(_(b"patch cannot be decoded"))
665 raise error.Abort(_(b"patch cannot be decoded"))
664
666
665 start, _end, _length = struct.unpack(b">lll", chunk[:header_length])
667 start, _end, _length = struct.unpack(b">lll", chunk[:header_length])
666
668
667 if start < 2: # len(b'\x01\n') == 2
669 if start < 2: # len(b'\x01\n') == 2
668 # This delta does *something* to the metadata marker (if any).
670 # This delta does *something* to the metadata marker (if any).
669 # Check it the slow way
671 # Check it the slow way
670 is_affected = _is_revision_affected_inner(
672 is_affected = _is_revision_affected_inner(
671 full_text,
673 full_text,
672 parent_revs,
674 parent_revs,
673 filerev,
675 filerev,
674 metadata_cache,
676 metadata_cache,
675 )
677 )
676 return is_affected
678 return is_affected
677
679
678 # The diff did not remove or add the metadata header, so this revision
680 # The diff did not remove or add the metadata header, so this revision
679 # is in the same situation as its parent
681 # is in the same situation as its parent
680 metadata_cache[filerev] = parent_has_metadata
682 metadata_cache[filerev] = parent_has_metadata
681 return parent_has_metadata
683 return parent_has_metadata
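The `>lll` unpack reads the first hunk header of Mercurial's binary diff format: each hunk is `(start, end, length)` as big-endian 32-bit integers followed by `length` bytes of replacement data, so a hunk starting below offset 2 can touch the `\x01\n` marker. A sketch of walking such a delta, assuming that format:

```python
import struct

def iter_hunks(delta):
    """Yield (start, end, new_data) triples from a bdiff-style delta."""
    pos = 0
    while pos < len(delta):
        start, end, length = struct.unpack(b'>lll', delta[pos:pos + 12])
        pos += 12
        yield start, end, delta[pos:pos + length]
        pos += length

# A single hunk replacing bytes 0..2 with nothing (strips a marker):
delta = struct.pack(b'>lll', 0, 2, 0)
assert list(iter_hunks(delta)) == [(0, 2, b'')]
```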
682
684
683
685
684 def _from_report(ui, repo, context, from_report, dry_run):
686 def _from_report(ui, repo, context, from_report, dry_run):
685 """
687 """
686 Fix the revisions given in the `from_report` file, but still check if the
688 Fix the revisions given in the `from_report` file, but still check if the
687 revisions are indeed affected to prevent an unfortunate cyclic situation
689 revisions are indeed affected to prevent an unfortunate cyclic situation
688 where we'd swap well-ordered parents again.
690 where we'd swap well-ordered parents again.
689
691
690 See the doc for `debug_fix_issue6528` for the format documentation.
692 See the doc for `debug_fix_issue6528` for the format documentation.
691 """
693 """
692 ui.write(_(b"loading report file '%s'\n") % from_report)
694 ui.write(_(b"loading report file '%s'\n") % from_report)
693
695
694 with context(), open(from_report, mode='rb') as f:
696 with context(), open(from_report, mode='rb') as f:
695 for line in f.read().split(b'\n'):
697 for line in f.read().split(b'\n'):
696 if not line:
698 if not line:
697 continue
699 continue
698 filenodes, filename = line.split(b' ', 1)
700 filenodes, filename = line.split(b' ', 1)
699 fl = _filelog_from_filename(repo, filename)
701 fl = _filelog_from_filename(repo, filename)
700 to_fix = set(
702 to_fix = set(
701 fl.rev(binascii.unhexlify(n)) for n in filenodes.split(b',')
703 fl.rev(binascii.unhexlify(n)) for n in filenodes.split(b',')
702 )
704 )
703 excluded = set()
705 excluded = set()
704
706
705 for filerev in to_fix:
707 for filerev in to_fix:
706 if _is_revision_affected(fl, filerev):
708 if _is_revision_affected(fl, filerev):
707 msg = b"found affected revision %d for filelog '%s'\n"
709 msg = b"found affected revision %d for filelog '%s'\n"
708 ui.warn(msg % (filerev, filename))
710 ui.warn(msg % (filerev, filename))
709 else:
711 else:
710 msg = _(b"revision %s of file '%s' is not affected\n")
712 msg = _(b"revision %s of file '%s' is not affected\n")
711 msg %= (binascii.hexlify(fl.node(filerev)), filename)
713 msg %= (binascii.hexlify(fl.node(filerev)), filename)
712 ui.warn(msg)
714 ui.warn(msg)
713 excluded.add(filerev)
715 excluded.add(filerev)
714
716
715 to_fix = to_fix - excluded
717 to_fix = to_fix - excluded
716 if not to_fix:
718 if not to_fix:
717 msg = _(b"no affected revisions were found for '%s'\n")
719 msg = _(b"no affected revisions were found for '%s'\n")
718 ui.write(msg % filename)
720 ui.write(msg % filename)
719 continue
721 continue
720 if not dry_run:
722 if not dry_run:
721 _reorder_filelog_parents(repo, fl, sorted(to_fix))
723 _reorder_filelog_parents(repo, fl, sorted(to_fix))
722
724
723
725
724 def filter_delta_issue6528(revlog, deltas_iter):
726 def filter_delta_issue6528(revlog, deltas_iter):
725 """filter incomind deltas to repaire issue 6528 on the fly"""
727 """filter incomind deltas to repaire issue 6528 on the fly"""
726 metadata_cache = {}
728 metadata_cache = {}
727
729
728 deltacomputer = deltas.deltacomputer(revlog)
730 deltacomputer = deltas.deltacomputer(revlog)
729
731
730 for rev, d in enumerate(deltas_iter, len(revlog)):
732 for rev, d in enumerate(deltas_iter, len(revlog)):
731 (
733 (
732 node,
734 node,
733 p1_node,
735 p1_node,
734 p2_node,
736 p2_node,
735 linknode,
737 linknode,
736 deltabase,
738 deltabase,
737 delta,
739 delta,
738 flags,
740 flags,
739 sidedata,
741 sidedata,
740 ) = d
742 ) = d
741
743
742 if not revlog.index.has_node(deltabase):
744 if not revlog.index.has_node(deltabase):
743 raise error.LookupError(
745 raise error.LookupError(
744 deltabase, revlog.radix, _(b'unknown parent')
746 deltabase, revlog.radix, _(b'unknown parent')
745 )
747 )
746 base_rev = revlog.rev(deltabase)
748 base_rev = revlog.rev(deltabase)
747 if not revlog.index.has_node(p1_node):
749 if not revlog.index.has_node(p1_node):
748 raise error.LookupError(p1_node, revlog.radix, _(b'unknown parent'))
750 raise error.LookupError(p1_node, revlog.radix, _(b'unknown parent'))
749 p1_rev = revlog.rev(p1_node)
751 p1_rev = revlog.rev(p1_node)
750 if not revlog.index.has_node(p2_node):
752 if not revlog.index.has_node(p2_node):
751 raise error.LookupError(p2_node, revlog.radix, _(b'unknown parent'))
753 raise error.LookupError(p2_node, revlog.radix, _(b'unknown parent'))
752 p2_rev = revlog.rev(p2_node)
754 p2_rev = revlog.rev(p2_node)
753
755
754 is_censored = lambda: bool(flags & REVIDX_ISCENSORED)
756 is_censored = lambda: bool(flags & REVIDX_ISCENSORED)
756 delta_base = lambda: base_rev
758 delta_base = lambda: base_rev
757 parent_revs = lambda: (p1_rev, p2_rev)
759 parent_revs = lambda: (p1_rev, p2_rev)
758
760
759 def full_text():
761 def full_text():
760 # note: being able to reuse the full text computation in the
762 # note: being able to reuse the full text computation in the
761 # underlying addrevision would be useful; however, this is a bit too
763 # underlying addrevision would be useful; however, this is a bit too
762 # intrusive for the "quick" issue6528 fix we are writing before the
764 # intrusive for the "quick" issue6528 fix we are writing before the
763 # 5.8 release
765 # 5.8 release
764 textlen = mdiff.patchedsize(revlog.size(base_rev), delta)
766 textlen = mdiff.patchedsize(revlog.size(base_rev), delta)
765
767
766 revinfo = revlogutils.revisioninfo(
768 revinfo = revlogutils.revisioninfo(
767 node,
769 node,
768 p1_node,
770 p1_node,
769 p2_node,
771 p2_node,
770 [None],
772 [None],
771 textlen,
773 textlen,
772 (base_rev, delta),
774 (base_rev, delta),
773 flags,
775 flags,
774 )
776 )
775 # cached by the global "writing" context
777 # cached by the global "writing" context
776 assert revlog._writinghandles is not None
778 assert revlog._writinghandles is not None
777 if revlog._inline:
779 if revlog._inline:
778 fh = revlog._writinghandles[0]
780 fh = revlog._writinghandles[0]
779 else:
781 else:
780 fh = revlog._writinghandles[1]
782 fh = revlog._writinghandles[1]
781 return deltacomputer.buildtext(revinfo, fh)
783 return deltacomputer.buildtext(revinfo, fh)
782
784
783 is_affected = _is_revision_affected_fast_inner(
785 is_affected = _is_revision_affected_fast_inner(
784 is_censored,
786 is_censored,
785 delta_base,
787 delta_base,
786 lambda: delta,
788 lambda: delta,
787 full_text,
789 full_text,
788 parent_revs,
790 parent_revs,
789 rev,
791 rev,
790 metadata_cache,
792 metadata_cache,
791 )
793 )
792 if is_affected:
794 if is_affected:
793 d = (
795 d = (
794 node,
796 node,
795 p2_node,
797 p2_node,
796 p1_node,
798 p1_node,
797 linknode,
799 linknode,
798 deltabase,
800 deltabase,
799 delta,
801 delta,
800 flags,
802 flags,
801 sidedata,
803 sidedata,
802 )
804 )
803 yield d
805 yield d
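`filter_delta_issue6528` is a pass-through generator: it consumes the incoming delta tuples, swaps `p1_node` and `p2_node` for affected revisions, and yields every tuple unchanged otherwise. A hedged sketch of how such a filter would be wired (the names here are illustrative, not an actual call site):

```python
# Hypothetical wiring: wrap an incoming delta stream before applying it.
def add_deltas_with_repair(revlog, incoming_deltas, apply_delta):
    for d in filter_delta_issue6528(revlog, incoming_deltas):
        apply_delta(revlog, d)  # each `d` now has a corrected parent order
```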
804
806
805
807
806 def repair_issue6528(
808 def repair_issue6528(
807 ui, repo, dry_run=False, to_report=None, from_report=None, paranoid=False
809 ui, repo, dry_run=False, to_report=None, from_report=None, paranoid=False
808 ):
810 ):
809 from .. import store # avoid cycle
811 from .. import store # avoid cycle
810
812
811 @contextlib.contextmanager
813 @contextlib.contextmanager
812 def context():
814 def context():
813 if dry_run or to_report: # No need for locking
815 if dry_run or to_report: # No need for locking
814 yield
816 yield
815 else:
817 else:
816 with repo.wlock(), repo.lock():
818 with repo.wlock(), repo.lock():
817 yield
819 yield
818
820
819 if from_report:
821 if from_report:
820 return _from_report(ui, repo, context, from_report, dry_run)
822 return _from_report(ui, repo, context, from_report, dry_run)
821
823
822 report_entries = []
824 report_entries = []
823
825
824 with context():
826 with context():
825 files = list(
827 files = list(
826 (file_type, path)
828 (file_type, path)
827 for (file_type, path, _s) in repo.store.datafiles()
829 for (file_type, path, _s) in repo.store.datafiles()
828 if path.endswith(b'.i') and file_type & store.FILEFLAGS_FILELOG
830 if path.endswith(b'.i') and file_type & store.FILEFLAGS_FILELOG
829 )
831 )
830
832
831 progress = ui.makeprogress(
833 progress = ui.makeprogress(
832 _(b"looking for affected revisions"),
834 _(b"looking for affected revisions"),
833 unit=_(b"filelogs"),
835 unit=_(b"filelogs"),
834 total=len(files),
836 total=len(files),
835 )
837 )
836 found_nothing = True
838 found_nothing = True
837
839
838 for file_type, path in files:
840 for file_type, path in files:
839 if (
841 if (
840 not path.endswith(b'.i')
842 not path.endswith(b'.i')
841 or not file_type & store.FILEFLAGS_FILELOG
843 or not file_type & store.FILEFLAGS_FILELOG
842 ):
844 ):
843 continue
845 continue
844 progress.increment()
846 progress.increment()
845 filename = _get_filename_from_filelog_index(path)
847 filename = _get_filename_from_filelog_index(path)
846 fl = _filelog_from_filename(repo, filename)
848 fl = _filelog_from_filename(repo, filename)
847
849
848 # Set of filerevs (or hex filenodes if `to_report`) that need fixing
850 # Set of filerevs (or hex filenodes if `to_report`) that need fixing
849 to_fix = set()
851 to_fix = set()
850 metadata_cache = {}
852 metadata_cache = {}
851 for filerev in fl.revs():
853 for filerev in fl.revs():
852 affected = _is_revision_affected_fast(
854 affected = _is_revision_affected_fast(
853 repo, fl, filerev, metadata_cache
855 repo, fl, filerev, metadata_cache
854 )
856 )
855 if paranoid:
857 if paranoid:
856 slow = _is_revision_affected(fl, filerev)
858 slow = _is_revision_affected(fl, filerev)
857 if slow != affected:
859 if slow != affected:
858 msg = _(b"paranoid check failed for '%s' at node %s")
860 msg = _(b"paranoid check failed for '%s' at node %s")
859 node = binascii.hexlify(fl.node(filerev))
861 node = binascii.hexlify(fl.node(filerev))
860 raise error.Abort(msg % (filename, node))
862 raise error.Abort(msg % (filename, node))
861 if affected:
863 if affected:
862 msg = b"found affected revision %d for filelog '%s'\n"
864 msg = b"found affected revision %d for filelog '%s'\n"
863 ui.warn(msg % (filerev, path))
865 ui.warn(msg % (filerev, path))
864 found_nothing = False
866 found_nothing = False
865 if not dry_run:
867 if not dry_run:
866 if to_report:
868 if to_report:
867 to_fix.add(binascii.hexlify(fl.node(filerev)))
869 to_fix.add(binascii.hexlify(fl.node(filerev)))
868 else:
870 else:
869 to_fix.add(filerev)
871 to_fix.add(filerev)
870
872
871 if to_fix:
873 if to_fix:
872 to_fix = sorted(to_fix)
874 to_fix = sorted(to_fix)
873 if to_report:
875 if to_report:
874 report_entries.append((filename, to_fix))
876 report_entries.append((filename, to_fix))
875 else:
877 else:
876 _reorder_filelog_parents(repo, fl, to_fix)
878 _reorder_filelog_parents(repo, fl, to_fix)
877
879
878 if found_nothing:
880 if found_nothing:
879 ui.write(_(b"no affected revisions were found\n"))
881 ui.write(_(b"no affected revisions were found\n"))
880
882
881 if to_report and report_entries:
883 if to_report and report_entries:
882 with open(to_report, mode="wb") as f:
884 with open(to_report, mode="wb") as f:
883 for path, to_fix in report_entries:
885 for path, to_fix in report_entries:
884 f.write(b"%s %s\n" % (b",".join(to_fix), path))
886 f.write(b"%s %s\n" % (b",".join(to_fix), path))
885
887
886 progress.complete()
888 progress.complete()
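The report file uses one line per filelog: a comma-separated list of affected hex filenodes, a space, then the filename, which is exactly the layout `_from_report` parses back. A small sketch of reading such a line, assuming that format:

```python
def parse_report_line(line):
    """Split b'<hex1>,<hex2> <filename>' into (filename, [hexnodes])."""
    filenodes, filename = line.split(b' ', 1)
    return filename, filenodes.split(b',')

filename, nodes = parse_report_line(b'abc123,def456 foo/bar.txt')
assert filename == b'foo/bar.txt' and nodes == [b'abc123', b'def456']
```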
@@ -1,529 +1,512 b''
1 // revlog.rs
1 // revlog.rs
2 //
2 //
3 // Copyright 2019-2020 Georges Racinet <georges.racinet@octobus.net>
3 // Copyright 2019-2020 Georges Racinet <georges.racinet@octobus.net>
4 //
4 //
5 // This software may be used and distributed according to the terms of the
5 // This software may be used and distributed according to the terms of the
6 // GNU General Public License version 2 or any later version.
6 // GNU General Public License version 2 or any later version.
7
7
8 use crate::{
8 use crate::{
9 cindex,
9 cindex,
10 utils::{node_from_py_bytes, node_from_py_object},
10 utils::{node_from_py_bytes, node_from_py_object},
11 };
11 };
12 use cpython::{
12 use cpython::{
13 buffer::{Element, PyBuffer},
13 buffer::{Element, PyBuffer},
14 exc::{IndexError, ValueError},
14 exc::{IndexError, ValueError},
15 ObjectProtocol, PyBytes, PyClone, PyDict, PyErr, PyInt, PyModule,
15 ObjectProtocol, PyBytes, PyClone, PyDict, PyErr, PyInt, PyModule,
16 PyObject, PyResult, PyString, PyTuple, Python, PythonObject, ToPyObject,
16 PyObject, PyResult, PyString, PyTuple, Python, PythonObject, ToPyObject,
17 };
17 };
18 use hg::{
18 use hg::{
19 nodemap::{Block, NodeMapError, NodeTree},
19 nodemap::{Block, NodeMapError, NodeTree},
20 revlog::{nodemap::NodeMap, NodePrefix, RevlogIndex},
20 revlog::{nodemap::NodeMap, NodePrefix, RevlogIndex},
21 Revision,
21 Revision,
22 };
22 };
23 use std::cell::RefCell;
23 use std::cell::RefCell;
24
24
25 /// Return a struct implementing the Graph trait
25 /// Return a struct implementing the Graph trait
26 pub(crate) fn pyindex_to_graph(
26 pub(crate) fn pyindex_to_graph(
27 py: Python,
27 py: Python,
28 index: PyObject,
28 index: PyObject,
29 ) -> PyResult<cindex::Index> {
29 ) -> PyResult<cindex::Index> {
30 match index.extract::<MixedIndex>(py) {
30 match index.extract::<MixedIndex>(py) {
31 Ok(midx) => Ok(midx.clone_cindex(py)),
31 Ok(midx) => Ok(midx.clone_cindex(py)),
32 Err(_) => cindex::Index::new(py, index),
32 Err(_) => cindex::Index::new(py, index),
33 }
33 }
34 }
34 }
35
35
36 py_class!(pub class MixedIndex |py| {
36 py_class!(pub class MixedIndex |py| {
37 data cindex: RefCell<cindex::Index>;
37 data cindex: RefCell<cindex::Index>;
38 data nt: RefCell<Option<NodeTree>>;
38 data nt: RefCell<Option<NodeTree>>;
39 data docket: RefCell<Option<PyObject>>;
39 data docket: RefCell<Option<PyObject>>;
40 // Holds a reference to the mmap'ed persistent nodemap data
40 // Holds a reference to the mmap'ed persistent nodemap data
41 data mmap: RefCell<Option<PyBuffer>>;
41 data mmap: RefCell<Option<PyBuffer>>;
42
42
43 def __new__(_cls, cindex: PyObject) -> PyResult<MixedIndex> {
43 def __new__(_cls, cindex: PyObject) -> PyResult<MixedIndex> {
44 Self::new(py, cindex)
44 Self::new(py, cindex)
45 }
45 }
46
46
47 /// Compatibility layer used for Python consumers needing access to the C index
47 /// Compatibility layer used for Python consumers needing access to the C index
48 ///
48 ///
49 /// Only use case so far is `scmutil.shortesthexnodeidprefix`,
49 /// Only use case so far is `scmutil.shortesthexnodeidprefix`,
50 /// that may need to build a custom `nodetree`, based on a specified revset.
50 /// that may need to build a custom `nodetree`, based on a specified revset.
51 /// With a Rust implementation of the nodemap, we will be able to get rid of
51 /// With a Rust implementation of the nodemap, we will be able to get rid of
52 /// this, by exposing our own standalone nodemap class,
52 /// this, by exposing our own standalone nodemap class,
53 /// ready to accept `MixedIndex`.
53 /// ready to accept `MixedIndex`.
54 def get_cindex(&self) -> PyResult<PyObject> {
54 def get_cindex(&self) -> PyResult<PyObject> {
55 Ok(self.cindex(py).borrow().inner().clone_ref(py))
55 Ok(self.cindex(py).borrow().inner().clone_ref(py))
56 }
56 }
57
57
58 // Index API involving nodemap, as defined in mercurial/pure/parsers.py
58 // Index API involving nodemap, as defined in mercurial/pure/parsers.py
59
59
60 /// Return Revision if found; raises a bare `error.RevlogError`
60 /// Return Revision if found; raises a bare `error.RevlogError`
61 /// in case of ambiguity, same as the C version does
61 /// in case of ambiguity, same as the C version does
62 def get_rev(&self, pynode: PyBytes) -> PyResult<Option<Revision>> {
62 def get_rev(&self, node: PyBytes) -> PyResult<Option<Revision>> {
63 let opt = self.get_nodetree(py)?.borrow();
63 let opt = self.get_nodetree(py)?.borrow();
64 let nt = opt.as_ref().unwrap();
64 let nt = opt.as_ref().unwrap();
65 let idx = &*self.cindex(py).borrow();
65 let idx = &*self.cindex(py).borrow();
66 let node = node_from_py_bytes(py, &pynode)?;
66 let node = node_from_py_bytes(py, &node)?;
67 match nt.find_bin(idx, node.into())
67 nt.find_bin(idx, node.into()).map_err(|e| nodemap_error(py, e))
68 {
69 Ok(None) =>
70 // fallback to C implementation, remove once
71 // https://bz.mercurial-scm.org/show_bug.cgi?id=6554
72 // is fixed (a simple backout should do)
73 self.call_cindex(py, "get_rev", &PyTuple::new(py, &[pynode.into_object()]), None)?
74 .extract(py),
75 Ok(Some(rev)) => Ok(Some(rev)),
76 Err(e) => Err(nodemap_error(py, e)),
77 }
78 }
68 }
79
69
80 /// same as `get_rev()` but raises a bare `error.RevlogError` if node
70 /// same as `get_rev()` but raises a bare `error.RevlogError` if node
81 /// is not found.
71 /// is not found.
82 ///
72 ///
83 /// No need to repeat `node` in the exception, `mercurial/revlog.py`
73 /// No need to repeat `node` in the exception, `mercurial/revlog.py`
84 /// will catch and rewrap with it
74 /// will catch and rewrap with it
85 def rev(&self, node: PyBytes) -> PyResult<Revision> {
75 def rev(&self, node: PyBytes) -> PyResult<Revision> {
86 self.get_rev(py, node)?.ok_or_else(|| revlog_error(py))
76 self.get_rev(py, node)?.ok_or_else(|| revlog_error(py))
87 }
77 }
88
78
89 /// return True if the node exist in the index
79 /// return True if the node exist in the index
90 def has_node(&self, node: PyBytes) -> PyResult<bool> {
80 def has_node(&self, node: PyBytes) -> PyResult<bool> {
91 self.get_rev(py, node).map(|opt| opt.is_some())
81 self.get_rev(py, node).map(|opt| opt.is_some())
92 }
82 }
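From Python, these three entry points behave like the C index they wrap: `get_rev` returns the revision number or `None`, `rev` raises a bare `error.RevlogError` when the node is missing, and `has_node` is a boolean convenience on top of `get_rev`. A hedged sketch of that contract, where `idx` stands for any MixedIndex instance:

```python
# Expected behaviour of the nodemap-backed lookups (idx: a MixedIndex):
def lookup(idx, node):
    rev = idx.get_rev(node)        # Revision or None
    assert idx.has_node(node) == (rev is not None)
    if rev is None:
        return None
    assert idx.rev(node) == rev    # would raise RevlogError if absent
    return rev
```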
93
83
94 /// find the length of the shortest hex nodeid of a binary ID
84 /// find the length of the shortest hex nodeid of a binary ID
95 def shortest(&self, node: PyBytes) -> PyResult<usize> {
85 def shortest(&self, node: PyBytes) -> PyResult<usize> {
96 let opt = self.get_nodetree(py)?.borrow();
86 let opt = self.get_nodetree(py)?.borrow();
97 let nt = opt.as_ref().unwrap();
87 let nt = opt.as_ref().unwrap();
98 let idx = &*self.cindex(py).borrow();
88 let idx = &*self.cindex(py).borrow();
99 match nt.unique_prefix_len_node(idx, &node_from_py_bytes(py, &node)?)
89 match nt.unique_prefix_len_node(idx, &node_from_py_bytes(py, &node)?)
100 {
90 {
101 Ok(Some(l)) => Ok(l),
91 Ok(Some(l)) => Ok(l),
102 Ok(None) => Err(revlog_error(py)),
92 Ok(None) => Err(revlog_error(py)),
103 Err(e) => Err(nodemap_error(py, e)),
93 Err(e) => Err(nodemap_error(py, e)),
104 }
94 }
105 }
95 }
106
96
107 def partialmatch(&self, pynode: PyObject) -> PyResult<Option<PyBytes>> {
97 def partialmatch(&self, node: PyObject) -> PyResult<Option<PyBytes>> {
108 let opt = self.get_nodetree(py)?.borrow();
98 let opt = self.get_nodetree(py)?.borrow();
109 let nt = opt.as_ref().unwrap();
99 let nt = opt.as_ref().unwrap();
110 let idx = &*self.cindex(py).borrow();
100 let idx = &*self.cindex(py).borrow();
111
101
112 let node_as_string = if cfg!(feature = "python3-sys") {
102 let node_as_string = if cfg!(feature = "python3-sys") {
113 pynode.cast_as::<PyString>(py)?.to_string(py)?.to_string()
103 node.cast_as::<PyString>(py)?.to_string(py)?.to_string()
114 }
104 }
115 else {
105 else {
116 let node = pynode.extract::<PyBytes>(py)?;
106 let node = node.extract::<PyBytes>(py)?;
117 String::from_utf8_lossy(node.data(py)).to_string()
107 String::from_utf8_lossy(node.data(py)).to_string()
118 };
108 };
119
109
120 let prefix = NodePrefix::from_hex(&node_as_string).map_err(|_| PyErr::new::<ValueError, _>(py, "Invalid node or prefix"))?;
110 let prefix = NodePrefix::from_hex(&node_as_string).map_err(|_| PyErr::new::<ValueError, _>(py, "Invalid node or prefix"))?;
121
111
122 match nt.find_bin(idx, prefix) {
112 nt.find_bin(idx, prefix)
123 Ok(None) =>
113 // TODO make an inner API returning the node directly
124 // fallback to C implementation, remove once
114 .map(|opt| opt.map(
125 // https://bz.mercurial-scm.org/show_bug.cgi?id=6554
115 |rev| PyBytes::new(py, idx.node(rev).unwrap().as_bytes())))
126 // is fixed (a simple backout should do)
116 .map_err(|e| nodemap_error(py, e))
127 self.call_cindex(
117
128 py, "partialmatch",
129 &PyTuple::new(py, &[pynode]), None
130 )?.extract(py),
131 Ok(Some(rev)) =>
132 Ok(Some(PyBytes::new(py, idx.node(rev).unwrap().as_bytes()))),
133 Err(e) => Err(nodemap_error(py, e)),
134 }
135 }
118 }
136
119
137 /// append an index entry
120 /// append an index entry
138 def append(&self, tup: PyTuple) -> PyResult<PyObject> {
121 def append(&self, tup: PyTuple) -> PyResult<PyObject> {
139 if tup.len(py) < 8 {
122 if tup.len(py) < 8 {
140 // this is better than the panic promised by tup.get_item()
123 // this is better than the panic promised by tup.get_item()
141 return Err(
124 return Err(
142 PyErr::new::<IndexError, _>(py, "tuple index out of range"))
125 PyErr::new::<IndexError, _>(py, "tuple index out of range"))
143 }
126 }
144 let node_bytes = tup.get_item(py, 7).extract(py)?;
127 let node_bytes = tup.get_item(py, 7).extract(py)?;
145 let node = node_from_py_object(py, &node_bytes)?;
128 let node = node_from_py_object(py, &node_bytes)?;
146
129
147 let mut idx = self.cindex(py).borrow_mut();
130 let mut idx = self.cindex(py).borrow_mut();
148 let rev = idx.len() as Revision;
131 let rev = idx.len() as Revision;
149
132
150 idx.append(py, tup)?;
133 idx.append(py, tup)?;
151 self.get_nodetree(py)?.borrow_mut().as_mut().unwrap()
134 self.get_nodetree(py)?.borrow_mut().as_mut().unwrap()
152 .insert(&*idx, &node, rev)
135 .insert(&*idx, &node, rev)
153 .map_err(|e| nodemap_error(py, e))?;
136 .map_err(|e| nodemap_error(py, e))?;
154 Ok(py.None())
137 Ok(py.None())
155 }
138 }
156
139
157 def __delitem__(&self, key: PyObject) -> PyResult<()> {
140 def __delitem__(&self, key: PyObject) -> PyResult<()> {
158 // __delitem__ is both for `del idx[r]` and `del idx[r1:r2]`
141 // __delitem__ is both for `del idx[r]` and `del idx[r1:r2]`
159 self.cindex(py).borrow().inner().del_item(py, key)?;
142 self.cindex(py).borrow().inner().del_item(py, key)?;
160 let mut opt = self.get_nodetree(py)?.borrow_mut();
143 let mut opt = self.get_nodetree(py)?.borrow_mut();
161 let mut nt = opt.as_mut().unwrap();
144 let mut nt = opt.as_mut().unwrap();
162 nt.invalidate_all();
145 nt.invalidate_all();
163 self.fill_nodemap(py, &mut nt)?;
146 self.fill_nodemap(py, &mut nt)?;
164 Ok(())
147 Ok(())
165 }
148 }
166
149
167 //
150 //
168 // Reforwarded C index API
151 // Reforwarded C index API
169 //
152 //
170
153
171 // index_methods (tp_methods). Same ordering as in revlog.c
154 // index_methods (tp_methods). Same ordering as in revlog.c
172
155
173 /// return the gca set of the given revs
156 /// return the gca set of the given revs
174 def ancestors(&self, *args, **kw) -> PyResult<PyObject> {
157 def ancestors(&self, *args, **kw) -> PyResult<PyObject> {
175 self.call_cindex(py, "ancestors", args, kw)
158 self.call_cindex(py, "ancestors", args, kw)
176 }
159 }
177
160
178 /// return the heads of the common ancestors of the given revs
161 /// return the heads of the common ancestors of the given revs
179 def commonancestorsheads(&self, *args, **kw) -> PyResult<PyObject> {
162 def commonancestorsheads(&self, *args, **kw) -> PyResult<PyObject> {
180 self.call_cindex(py, "commonancestorsheads", args, kw)
163 self.call_cindex(py, "commonancestorsheads", args, kw)
181 }
164 }
182
165
183 /// Clear the index caches and inner py_class data.
166 /// Clear the index caches and inner py_class data.
184 /// It is Python's responsibility to call `update_nodemap_data` again.
167 /// It is Python's responsibility to call `update_nodemap_data` again.
185 def clearcaches(&self, *args, **kw) -> PyResult<PyObject> {
168 def clearcaches(&self, *args, **kw) -> PyResult<PyObject> {
186 self.nt(py).borrow_mut().take();
169 self.nt(py).borrow_mut().take();
187 self.docket(py).borrow_mut().take();
170 self.docket(py).borrow_mut().take();
188 self.mmap(py).borrow_mut().take();
171 self.mmap(py).borrow_mut().take();
189 self.call_cindex(py, "clearcaches", args, kw)
172 self.call_cindex(py, "clearcaches", args, kw)
190 }
173 }
191
174
192 /// return the raw binary string representing a revision
175 /// return the raw binary string representing a revision
193 def entry_binary(&self, *args, **kw) -> PyResult<PyObject> {
176 def entry_binary(&self, *args, **kw) -> PyResult<PyObject> {
194 self.call_cindex(py, "entry_binary", args, kw)
177 self.call_cindex(py, "entry_binary", args, kw)
195 }
178 }
196
179
197 /// return a binary packed version of the header
180 /// return a binary packed version of the header
198 def pack_header(&self, *args, **kw) -> PyResult<PyObject> {
181 def pack_header(&self, *args, **kw) -> PyResult<PyObject> {
199 self.call_cindex(py, "pack_header", args, kw)
182 self.call_cindex(py, "pack_header", args, kw)
200 }
183 }
201
184
202 /// get an index entry
185 /// get an index entry
203 def get(&self, *args, **kw) -> PyResult<PyObject> {
186 def get(&self, *args, **kw) -> PyResult<PyObject> {
204 self.call_cindex(py, "get", args, kw)
187 self.call_cindex(py, "get", args, kw)
205 }
188 }
206
189
207 /// compute phases
190 /// compute phases
208 def computephasesmapsets(&self, *args, **kw) -> PyResult<PyObject> {
191 def computephasesmapsets(&self, *args, **kw) -> PyResult<PyObject> {
209 self.call_cindex(py, "computephasesmapsets", args, kw)
192 self.call_cindex(py, "computephasesmapsets", args, kw)
210 }
193 }
211
194
212 /// reachableroots
195 /// reachableroots
213 def reachableroots2(&self, *args, **kw) -> PyResult<PyObject> {
196 def reachableroots2(&self, *args, **kw) -> PyResult<PyObject> {
214 self.call_cindex(py, "reachableroots2", args, kw)
197 self.call_cindex(py, "reachableroots2", args, kw)
215 }
198 }
216
199
217 /// get head revisions
200 /// get head revisions
218 def headrevs(&self, *args, **kw) -> PyResult<PyObject> {
201 def headrevs(&self, *args, **kw) -> PyResult<PyObject> {
219 self.call_cindex(py, "headrevs", args, kw)
202 self.call_cindex(py, "headrevs", args, kw)
220 }
203 }
221
204
222 /// get filtered head revisions
205 /// get filtered head revisions
223 def headrevsfiltered(&self, *args, **kw) -> PyResult<PyObject> {
206 def headrevsfiltered(&self, *args, **kw) -> PyResult<PyObject> {
224 self.call_cindex(py, "headrevsfiltered", args, kw)
207 self.call_cindex(py, "headrevsfiltered", args, kw)
225 }
208 }
226
209
227 /// True if the object is a snapshot
210 /// True if the object is a snapshot
228 def issnapshot(&self, *args, **kw) -> PyResult<PyObject> {
211 def issnapshot(&self, *args, **kw) -> PyResult<PyObject> {
229 self.call_cindex(py, "issnapshot", args, kw)
212 self.call_cindex(py, "issnapshot", args, kw)
230 }
213 }
231
214
232 /// Gather snapshot data in a cache dict
215 /// Gather snapshot data in a cache dict
233 def findsnapshots(&self, *args, **kw) -> PyResult<PyObject> {
216 def findsnapshots(&self, *args, **kw) -> PyResult<PyObject> {
234 self.call_cindex(py, "findsnapshots", args, kw)
217 self.call_cindex(py, "findsnapshots", args, kw)
235 }
218 }
236
219
237 /// determine revisions with deltas to reconstruct fulltext
220 /// determine revisions with deltas to reconstruct fulltext
238 def deltachain(&self, *args, **kw) -> PyResult<PyObject> {
221 def deltachain(&self, *args, **kw) -> PyResult<PyObject> {
239 self.call_cindex(py, "deltachain", args, kw)
222 self.call_cindex(py, "deltachain", args, kw)
240 }
223 }
241
224
242 /// slice planned chunk read to reach a density threshold
225 /// slice planned chunk read to reach a density threshold
243 def slicechunktodensity(&self, *args, **kw) -> PyResult<PyObject> {
226 def slicechunktodensity(&self, *args, **kw) -> PyResult<PyObject> {
244 self.call_cindex(py, "slicechunktodensity", args, kw)
227 self.call_cindex(py, "slicechunktodensity", args, kw)
245 }
228 }
246
229
247 /// stats for the index
230 /// stats for the index
248 def stats(&self, *args, **kw) -> PyResult<PyObject> {
231 def stats(&self, *args, **kw) -> PyResult<PyObject> {
249 self.call_cindex(py, "stats", args, kw)
232 self.call_cindex(py, "stats", args, kw)
250 }
233 }
251
234
252 // index_sequence_methods and index_mapping_methods.
235 // index_sequence_methods and index_mapping_methods.
253 //
236 //
254 // Since we call back through the high level Python API,
237 // Since we call back through the high level Python API,
255 // there's no point making a distinction between index_get
238 // there's no point making a distinction between index_get
256 // and index_getitem.
239 // and index_getitem.
257
240
258 def __len__(&self) -> PyResult<usize> {
241 def __len__(&self) -> PyResult<usize> {
259 self.cindex(py).borrow().inner().len(py)
242 self.cindex(py).borrow().inner().len(py)
260 }
243 }
261
244
262 def __getitem__(&self, key: PyObject) -> PyResult<PyObject> {
245 def __getitem__(&self, key: PyObject) -> PyResult<PyObject> {
263 // this conversion seems needless, but that's actually because
246 // this conversion seems needless, but that's actually because
264 // `index_getitem` does not handle conversion from PyLong,
247 // `index_getitem` does not handle conversion from PyLong,
265 // which expressions such as [e for e in index] internally use.
248 // which expressions such as [e for e in index] internally use.
266 // Note that we don't seem to have a direct way to call
249 // Note that we don't seem to have a direct way to call
267 // PySequence_GetItem (does the job), which would possibly be better
250 // PySequence_GetItem (does the job), which would possibly be better
268 // for performance
251 // for performance
269 let key = match key.extract::<Revision>(py) {
252 let key = match key.extract::<Revision>(py) {
270 Ok(rev) => rev.to_py_object(py).into_object(),
253 Ok(rev) => rev.to_py_object(py).into_object(),
271 Err(_) => key,
254 Err(_) => key,
272 };
255 };
273 self.cindex(py).borrow().inner().get_item(py, key)
256 self.cindex(py).borrow().inner().get_item(py, key)
274 }
257 }
275
258
276 def __setitem__(&self, key: PyObject, value: PyObject) -> PyResult<()> {
259 def __setitem__(&self, key: PyObject, value: PyObject) -> PyResult<()> {
277 self.cindex(py).borrow().inner().set_item(py, key, value)
260 self.cindex(py).borrow().inner().set_item(py, key, value)
278 }
261 }
279
262
280 def __contains__(&self, item: PyObject) -> PyResult<bool> {
263 def __contains__(&self, item: PyObject) -> PyResult<bool> {
281 // ObjectProtocol does not seem to provide contains(), so
264 // ObjectProtocol does not seem to provide contains(), so
282 // this is an equivalent implementation of the index_contains()
265 // this is an equivalent implementation of the index_contains()
283 // defined in revlog.c
266 // defined in revlog.c
284 let cindex = self.cindex(py).borrow();
267 let cindex = self.cindex(py).borrow();
285 match item.extract::<Revision>(py) {
268 match item.extract::<Revision>(py) {
286 Ok(rev) => {
269 Ok(rev) => {
287 Ok(rev >= -1 && rev < cindex.inner().len(py)? as Revision)
270 Ok(rev >= -1 && rev < cindex.inner().len(py)? as Revision)
288 }
271 }
289 Err(_) => {
272 Err(_) => {
290 cindex.inner().call_method(
273 cindex.inner().call_method(
291 py,
274 py,
292 "has_node",
275 "has_node",
293 PyTuple::new(py, &[item]),
276 PyTuple::new(py, &[item]),
294 None)?
277 None)?
295 .extract(py)
278 .extract(py)
296 }
279 }
297 }
280 }
298 }
281 }
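
    // Illustration (added commentary, not in the original source): from
    // Python, `__contains__` above accepts either a revision number or a
    // node hash, so with `idx` a MixedIndex one can write:
    //
    //     -1 in idx        # true: -1 is the conventional null revision
    //     len(idx) in idx  # false: valid revisions are 0..len-1
    //     node in idx      # bytes fall through to the C index's has_node()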

    def nodemap_data_all(&self) -> PyResult<PyBytes> {
        self.inner_nodemap_data_all(py)
    }

    def nodemap_data_incremental(&self) -> PyResult<PyObject> {
        self.inner_nodemap_data_incremental(py)
    }

    def update_nodemap_data(
        &self,
        docket: PyObject,
        nm_data: PyObject
    ) -> PyResult<PyObject> {
        self.inner_update_nodemap_data(py, docket, nm_data)
    }

    @property
    def entry_size(&self) -> PyResult<PyInt> {
        self.cindex(py).borrow().inner().getattr(py, "entry_size")?.extract::<PyInt>(py)
    }

    @property
    def rust_ext_compat(&self) -> PyResult<PyInt> {
        self.cindex(py).borrow().inner().getattr(py, "rust_ext_compat")?.extract::<PyInt>(py)
    }

});

impl MixedIndex {
    fn new(py: Python, cindex: PyObject) -> PyResult<MixedIndex> {
        Self::create_instance(
            py,
            RefCell::new(cindex::Index::new(py, cindex)?),
            RefCell::new(None),
            RefCell::new(None),
            RefCell::new(None),
        )
    }

    /// This is scaffolding at this point, but it could also become
    /// a way to start a persistent nodemap or perform a
    /// vacuum / repack operation
    fn fill_nodemap(
        &self,
        py: Python,
        nt: &mut NodeTree,
    ) -> PyResult<PyObject> {
        let index = self.cindex(py).borrow();
        for r in 0..index.len() {
            let rev = r as Revision;
            // in this case node() won't ever return None
            nt.insert(&*index, index.node(rev).unwrap(), rev)
                .map_err(|e| nodemap_error(py, e))?
        }
        Ok(py.None())
    }

    fn get_nodetree<'a>(
        &'a self,
        py: Python<'a>,
    ) -> PyResult<&'a RefCell<Option<NodeTree>>> {
        if self.nt(py).borrow().is_none() {
            let readonly = Box::new(Vec::new());
            let mut nt = NodeTree::load_bytes(readonly, 0);
            self.fill_nodemap(py, &mut nt)?;
            self.nt(py).borrow_mut().replace(nt);
        }
        Ok(self.nt(py))
    }

    /// forward a method call to the underlying C index
    fn call_cindex(
        &self,
        py: Python,
        name: &str,
        args: &PyTuple,
        kwargs: Option<&PyDict>,
    ) -> PyResult<PyObject> {
        self.cindex(py)
            .borrow()
            .inner()
            .call_method(py, name, args, kwargs)
    }

    pub fn clone_cindex(&self, py: Python) -> cindex::Index {
        self.cindex(py).borrow().clone_ref(py)
    }

    /// Returns the full nodemap bytes to be written as-is to disk
    fn inner_nodemap_data_all(&self, py: Python) -> PyResult<PyBytes> {
        let nodemap = self.get_nodetree(py)?.borrow_mut().take().unwrap();
        let (readonly, bytes) = nodemap.into_readonly_and_added_bytes();

        // If there's anything readonly, we need to build the data again from
        // scratch
        let bytes = if readonly.len() > 0 {
            let mut nt = NodeTree::load_bytes(Box::new(vec![]), 0);
            self.fill_nodemap(py, &mut nt)?;

            let (readonly, bytes) = nt.into_readonly_and_added_bytes();
            assert_eq!(readonly.len(), 0);

            bytes
        } else {
            bytes
        };

        let bytes = PyBytes::new(py, &bytes);
        Ok(bytes)
    }

    /// Returns the last saved docket along with the size of any changed data
    /// (in number of blocks), and said data as bytes.
    fn inner_nodemap_data_incremental(
        &self,
        py: Python,
    ) -> PyResult<PyObject> {
        let docket = self.docket(py).borrow();
        let docket = match docket.as_ref() {
            Some(d) => d,
            None => return Ok(py.None()),
        };

        let node_tree = self.get_nodetree(py)?.borrow_mut().take().unwrap();
        let masked_blocks = node_tree.masked_readonly_blocks();
        let (_, data) = node_tree.into_readonly_and_added_bytes();
        let changed = masked_blocks * std::mem::size_of::<Block>();

        Ok((docket, changed, PyBytes::new(py, &data))
            .to_py_object(py)
            .into_object())
    }

    /// Update the nodemap from the new (mmap'ed) data.
    /// The docket is kept as a reference for later incremental calls.
    fn inner_update_nodemap_data(
        &self,
        py: Python,
        docket: PyObject,
        nm_data: PyObject,
    ) -> PyResult<PyObject> {
        let buf = PyBuffer::get(py, &nm_data)?;
        let len = buf.item_count();

        // Build a slice from the mmap'ed buffer data
        let cbuf = buf.buf_ptr();
        let bytes = if std::mem::size_of::<u8>() == buf.item_size()
            && buf.is_c_contiguous()
            && u8::is_compatible_format(buf.format())
        {
            unsafe { std::slice::from_raw_parts(cbuf as *const u8, len) }
        } else {
            return Err(PyErr::new::<ValueError, _>(
                py,
                "Nodemap data buffer has an invalid memory representation"
                    .to_string(),
            ));
        };

        // Keep a reference to the mmap'ed buffer, otherwise we get a dangling
        // pointer.
        self.mmap(py).borrow_mut().replace(buf);

        let mut nt = NodeTree::load_bytes(Box::new(bytes), len);

        let data_tip =
            docket.getattr(py, "tip_rev")?.extract::<Revision>(py)?;
        self.docket(py).borrow_mut().replace(docket.clone_ref(py));
        let idx = self.cindex(py).borrow();
        let current_tip = idx.len();

        for r in (data_tip + 1)..current_tip as Revision {
            let rev = r as Revision;
            // in this case node() won't ever return None
            nt.insert(&*idx, idx.node(rev).unwrap(), rev)
                .map_err(|e| nodemap_error(py, e))?
        }

        *self.nt(py).borrow_mut() = Some(nt);

        Ok(py.None())
    }
}

fn revlog_error(py: Python) -> PyErr {
    match py
        .import("mercurial.error")
        .and_then(|m| m.get(py, "RevlogError"))
    {
        Err(e) => e,
        Ok(cls) => PyErr::from_instance(
            py,
            cls.call(py, (py.None(),), None).ok().into_py_object(py),
        ),
    }
}

fn rev_not_in_index(py: Python, rev: Revision) -> PyErr {
    PyErr::new::<ValueError, _>(
        py,
        format!(
            "Inconsistency: Revision {} found in nodemap \
             is not in revlog index",
            rev
        ),
    )
}

/// Standard treatment of NodeMapError
fn nodemap_error(py: Python, err: NodeMapError) -> PyErr {
    match err {
        NodeMapError::MultipleResults => revlog_error(py),
        NodeMapError::RevisionNotInIndex(r) => rev_not_in_index(py, r),
    }
}

/// Create the module, with __package__ given from parent
pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
    let dotted_name = &format!("{}.revlog", package);
    let m = PyModule::new(py, dotted_name)?;
    m.add(py, "__package__", package)?;
    m.add(py, "__doc__", "RevLog - Rust implementations")?;

    m.add_class::<MixedIndex>(py)?;

    let sys = PyModule::import(py, "sys")?;
    let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
    sys_modules.set_item(py, dotted_name, &m)?;

    Ok(m)
}
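
The nodemap_data_all / nodemap_data_incremental / update_nodemap_data trio above is the persistence API for the Rust nodemap. The sketch below is illustrative only: `idx` stands for a MixedIndex instance, `mm_data` for a buffer over the on-disk file, and the docket handling is simplified; the real callers live in Mercurial's Python revlog code.

    # Full write: serialize the whole node tree to bytes written as-is.
    data = idx.nodemap_data_all()

    # Incremental write: None until a docket was installed via
    # update_nodemap_data(); afterwards, the saved docket, the size of the
    # portion to rewrite, and the newly added bytes.
    res = idx.nodemap_data_incremental()
    if res is not None:
        docket, changed, data = res

    # Load: hand the mmap'ed on-disk data back, together with its docket.
    idx.update_nodemap_data(docket, mm_data)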
@@ -1,336 +1,336 @@
# The following variables can be passed in as parameters:
#
# VERSION
#   Version string of program being produced.
#
# MSI_NAME
#   Root name of MSI installer.
#
# EXTRA_MSI_FEATURES
#   ; delimited string of extra features to advertise in the built MSI.
#
# SIGNING_PFX_PATH
#   Path to code signing certificate to use.
#
# SIGNING_PFX_PASSWORD
#   Password to code signing PFX file defined by SIGNING_PFX_PATH.
#
# SIGNING_SUBJECT_NAME
#   String fragment in code signing certificate subject name used to find
#   code signing certificate in Windows certificate store.
#
# TIME_STAMP_SERVER_URL
#   URL of time-stamp token authority (RFC 3161) servers to stamp code signatures.

ROOT = CWD + "/../.."

VERSION = VARS.get("VERSION", "0.0")
MSI_NAME = VARS.get("MSI_NAME", "mercurial")
EXTRA_MSI_FEATURES = VARS.get("EXTRA_MSI_FEATURES")
SIGNING_PFX_PATH = VARS.get("SIGNING_PFX_PATH")
SIGNING_PFX_PASSWORD = VARS.get("SIGNING_PFX_PASSWORD", "")
SIGNING_SUBJECT_NAME = VARS.get("SIGNING_SUBJECT_NAME")
TIME_STAMP_SERVER_URL = VARS.get("TIME_STAMP_SERVER_URL", "http://timestamp.digicert.com")

IS_WINDOWS = "windows" in BUILD_TARGET_TRIPLE

USE_IN_MEMORY_RESOURCES = False

# Code to run in Python interpreter.
RUN_CODE = """
import os
import sys
extra_path = os.environ.get('PYTHONPATH')
if extra_path is not None:
    # extensions and hooks expect a working python environment
    # We do not prepend the values because the Mercurial library wants to be in
    # the front of the sys.path to avoid picking up other installations.
    sys.path.extend(extra_path.split(os.pathsep))
# Add user site to sys.path to load extensions without the full path
if os.name == 'nt':
    vi = sys.version_info
    appdata = os.environ.get('APPDATA')
    if appdata:
        sys.path.append(
            os.path.join(
                appdata,
                'Python',
                'Python%d%d' % (vi[0], vi[1]),
                'site-packages',
            )
        )
elif sys.platform == "darwin":
    vi = sys.version_info

    def joinuser(*args):
        return os.path.expanduser(os.path.join(*args))

    # Note: site.py uses `sys._framework` instead of hardcoding "Python" as the
    # 3rd arg, but that is set to an empty string in an oxidized binary. It
    # has a fallback to ~/.local when `sys._framework` isn't set, but we want
    # to match what the system python uses, so it sees pip installed stuff.
    usersite = joinuser("~", "Library", "Python",
                        "%d.%d" % vi[:2], "lib/python/site-packages")

    sys.path.append(usersite)
import hgdemandimport;
hgdemandimport.enable();
from mercurial import dispatch;
dispatch.run();
"""

set_build_path(ROOT + "/build/pyoxidizer")

def make_distribution():
    return default_python_distribution(python_version = "3.9")

def resource_callback(policy, resource):
    if USE_IN_MEMORY_RESOURCES:
        resource.add_location = "in-memory"
        return

    # We use a custom resource routing policy to influence where things are
    # loaded from.
    #
    # For Python modules and resources, we load from memory if they are in
    # the standard library and from the filesystem if not. This is because
    # parts of Mercurial and some 3rd party packages aren't yet compatible
    # with memory loading.
    #
    # For Python extension modules, we load from the filesystem because
    # this yields greatest compatibility.
    if type(resource) in ("PythonModuleSource", "PythonPackageResource", "PythonPackageDistributionResource"):
        if resource.is_stdlib:
            resource.add_location = "in-memory"
        else:
            resource.add_location = "filesystem-relative:lib"

    elif type(resource) == "PythonExtensionModule":
        resource.add_location = "filesystem-relative:lib"

def make_exe(dist):
    """Builds a Rust-wrapped Mercurial binary."""
    packaging_policy = dist.make_python_packaging_policy()

    # Extensions may depend on any Python functionality. Include all
    # extensions.
    packaging_policy.extension_module_filter = "all"
    packaging_policy.resources_location = "in-memory"
    if not USE_IN_MEMORY_RESOURCES:
        packaging_policy.resources_location_fallback = "filesystem-relative:lib"
    packaging_policy.register_resource_callback(resource_callback)

    config = dist.make_python_interpreter_config()
    config.allocator_backend = "default"
    config.run_command = RUN_CODE

    # We want to let the user load extensions from the file system
    config.filesystem_importer = True

    # We need this to make resourceutil happy, since it looks for sys.frozen.
    config.sys_frozen = True
    config.legacy_windows_stdio = True

    exe = dist.to_python_executable(
        name = "hg",
        packaging_policy = packaging_policy,
        config = config,
    )

    # Add Mercurial to resources.
    exe.add_python_resources(exe.pip_install(["--verbose", "--no-use-pep517", ROOT]))

    # On Windows, we install extra packages for convenience.
    if IS_WINDOWS:
        exe.add_python_resources(
            exe.pip_install(["-r", ROOT + "/contrib/packaging/requirements-windows-py3.txt"]),
        )
    extra_packages = VARS.get("extra_py_packages", "")
    if extra_packages:
        for extra in extra_packages.split(","):
            extra_src, pkgs = extra.split("=")
            pkgs = pkgs.split(":")
            exe.add_python_resources(exe.read_package_root(extra_src, pkgs))

    return exe

def make_manifest(dist, exe):
    m = FileManifest()
    m.add_python_resource(".", exe)

    return m


# This adjusts the InstallManifest produced from exe generation to provide
# additional files found in a Windows install layout.
def make_windows_install_layout(manifest):
    # Copy various files to new install locations. This can go away once
    # we're using the importlib resource reader.
    RECURSIVE_COPIES = {
        "lib/mercurial/locale/": "locale/",
        "lib/mercurial/templates/": "templates/",
    }
    for (search, replace) in RECURSIVE_COPIES.items():
        for path in manifest.paths():
            if path.startswith(search):
                new_path = path.replace(search, replace)
                print("copy %s to %s" % (path, new_path))
                file = manifest.get_file(path)
                manifest.add_file(file, path = new_path)

    # Similar to above, but with filename pattern matching.
    # lib/mercurial/helptext/**/*.txt -> helptext/
    # lib/mercurial/defaultrc/*.rc -> defaultrc/
    for path in manifest.paths():
        if path.startswith("lib/mercurial/helptext/") and path.endswith(".txt"):
            new_path = path[len("lib/mercurial/"):]
        elif path.startswith("lib/mercurial/defaultrc/") and path.endswith(".rc"):
            new_path = path[len("lib/mercurial/"):]
        else:
            continue

        print("copying %s to %s" % (path, new_path))
        manifest.add_file(manifest.get_file(path), path = new_path)

    extra_install_files = VARS.get("extra_install_files", "")
    if extra_install_files:
        for extra in extra_install_files.split(","):
            print("adding extra files from %s" % extra)
            # TODO: I expected a ** glob to work, but it didn't.
            #
            # TODO: I know this has forward-slash paths. As far as I can tell,
            # backslashes don't ever match glob() expansions in
            # tugger-starlark, even on Windows.
            manifest.add_manifest(glob(include=[extra + "/*/*"], strip_prefix=extra+"/"))

    # We also install a handful of additional files.
    EXTRA_CONTRIB_FILES = [
        "bash_completion",
        "hgweb.fcgi",
        "hgweb.wsgi",
        "logo-droplets.svg",
        "mercurial.el",
        "mq.el",
        "tcsh_completion",
        "tcsh_completion_build.sh",
        "xml.rnc",
        "zsh_completion",
    ]

    for f in EXTRA_CONTRIB_FILES:
        manifest.add_file(FileContent(path = ROOT + "/contrib/" + f), directory = "contrib")

    # Individual files with full source to destination path mapping.
    EXTRA_FILES = {
        "contrib/hgk": "contrib/hgk.tcl",
        "contrib/win32/postinstall.txt": "ReleaseNotes.txt",
        "contrib/win32/ReadMe.html": "ReadMe.html",
        "doc/style.css": "doc/style.css",
        "COPYING": "Copying.txt",
    }

    for source, dest in EXTRA_FILES.items():
        print("adding extra file %s" % dest)
        manifest.add_file(FileContent(path = ROOT + "/" + source), path = dest)

    # And finally some wildcard matches.
    manifest.add_manifest(glob(
        include = [ROOT + "/contrib/vim/*"],
        strip_prefix = ROOT + "/"
    ))
    manifest.add_manifest(glob(
        include = [ROOT + "/doc/*.html"],
        strip_prefix = ROOT + "/"
    ))

    # But we don't ship hg-ssh on Windows, so exclude its documentation.
    manifest.remove("doc/hg-ssh.8.html")

    return manifest


def make_msi(manifest):
    manifest = make_windows_install_layout(manifest)

    if "x86_64" in BUILD_TARGET_TRIPLE:
        platform = "x64"
    else:
        platform = "x86"

    manifest.add_file(
        FileContent(path = ROOT + "/contrib/packaging/wix/COPYING.rtf"),
        path = "COPYING.rtf",
    )
    manifest.remove("Copying.txt")
    manifest.add_file(
        FileContent(path = ROOT + "/contrib/win32/mercurial.ini"),
        path = "defaultrc/mercurial.rc",
    )
    manifest.add_file(
        FileContent(filename = "editor.rc", content = "[ui]\neditor = notepad\n"),
        path = "defaultrc/editor.rc",
    )

    wix = WiXInstaller(
        "hg",
        "%s-%s-%s.msi" % (MSI_NAME, VERSION, platform),
        arch = platform,
    )

    # Materialize files in the manifest to the install layout.
    wix.add_install_files(manifest)

    # From mercurial.wxs.
    wix.install_files_root_directory_id = "INSTALLDIR"

    # Pull in our custom .wxs files.
    defines = {
        "PyOxidizer": "1",
        "Platform": platform,
        "Version": VERSION,
        "Comments": "Installs Mercurial version %s" % VERSION,
        "PythonVersion": "3",
        "MercurialHasLib": "1",
    }

    if EXTRA_MSI_FEATURES:
        defines["MercurialExtraFeatures"] = EXTRA_MSI_FEATURES

    wix.add_wxs_file(
        ROOT + "/contrib/packaging/wix/mercurial.wxs",
        preprocessor_parameters=defines,
    )

    # Our .wxs file references other files. Pull those into the build environment.
    for f in ("defines.wxi", "guids.wxi", "COPYING.rtf"):
        wix.add_build_file(f, ROOT + "/contrib/packaging/wix/" + f)

    wix.add_build_file("mercurial.ico", ROOT + "/contrib/win32/mercurial.ico")

    return wix


def register_code_signers():
    if not IS_WINDOWS:
        return

    if SIGNING_PFX_PATH:
        signer = code_signer_from_pfx_file(SIGNING_PFX_PATH, SIGNING_PFX_PASSWORD)
    elif SIGNING_SUBJECT_NAME:
        signer = code_signer_from_windows_store_subject(SIGNING_SUBJECT_NAME)
    else:
        signer = None

    if signer:
        signer.set_time_stamp_server(TIME_STAMP_SERVER_URL)
        signer.activate()


register_code_signers()

register_target("distribution", make_distribution)
register_target("exe", make_exe, depends = ["distribution"])
register_target("app", make_manifest, depends = ["distribution", "exe"], default = True)
register_target("msi", make_msi, depends = ["app"])

resolve_targets()
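
For reference, make_exe() above accepts an ad-hoc `extra_py_packages` entry in VARS. A worked value makes the format it parses easier to see; the source path and package names below are invented for illustration:

    extra = "vendor/src=hgext3rd:keyring"  # hypothetical VARS value
    extra_src, pkgs = extra.split("=")     # -> "vendor/src", "hgext3rd:keyring"
    pkgs = pkgs.split(":")                 # -> ["hgext3rd", "keyring"]
    # equivalent to: exe.read_package_root("vendor/src", ["hgext3rd", "keyring"])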
@@ -1,52 +1,58 @@
from __future__ import absolute_import, print_function

import sys
import unittest

if sys.version_info[0] < 3:
    base_class = unittest._TextTestResult
else:
    base_class = unittest.TextTestResult


class TestResult(base_class):
    def __init__(self, options, *args, **kwargs):
        super(TestResult, self).__init__(*args, **kwargs)
        self._options = options

        # unittest.TestResult didn't have skipped until 2.7. We need to
        # polyfill it.
        self.skipped = []

        # We have a custom "ignored" result that isn't present in any Python
        # unittest implementation. It is very similar to skipped. It may make
        # sense to map it into skip some day.
        self.ignored = []

        self.times = []
        self._firststarttime = None
        # Data stored for the benefit of generating xunit reports.
        self.successes = []
        self.faildata = {}

    def addFailure(self, test, reason):
        print("FAILURE!", test, reason)

    def addSuccess(self, test):
        print("SUCCESS!", test)

    def addError(self, test, err):
        print("ERR!", test, err)

    # Polyfill.
    def addSkip(self, test, reason):
        print("SKIP!", test, reason)

    def addIgnore(self, test, reason):
        print("IGNORE!", test, reason)

    def onStart(self, test):
        print("ON_START!", test)

    def onEnd(self):
        print("ON_END!")

    def addOutputMismatch(self, test, ret, got, expected):
        return False

    def stopTest(self, test, interrupted=False):
        super(TestResult, self).stopTest(test)
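
A result class shaped like this is normally installed through unittest's runner hook. A minimal sketch, assuming the TestResult class above is importable and passing None where the harness would pass its options object:

    import unittest

    class Runner(unittest.TextTestRunner):
        def _makeResult(self):
            # stream, descriptions and verbosity mirror what the default
            # TextTestResult receives from TextTestRunner.
            return TestResult(None, self.stream, self.descriptions,
                              self.verbosity)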
@@ -1,4069 +1,4072 @@
#!/usr/bin/env python3
#
# run-tests.py - Run a set of tests on Mercurial
#
# Copyright 2006 Olivia Mackall <olivia@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

# Modifying this script is tricky because it has many modes:
#   - serial (default) vs parallel (-jN, N > 1)
#   - no coverage (default) vs coverage (-c, -C, -s)
#   - temp install (default) vs specific hg script (--with-hg, --local)
#   - tests are a mix of shell scripts and Python scripts
#
# If you change this script, it is recommended that you ensure you
# haven't broken it by running it in various modes with a representative
# sample of test scripts. For example:
#
#  1) serial, no coverage, temp install:
#      ./run-tests.py test-s*
#  2) serial, no coverage, local hg:
#      ./run-tests.py --local test-s*
#  3) serial, coverage, temp install:
#      ./run-tests.py -c test-s*
#  4) serial, coverage, local hg:
#      ./run-tests.py -c --local test-s*  # unsupported
#  5) parallel, no coverage, temp install:
#      ./run-tests.py -j2 test-s*
#  6) parallel, no coverage, local hg:
#      ./run-tests.py -j2 --local test-s*
#  7) parallel, coverage, temp install:
#      ./run-tests.py -j2 -c test-s*  # currently broken
#  8) parallel, coverage, local install:
#      ./run-tests.py -j2 -c --local test-s*  # unsupported (and broken)
#  9) parallel, custom tmp dir:
#      ./run-tests.py -j2 --tmpdir /tmp/myhgtests
#  10) parallel, pure, tests that call run-tests:
#      ./run-tests.py --pure `grep -l run-tests.py *.t`
#
# (You could use any subset of the tests: test-s* happens to match
# enough that it's worth doing parallel runs, few enough that it
# completes fairly quickly, includes both shell and Python scripts, and
# includes some scripts that run daemon processes.)

from __future__ import absolute_import, print_function

import argparse
import collections
import contextlib
import difflib
import distutils.version as version
import errno
import json
import multiprocessing
import os
import platform
import random
import re
import shutil
import signal
import socket
import subprocess
import sys
import sysconfig
import tempfile
import threading
import time
import unittest
import uuid
import xml.dom.minidom as minidom

WINDOWS = os.name == r'nt'

try:
    import Queue as queue
except ImportError:
    import queue

try:
    import shlex

    shellquote = shlex.quote
except (ImportError, AttributeError):
    import pipes

    shellquote = pipes.quote


processlock = threading.Lock()

pygmentspresent = False
try:  # is pygments installed
    import pygments
    import pygments.lexers as lexers
    import pygments.lexer as lexer
    import pygments.formatters as formatters
    import pygments.token as token
    import pygments.style as style

    if WINDOWS:
        hgpath = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        sys.path.append(hgpath)
        try:
            from mercurial import win32  # pytype: disable=import-error

            # Don't check the result code because it fails on heptapod, but
            # something is able to convert to color anyway.
            win32.enablevtmode()
        finally:
            sys.path = sys.path[:-1]

    pygmentspresent = True
    difflexer = lexers.DiffLexer()
    terminal256formatter = formatters.Terminal256Formatter()
except ImportError:
    pass

if pygmentspresent:

    class TestRunnerStyle(style.Style):
        default_style = ""
        skipped = token.string_to_tokentype("Token.Generic.Skipped")
        failed = token.string_to_tokentype("Token.Generic.Failed")
        skippedname = token.string_to_tokentype("Token.Generic.SName")
        failedname = token.string_to_tokentype("Token.Generic.FName")
        styles = {
            skipped: '#e5e5e5',
            skippedname: '#00ffff',
            failed: '#7f0000',
            failedname: '#ff0000',
        }

    class TestRunnerLexer(lexer.RegexLexer):
        testpattern = r'[\w-]+\.(t|py)(#[a-zA-Z0-9_\-\.]+)?'
        tokens = {
            'root': [
                (r'^Skipped', token.Generic.Skipped, 'skipped'),
                (r'^Failed ', token.Generic.Failed, 'failed'),
                (r'^ERROR: ', token.Generic.Failed, 'failed'),
            ],
            'skipped': [
                (testpattern, token.Generic.SName),
                (r':.*', token.Generic.Skipped),
            ],
            'failed': [
                (testpattern, token.Generic.FName),
                (r'(:| ).*', token.Generic.Failed),
            ],
        }

    runnerformatter = formatters.Terminal256Formatter(style=TestRunnerStyle)
    runnerlexer = TestRunnerLexer()

origenviron = os.environ.copy()


if sys.version_info > (3, 5, 0):
    PYTHON3 = True
    xrange = range  # we use xrange in one place, and we'd rather not use range

    def _sys2bytes(p):
        if p is None:
            return p
        return p.encode('utf-8')

    def _bytes2sys(p):
        if p is None:
            return p
        return p.decode('utf-8')

    osenvironb = getattr(os, 'environb', None)
    if osenvironb is None:
        # Windows lacks os.environb, for instance. A proxy over the real thing
        # instead of a copy allows the environment to be updated via bytes on
        # all platforms.
        class environbytes(object):
            def __init__(self, strenv):
                self.__len__ = strenv.__len__
                self.clear = strenv.clear
                self._strenv = strenv

            def __getitem__(self, k):
                v = self._strenv.__getitem__(_bytes2sys(k))
                return _sys2bytes(v)

            def __setitem__(self, k, v):
                self._strenv.__setitem__(_bytes2sys(k), _bytes2sys(v))

            def __delitem__(self, k):
                self._strenv.__delitem__(_bytes2sys(k))

            def __contains__(self, k):
                return self._strenv.__contains__(_bytes2sys(k))

            def __iter__(self):
                return iter([_sys2bytes(k) for k in iter(self._strenv)])

            def get(self, k, default=None):
                v = self._strenv.get(_bytes2sys(k), _bytes2sys(default))
                return _sys2bytes(v)

            def pop(self, k, default=None):
                v = self._strenv.pop(_bytes2sys(k), _bytes2sys(default))
                return _sys2bytes(v)

        osenvironb = environbytes(os.environ)

    getcwdb = getattr(os, 'getcwdb')
    if not getcwdb or WINDOWS:
        getcwdb = lambda: _sys2bytes(os.getcwd())

elif sys.version_info >= (3, 0, 0):
    print(
        '%s is only supported on Python 3.5+ and 2.7, not %s'
        % (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3]))
    )
    sys.exit(70)  # EX_SOFTWARE from `man 3 sysexits`
else:
    PYTHON3 = False

    # In python 2.x, path operations are generally done using
    # bytestrings by default, so we don't have to do any extra
    # fiddling there. We define the wrapper functions anyway just to
    # help keep code consistent between platforms.
    def _sys2bytes(p):
        return p

    _bytes2sys = _sys2bytes
    osenvironb = os.environ
    getcwdb = os.getcwd

    if WINDOWS:
        _getcwdb = getcwdb

        def getcwdb():
            cwd = _getcwdb()
            if re.match(b'^[a-z]:', cwd):
                # os.getcwd() is inconsistent on the capitalization of the
                # drive letter, so adjust it. see
                # https://bugs.python.org/issue40368
                cwd = cwd[0:1].upper() + cwd[1:]
            return cwd

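# Illustration (added commentary, not part of run-tests.py): with the
# environbytes proxy installed, byte-keyed access works uniformly even on
# platforms lacking os.environb, e.g.:
#
#     osenvironb[b'HGPLAIN'] = b'1'   # stored as str in the real os.environ
#     assert osenvironb.get(b'HGPLAIN') == b'1'
#     del osenvironb[b'HGPLAIN']
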
# For Windows support
wifexited = getattr(os, "WIFEXITED", lambda x: False)

# Whether to use IPv6
def checksocketfamily(name, port=20058):
    """return true if we can listen on localhost using family=name

    name should be either 'AF_INET', or 'AF_INET6'.
    port being used is okay - EADDRINUSE is considered as successful.
    """
    family = getattr(socket, name, None)
    if family is None:
        return False
    try:
        s = socket.socket(family, socket.SOCK_STREAM)
        s.bind(('localhost', port))
        s.close()
        return True
    except socket.error as exc:
        if exc.errno == errno.EADDRINUSE:
            return True
        elif exc.errno in (errno.EADDRNOTAVAIL, errno.EPROTONOSUPPORT):
            return False
        else:
            raise
    else:
        return False


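# Note on checksocketfamily() above: EADDRINUSE counts as success because a
# busy port still proves the address family works, so e.g.
# checksocketfamily('AF_INET6') is True on any host with working IPv6
# localhost support.
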
# useipv6 will be set by parseargs
useipv6 = None


def checkportisavailable(port):
    """return true if a port seems free to bind on localhost"""
    if useipv6:
        family = socket.AF_INET6
    else:
        family = socket.AF_INET
    try:
        with contextlib.closing(socket.socket(family, socket.SOCK_STREAM)) as s:
            s.bind(('localhost', port))
        return True
    except socket.error as exc:
        if WINDOWS and exc.errno == errno.WSAEACCES:
            return False
        elif PYTHON3:
            # TODO: make a proper exception handler after dropping py2. This
            # works because socket.error is an alias for OSError on py3,
            # which is also the baseclass of PermissionError.
            if isinstance(exc, PermissionError):
                return False
            if exc.errno not in (
                errno.EADDRINUSE,
                errno.EADDRNOTAVAIL,
                errno.EPROTONOSUPPORT,
            ):
                raise
        return False


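# Illustration (not part of the harness): callers can probe for a free port
# with checkportisavailable(), e.g.
#
#   port = 20059
#   while not checkportisavailable(port):
#       port += 1
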
closefds = os.name == 'posix'


def Popen4(cmd, wd, timeout, env=None):
    processlock.acquire()
    p = subprocess.Popen(
        _bytes2sys(cmd),
        shell=True,
        bufsize=-1,
        cwd=_bytes2sys(wd),
        env=env,
        close_fds=closefds,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    processlock.release()

    p.fromchild = p.stdout
    p.tochild = p.stdin
    p.childerr = p.stderr

    p.timeout = False
    if timeout:

        def t():
            start = time.time()
            while time.time() - start < timeout and p.returncode is None:
                time.sleep(0.1)
            p.timeout = True
            vlog('# Timeout reached for process %d' % p.pid)
            if p.returncode is None:
                terminate(p)

        threading.Thread(target=t).start()

    return p


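# Note on Popen4() above: when a timeout is given, a watchdog thread wakes
# every 0.1s; once `timeout` seconds pass with p.returncode still None (it is
# only filled in when the caller poll()s or wait()s), it sets p.timeout = True
# and terminate()s the child, so callers can tell a timeout from an ordinary
# non-zero exit.
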
if sys.executable:
    sysexecutable = sys.executable
elif os.environ.get('PYTHONEXECUTABLE'):
    sysexecutable = os.environ['PYTHONEXECUTABLE']
elif os.environ.get('PYTHON'):
    sysexecutable = os.environ['PYTHON']
else:
    raise AssertionError('Could not find Python interpreter')

PYTHON = _sys2bytes(sysexecutable.replace('\\', '/'))
IMPL_PATH = b'PYTHONPATH'
if 'java' in sys.platform:
    IMPL_PATH = b'JYTHONPATH'

default_defaults = {
    'jobs': ('HGTEST_JOBS', multiprocessing.cpu_count()),
    'timeout': ('HGTEST_TIMEOUT', 360),
    'slowtimeout': ('HGTEST_SLOWTIMEOUT', 1500),
    'port': ('HGTEST_PORT', 20059),
    'shell': ('HGTEST_SHELL', 'sh'),
}

defaults = default_defaults.copy()
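
# e.g. exporting HGTEST_TIMEOUT=500 overrides the 360s default; getparser()
# below coerces each environment override with type(default), keeping ints
# ints and the shell a string.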


def canonpath(path):
    return os.path.realpath(os.path.expanduser(path))


def which(exe):
    if PYTHON3:
        # shutil.which only accepts bytes from 3.8 on
        cmd = _bytes2sys(exe)
        real_exec = shutil.which(cmd)
        return _sys2bytes(real_exec)
    else:
        # let us do the os work
        for p in osenvironb[b'PATH'].split(os.pathsep):
            f = os.path.join(p, exe)
            if os.path.isfile(f):
                return f
        return None
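
# Note on which() above: it deals in bytes at both ends; e.g. which(b'diff')
# returns the executable's path as bytes, or None if it cannot be found.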


def parselistfiles(files, listtype, warn=True):
    entries = dict()
    for filename in files:
        try:
            path = os.path.expanduser(os.path.expandvars(filename))
            f = open(path, "rb")
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            if warn:
                print("warning: no such %s file: %s" % (listtype, filename))
            continue

        for line in f.readlines():
            line = line.split(b'#', 1)[0].strip()
            if line:
                # Ensure path entries are compatible with os.path.relpath()
                entries[os.path.normpath(line)] = filename

        f.close()
    return entries
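
# Note on parselistfiles() above: list files name one test per line and '#'
# starts a comment, e.g. a blacklist line of the form
#   test-http.t    # flaky on this platform
# maps the normalized name b'test-http.t' to the list file it came from.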


def parsettestcases(path):
    """read a .t test file, return a list of sorted lists of test case names

    If path does not exist, return an empty list.
    """
    cases = []
    try:
        with open(path, 'rb') as f:
            for l in f:
                if l.startswith(b'#testcases '):
                    cases.append(sorted(l[11:].split()))
    except IOError as ex:
        if ex.errno != errno.ENOENT:
            raise
    return cases
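
# e.g. a test file containing the line "#testcases simple complex" makes
# parsettestcases() return [[b'complex', b'simple']]: one sorted list of case
# names per #testcases line.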


def getparser():
    """Obtain the argument parser used by the CLI."""
    parser = argparse.ArgumentParser(usage='%(prog)s [options] [tests]')

    selection = parser.add_argument_group('Test Selection')
    selection.add_argument(
        '--allow-slow-tests',
        action='store_true',
        help='allow extremely slow tests',
    )
    selection.add_argument(
        "--blacklist",
        action="append",
        help="skip tests listed in the specified blacklist file",
    )
    selection.add_argument(
        "--changed",
        help="run tests that are changed in parent rev or working directory",
    )
    selection.add_argument(
        "-k", "--keywords", help="run tests matching keywords"
    )
    selection.add_argument(
        "-r", "--retest", action="store_true", help="retest failed tests"
    )
    selection.add_argument(
        "--test-list",
        action="append",
        help="read tests to run from the specified file",
    )
    selection.add_argument(
        "--whitelist",
        action="append",
        help="always run tests listed in the specified whitelist file",
    )
    selection.add_argument(
        'tests', metavar='TESTS', nargs='*', help='Tests to run'
    )

    harness = parser.add_argument_group('Test Harness Behavior')
    harness.add_argument(
        '--bisect-repo',
        metavar='bisect_repo',
        help=(
            "Path of a repo to bisect. Use together with " "--known-good-rev"
        ),
    )
    harness.add_argument(
        "-d",
        "--debug",
        action="store_true",
        help="debug mode: write output of test scripts to console"
        " rather than capturing and diffing it (disables timeout)",
    )
    harness.add_argument(
        "-f",
        "--first",
        action="store_true",
        help="exit on the first test failure",
    )
    harness.add_argument(
        "-i",
        "--interactive",
        action="store_true",
        help="prompt to accept changed output",
    )
    harness.add_argument(
        "-j",
        "--jobs",
        type=int,
        help="number of jobs to run in parallel"
        " (default: $%s or %d)" % defaults['jobs'],
    )
    harness.add_argument(
        "--keep-tmpdir",
        action="store_true",
        help="keep temporary directory after running tests",
    )
    harness.add_argument(
        '--known-good-rev',
        metavar="known_good_rev",
        help=(
            "Automatically bisect any failures using this "
            "revision as a known-good revision."
        ),
    )
    harness.add_argument(
        "--list-tests",
        action="store_true",
        help="list tests instead of running them",
    )
    harness.add_argument(
        "--loop", action="store_true", help="loop tests repeatedly"
    )
    harness.add_argument(
        '--random', action="store_true", help='run tests in random order'
    )
    harness.add_argument(
        '--order-by-runtime',
        action="store_true",
        help='run slowest tests first, according to .testtimes',
    )
    harness.add_argument(
        "-p",
        "--port",
        type=int,
        help="port on which servers should listen"
        " (default: $%s or %d)" % defaults['port'],
    )
    harness.add_argument(
        '--profile-runner',
        action='store_true',
        help='run statprof on run-tests',
    )
    harness.add_argument(
        "-R", "--restart", action="store_true", help="restart at last error"
    )
    harness.add_argument(
        "--runs-per-test",
        type=int,
        dest="runs_per_test",
        help="run each test N times (default=1)",
        default=1,
    )
    harness.add_argument(
        "--shell", help="shell to use (default: $%s or %s)" % defaults['shell']
    )
    harness.add_argument(
        '--showchannels', action='store_true', help='show scheduling channels'
    )
    harness.add_argument(
        "--slowtimeout",
        type=int,
        help="kill errant slow tests after SLOWTIMEOUT seconds"
        " (default: $%s or %d)" % defaults['slowtimeout'],
    )
    harness.add_argument(
        "-t",
        "--timeout",
        type=int,
        help="kill errant tests after TIMEOUT seconds"
        " (default: $%s or %d)" % defaults['timeout'],
    )
    harness.add_argument(
        "--tmpdir",
        help="run tests in the given temporary directory"
        " (implies --keep-tmpdir)",
    )
    harness.add_argument(
        "-v", "--verbose", action="store_true", help="output verbose messages"
    )

    hgconf = parser.add_argument_group('Mercurial Configuration')
    hgconf.add_argument(
        "--chg",
        action="store_true",
        help="install and use chg wrapper in place of hg",
    )
    hgconf.add_argument(
        "--chg-debug",
        action="store_true",
        help="show chg debug logs",
    )
    hgconf.add_argument(
        "--rhg",
        action="store_true",
        help="install and use rhg Rust implementation in place of hg",
    )
    hgconf.add_argument(
        "--pyoxidized",
        action="store_true",
        help="build the hg binary using pyoxidizer",
    )
    hgconf.add_argument("--compiler", help="compiler to build with")
    hgconf.add_argument(
        '--extra-config-opt',
        action="append",
        default=[],
        help='set the given config opt in the test hgrc',
    )
    hgconf.add_argument(
        "-l",
        "--local",
        action="store_true",
        help="shortcut for --with-hg=<testdir>/../hg, "
        "--with-rhg=<testdir>/../rust/target/release/rhg if --rhg is set, "
        "and --with-chg=<testdir>/../contrib/chg/chg if --chg is set",
    )
    hgconf.add_argument(
        "--ipv6",
        action="store_true",
        help="prefer IPv6 to IPv4 for network related tests",
    )
    hgconf.add_argument(
        "--pure",
        action="store_true",
        help="use pure Python code instead of C extensions",
    )
    hgconf.add_argument(
        "--rust",
        action="store_true",
        help="use Rust code alongside C extensions",
    )
    hgconf.add_argument(
        "--no-rust",
        action="store_true",
        help="do not use Rust code even if compiled",
    )
    hgconf.add_argument(
        "--with-chg",
        metavar="CHG",
        help="use specified chg wrapper in place of hg",
    )
    hgconf.add_argument(
        "--with-rhg",
        metavar="RHG",
        help="use specified rhg Rust implementation in place of hg",
    )
    hgconf.add_argument(
        "--with-hg",
        metavar="HG",
        help="test using specified hg script rather than a "
        "temporary installation",
    )

    reporting = parser.add_argument_group('Results Reporting')
    reporting.add_argument(
        "-C",
        "--annotate",
        action="store_true",
        help="output files annotated with coverage",
    )
    reporting.add_argument(
        "--color",
        choices=["always", "auto", "never"],
        default=os.environ.get('HGRUNTESTSCOLOR', 'auto'),
        help="colorisation: always|auto|never (default: auto)",
    )
    reporting.add_argument(
        "-c",
        "--cover",
        action="store_true",
        help="print a test coverage report",
    )
    reporting.add_argument(
        '--exceptions',
        action='store_true',
        help='log all exceptions and generate an exception report',
    )
    reporting.add_argument(
        "-H",
        "--htmlcov",
        action="store_true",
        help="create an HTML report of the coverage of the files",
    )
    reporting.add_argument(
        "--json",
        action="store_true",
        help="store test result data in 'report.json' file",
    )
    reporting.add_argument(
        "--outputdir",
        help="directory to write error logs to (default=test directory)",
    )
    reporting.add_argument(
        "-n", "--nodiff", action="store_true", help="skip showing test changes"
    )
    reporting.add_argument(
        "-S",
        "--noskips",
        action="store_true",
        help="don't report skipped tests verbosely",
    )
    reporting.add_argument(
        "--time", action="store_true", help="time how long each test takes"
    )
    reporting.add_argument("--view", help="external diff viewer")
    reporting.add_argument(
        "--xunit", help="record xunit results at specified path"
    )

    for option, (envvar, default) in defaults.items():
        defaults[option] = type(default)(os.environ.get(envvar, default))
    parser.set_defaults(**defaults)

    return parser


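# Typical wiring (illustrative): the harness entry point does roughly
#
#   parser = getparser()
#   options = parseargs(sys.argv[1:], parser)
#
# so environment-derived defaults and command line flags end up in one
# `options` namespace.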
def parseargs(args, parser):
    """Parse arguments with our argument parser and validate results."""
    options = parser.parse_args(args)

    # jython is always pure
    if 'java' in sys.platform or '__pypy__' in sys.modules:
        options.pure = True

    if platform.python_implementation() != 'CPython' and options.rust:
        parser.error('Rust extensions are only available with CPython')

    if options.pure and options.rust:
        parser.error('--rust cannot be used with --pure')

    if options.rust and options.no_rust:
        parser.error('--rust cannot be used with --no-rust')

    if options.local:
        if options.with_hg or options.with_rhg or options.with_chg:
            parser.error(
                '--local cannot be used with --with-hg or --with-rhg or --with-chg'
            )
        if options.pyoxidized:
            parser.error('--pyoxidized does not work with --local (yet)')
        testdir = os.path.dirname(_sys2bytes(canonpath(sys.argv[0])))
        reporootdir = os.path.dirname(testdir)
        pathandattrs = [(b'hg', 'with_hg')]
        if options.chg:
            pathandattrs.append((b'contrib/chg/chg', 'with_chg'))
        if options.rhg:
            pathandattrs.append((b'rust/target/release/rhg', 'with_rhg'))
        for relpath, attr in pathandattrs:
            binpath = os.path.join(reporootdir, relpath)
            if not (WINDOWS or os.access(binpath, os.X_OK)):
                parser.error(
                    '--local specified, but %r not found or '
                    'not executable' % binpath
                )
            setattr(options, attr, _bytes2sys(binpath))

    if options.with_hg:
        options.with_hg = canonpath(_sys2bytes(options.with_hg))
        if not (
            os.path.isfile(options.with_hg)
            and os.access(options.with_hg, os.X_OK)
        ):
            parser.error('--with-hg must specify an executable hg script')
        if os.path.basename(options.with_hg) not in [b'hg', b'hg.exe']:
            msg = 'warning: --with-hg should specify an hg script, not: %s\n'
            msg %= _bytes2sys(os.path.basename(options.with_hg))
            sys.stderr.write(msg)
            sys.stderr.flush()

    if (options.chg or options.with_chg) and WINDOWS:
        parser.error('chg does not work on %s' % os.name)
    if (options.rhg or options.with_rhg) and WINDOWS:
        parser.error('rhg does not work on %s' % os.name)
    if options.pyoxidized and not WINDOWS:
        parser.error('--pyoxidized is currently Windows only')
    if options.with_chg:
        options.chg = False  # no installation to temporary location
        options.with_chg = canonpath(_sys2bytes(options.with_chg))
        if not (
            os.path.isfile(options.with_chg)
            and os.access(options.with_chg, os.X_OK)
        ):
            parser.error('--with-chg must specify a chg executable')
    if options.with_rhg:
        options.rhg = False  # no installation to temporary location
        options.with_rhg = canonpath(_sys2bytes(options.with_rhg))
        if not (
            os.path.isfile(options.with_rhg)
            and os.access(options.with_rhg, os.X_OK)
        ):
            parser.error('--with-rhg must specify a rhg executable')
    if options.chg and options.with_hg:
        # chg shares installation location with hg
        parser.error(
            '--chg does not work when --with-hg is specified '
            '(use --with-chg instead)'
        )
    if options.rhg and options.with_hg:
        # rhg shares installation location with hg
        parser.error(
            '--rhg does not work when --with-hg is specified '
            '(use --with-rhg instead)'
        )
    if options.rhg and options.chg:
        parser.error('--rhg and --chg do not work together')

    if options.color == 'always' and not pygmentspresent:
        sys.stderr.write(
            'warning: --color=always ignored because '
            'pygments is not installed\n'
        )

    if options.bisect_repo and not options.known_good_rev:
        parser.error("--bisect-repo cannot be used without --known-good-rev")

    global useipv6
    if options.ipv6:
        useipv6 = checksocketfamily('AF_INET6')
    else:
        # only use IPv6 if IPv4 is unavailable and IPv6 is available
        useipv6 = (not checksocketfamily('AF_INET')) and checksocketfamily(
            'AF_INET6'
        )

    options.anycoverage = options.cover or options.annotate or options.htmlcov
    if options.anycoverage:
        try:
            import coverage

            covver = version.StrictVersion(coverage.__version__).version
            if covver < (3, 3):
                parser.error('coverage options require coverage 3.3 or later')
        except ImportError:
            parser.error('coverage options now require the coverage package')

    if options.anycoverage and options.local:
        # this needs some path mangling somewhere, I guess
        parser.error(
            "sorry, coverage options do not work when --local " "is specified"
        )

    if options.anycoverage and options.with_hg:
        parser.error(
            "sorry, coverage options do not work when --with-hg " "is specified"
        )

    global verbose
    if options.verbose:
        verbose = ''

    if options.tmpdir:
        options.tmpdir = canonpath(options.tmpdir)

    if options.jobs < 1:
        parser.error('--jobs must be positive')
    if options.interactive and options.debug:
        parser.error("-i/--interactive and -d/--debug are incompatible")
    if options.debug:
        if options.timeout != defaults['timeout']:
            sys.stderr.write('warning: --timeout option ignored with --debug\n')
        if options.slowtimeout != defaults['slowtimeout']:
            sys.stderr.write(
                'warning: --slowtimeout option ignored with --debug\n'
            )
        options.timeout = 0
        options.slowtimeout = 0

    if options.blacklist:
        options.blacklist = parselistfiles(options.blacklist, 'blacklist')
    if options.whitelist:
        options.whitelisted = parselistfiles(options.whitelist, 'whitelist')
    else:
        options.whitelisted = {}

    if options.showchannels:
        options.nodiff = True

    return options


def rename(src, dst):
    """Like os.rename(), but trades atomicity and friendliness to open files
    for the ability to replace an existing destination.
    """
    shutil.copy(src, dst)
    os.remove(src)


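# Note on rename() above: copy-then-remove gives up atomicity, but unlike
# os.rename() on Windows it succeeds when dst already exists.
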
def makecleanable(path):
    """Try to fix directory permissions recursively so that the entire tree
    can be deleted"""
    for dirpath, dirnames, _filenames in os.walk(path, topdown=True):
        for d in dirnames:
            p = os.path.join(dirpath, d)
            try:
                os.chmod(p, os.stat(p).st_mode & 0o777 | 0o700)  # chmod u+rwx
            except OSError:
                pass


_unified_diff = difflib.unified_diff
if PYTHON3:
    import functools

    _unified_diff = functools.partial(difflib.diff_bytes, difflib.unified_diff)


def getdiff(expected, output, ref, err):
    servefail = False
    lines = []
    for line in _unified_diff(expected, output, ref, err):
        if line.startswith(b'+++') or line.startswith(b'---'):
            line = line.replace(b'\\', b'/')
            if line.endswith(b' \n'):
                line = line[:-2] + b'\n'
        lines.append(line)
        if not servefail and line.startswith(
            b'+ abort: child process failed to start'
        ):
            servefail = True

    return servefail, lines


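# Note on getdiff() above: the servefail flag singles out diffs containing
# '+ abort: child process failed to start', letting callers report a server
# startup failure instead of a plain output mismatch.
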
verbose = False


def vlog(*msg):
    """Log only when in verbose mode."""
    if verbose is False:
        return

    return log(*msg)


# Bytes that break XML even in a CDATA block: control characters 0-31
# sans \t, \n and \r
CDATA_EVIL = re.compile(br"[\000-\010\013\014\016-\037]")

# Match feature-conditionalized output lines of the form below, capturing the
# feature list in group 2, and the preceding line output in group 1:
#
# output..output (feature !)\n
optline = re.compile(br'(.*) \((.+?) !\)\n$')


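# e.g. optline matches b'  some output (rust !)\n', leaving b'  some output'
# in group 1 and b'rust' in group 2.
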
def cdatasafe(data):
    """Make a string safe to include in a CDATA block.

    Certain control characters are illegal in a CDATA block, and
    there's no way to include a ]]> in a CDATA either. This function
    replaces illegal bytes with ? and adds a space between the ]] so
    that it won't break the CDATA block.
    """
    return CDATA_EVIL.sub(b'?', data).replace(b']]>', b'] ]>')


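# e.g. cdatasafe(b'a\x07b]]>c') == b'a?b] ]>c': the control byte is masked
# with '?' and the ']]>' terminator is split so the enclosing CDATA block
# stays well-formed.
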
def log(*msg):
    """Log something to stdout.

    Arguments are strings to print.
    """
    with iolock:
        if verbose:
            print(verbose, end=' ')
        for m in msg:
            print(m, end=' ')
        print()
        sys.stdout.flush()


def highlightdiff(line, color):
    if not color:
        return line
    assert pygmentspresent
    return pygments.highlight(
        line.decode('latin1'), difflexer, terminal256formatter
    ).encode('latin1')


def highlightmsg(msg, color):
    if not color:
        return msg
    assert pygmentspresent
    return pygments.highlight(msg, runnerlexer, runnerformatter)


def terminate(proc):
    """Terminate subprocess"""
    vlog('# Terminating process %d' % proc.pid)
    try:
        proc.terminate()
    except OSError:
        pass


def killdaemons(pidfile):
    import killdaemons as killmod

    return killmod.killdaemons(pidfile, tryhard=False, remove=True, logfn=vlog)


class Test(unittest.TestCase):
    """Encapsulates a single, runnable test.

    While this class conforms to the unittest.TestCase API, it differs in that
    instances need to be instantiated manually. (Typically, unittest.TestCase
    classes are instantiated automatically by scanning modules.)
    """

    # Status code reserved for skipped tests (used by hghave).
    SKIPPED_STATUS = 80

    def __init__(
        self,
        path,
        outputdir,
        tmpdir,
        keeptmpdir=False,
        debug=False,
        first=False,
        timeout=None,
        startport=None,
        extraconfigopts=None,
        shell=None,
        hgcommand=None,
        slowtimeout=None,
        usechg=False,
        chgdebug=False,
        useipv6=False,
    ):
        """Create a test from parameters.

        path is the full path to the file defining the test.

        tmpdir is the main temporary directory to use for this test.

        keeptmpdir determines whether to keep the test's temporary directory
        after execution. It defaults to removal (False).

        debug mode will make the test execute verbosely, with unfiltered
        output.

        timeout controls the maximum run time of the test. It is ignored when
        debug is True. See slowtimeout for tests with #require slow.

        slowtimeout overrides timeout if the test has #require slow.

        startport controls the starting port number to use for this test. Each
        test will reserve 3 port numbers for execution. It is the caller's
        responsibility to allocate a non-overlapping port range to Test
        instances.

        extraconfigopts is an iterable of extra hgrc config options. Values
        must have the form "key=value" (something understood by hgrc). Values
        of the form "foo.key=value" will result in "[foo] key=value".

        shell is the shell to execute tests in.
        """
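        # e.g. extraconfigopts=['ui.interactive=off'] becomes an "[ui]"
        # section entry in the hgrc generated by _createhgrc() (called from
        # runTest() below).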
        if timeout is None:
            timeout = defaults['timeout']
        if startport is None:
            startport = defaults['port']
        if slowtimeout is None:
            slowtimeout = defaults['slowtimeout']
        self.path = path
        self.relpath = os.path.relpath(path)
        self.bname = os.path.basename(path)
        self.name = _bytes2sys(self.bname)
        self._testdir = os.path.dirname(path)
        self._outputdir = outputdir
        self._tmpname = os.path.basename(path)
        self.errpath = os.path.join(self._outputdir, b'%s.err' % self.bname)

        self._threadtmp = tmpdir
        self._keeptmpdir = keeptmpdir
        self._debug = debug
        self._first = first
        self._timeout = timeout
        self._slowtimeout = slowtimeout
        self._startport = startport
        self._extraconfigopts = extraconfigopts or []
        self._shell = _sys2bytes(shell)
        self._hgcommand = hgcommand or b'hg'
        self._usechg = usechg
        self._chgdebug = chgdebug
        self._useipv6 = useipv6

        self._aborted = False
        self._daemonpids = []
        self._finished = None
        self._ret = None
        self._out = None
        self._skipped = None
        self._testtmp = None
        self._chgsockdir = None

        self._refout = self.readrefout()

    def readrefout(self):
        """read reference output"""
        # If we're not in --debug mode and the reference output file exists,
        # check test output against it.
        if self._debug:
            return None  # to match "out is None"
        elif os.path.exists(self.refpath):
            with open(self.refpath, 'rb') as f:
                return f.read().splitlines(True)
        else:
            return []
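
    # Note on readrefout() above: splitlines(True) keeps the line endings, so
    # later comparisons against captured test output are byte-exact.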

    # needed to get base class __repr__ running
    @property
    def _testMethodName(self):
        return self.name

    def __str__(self):
        return self.name

    def shortDescription(self):
        return self.name

    def setUp(self):
        """Tasks to perform before run()."""
        self._finished = False
        self._ret = None
        self._out = None
        self._skipped = None

        try:
            os.mkdir(self._threadtmp)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

        name = self._tmpname
        self._testtmp = os.path.join(self._threadtmp, name)
        os.mkdir(self._testtmp)

        # Remove any previous output files.
        if os.path.exists(self.errpath):
            try:
                os.remove(self.errpath)
            except OSError as e:
                # We might have raced another test to clean up a .err
                # file, so ignore ENOENT when removing a previous .err
                # file.
                if e.errno != errno.ENOENT:
                    raise

        if self._usechg:
            self._chgsockdir = os.path.join(
                self._threadtmp, b'%s.chgsock' % name
            )
            os.mkdir(self._chgsockdir)

    def run(self, result):
        """Run this test and report results against a TestResult instance."""
        # This function is extremely similar to unittest.TestCase.run(). Once
        # we require Python 2.7 (or at least its version of unittest), this
        # function can largely go away.
        self._result = result
        result.startTest(self)
        try:
            try:
                self.setUp()
            except (KeyboardInterrupt, SystemExit):
                self._aborted = True
                raise
            except Exception:
                result.addError(self, sys.exc_info())
                return

            success = False
            try:
                self.runTest()
            except KeyboardInterrupt:
                self._aborted = True
                raise
            except unittest.SkipTest as e:
                result.addSkip(self, str(e))
                # The base class will have already counted this as a
                # test we "ran", but skipped tests should be excluded
                # from the count of tests run.
                result.testsRun -= 1
            except self.failureException as e:
                # This differs from unittest in that we don't capture
                # the stack trace. This is for historical reasons and
                # this decision could be revisited in the future,
                # especially for PythonTest instances.
                if result.addFailure(self, str(e)):
                    success = True
            except Exception:
                result.addError(self, sys.exc_info())
            else:
                success = True

            try:
                self.tearDown()
            except (KeyboardInterrupt, SystemExit):
                self._aborted = True
                raise
            except Exception:
                result.addError(self, sys.exc_info())
                success = False

            if success:
                result.addSuccess(self)
        finally:
            result.stopTest(self, interrupted=self._aborted)

    def runTest(self):
        """Run this test instance.

        This will return a tuple describing the result of the test.
        """
        env = self._getenv()
        self._genrestoreenv(env)
        self._daemonpids.append(env['DAEMON_PIDS'])
        self._createhgrc(env['HGRCPATH'])

        vlog('# Test', self.name)

        ret, out = self._run(env)
        self._finished = True
        self._ret = ret
        self._out = out

        def describe(ret):
            if ret < 0:
                return 'killed by signal: %d' % -ret
            return 'returned error code %d' % ret

        self._skipped = False

        if ret == self.SKIPPED_STATUS:
            if out is None:  # Debug mode, nothing to parse.
                missing = ['unknown']
                failed = None
            else:
                missing, failed = TTest.parsehghaveoutput(out)

            if not missing:
                missing = ['skipped']

            if failed:
                self.fail('hghave failed checking for %s' % failed[-1])
            else:
                self._skipped = True
                raise unittest.SkipTest(missing[-1])
        elif ret == 'timeout':
            self.fail('timed out')
        elif ret is False:
            self.fail('no result code from test')
        elif out != self._refout:
            # Diff generation may rely on written .err file.
            if (
                (ret != 0 or out != self._refout)
                and not self._skipped
                and not self._debug
            ):
                with open(self.errpath, 'wb') as f:
                    for line in out:
                        f.write(line)

            # The result object handles diff calculation for us.
            with firstlock:
                if self._result.addOutputMismatch(self, ret, out, self._refout):
                    # change was accepted, skip failing
                    return
                if self._first:
                    global firsterror
                    firsterror = True

            if ret:
                msg = 'output changed and ' + describe(ret)
            else:
                msg = 'output changed'

            self.fail(msg)
        elif ret:
            self.fail(describe(ret))
1283
1283
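# A minimal illustrative sketch (not part of run-tests.py) of the exit-status
# convention behind the describe() helper above: a negative return code from
# subprocess-style APIs means the child died from a signal. POSIX signal
# numbering is assumed (SIGTERM == 15).
import signal

def describe_status(ret):
    if ret < 0:
        return 'killed by signal: %d' % -ret
    return 'returned error code %d' % ret

assert describe_status(-signal.SIGTERM) == 'killed by signal: 15'
assert describe_status(1) == 'returned error code 1'
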
1284 def tearDown(self):
1284 def tearDown(self):
1285 """Tasks to perform after run()."""
1285 """Tasks to perform after run()."""
1286 for entry in self._daemonpids:
1286 for entry in self._daemonpids:
1287 killdaemons(entry)
1287 killdaemons(entry)
1288 self._daemonpids = []
1288 self._daemonpids = []
1289
1289
1290 if self._keeptmpdir:
1290 if self._keeptmpdir:
1291 log(
1291 log(
1292 '\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s'
1292 '\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s'
1293 % (
1293 % (
1294 _bytes2sys(self._testtmp),
1294 _bytes2sys(self._testtmp),
1295 _bytes2sys(self._threadtmp),
1295 _bytes2sys(self._threadtmp),
1296 )
1296 )
1297 )
1297 )
1298 else:
1298 else:
1299 try:
1299 try:
1300 shutil.rmtree(self._testtmp)
1300 shutil.rmtree(self._testtmp)
1301 except OSError:
1301 except OSError:
1302 # an unreadable directory may be left in $TESTTMP; fix permissions
1302 # an unreadable directory may be left in $TESTTMP; fix permissions
1303 # and try again
1303 # and try again
1304 makecleanable(self._testtmp)
1304 makecleanable(self._testtmp)
1305 shutil.rmtree(self._testtmp, True)
1305 shutil.rmtree(self._testtmp, True)
1306 shutil.rmtree(self._threadtmp, True)
1306 shutil.rmtree(self._threadtmp, True)
1307
1307
1308 if self._usechg:
1308 if self._usechg:
1309 # chgservers will stop automatically after they find the socket
1309 # chgservers will stop automatically after they find the socket
1310 # files are deleted
1310 # files are deleted
1311 shutil.rmtree(self._chgsockdir, True)
1311 shutil.rmtree(self._chgsockdir, True)
1312
1312
1313 if (
1313 if (
1314 (self._ret != 0 or self._out != self._refout)
1314 (self._ret != 0 or self._out != self._refout)
1315 and not self._skipped
1315 and not self._skipped
1316 and not self._debug
1316 and not self._debug
1317 and self._out
1317 and self._out
1318 ):
1318 ):
1319 with open(self.errpath, 'wb') as f:
1319 with open(self.errpath, 'wb') as f:
1320 for line in self._out:
1320 for line in self._out:
1321 f.write(line)
1321 f.write(line)
1322
1322
1323 vlog("# Ret was:", self._ret, '(%s)' % self.name)
1323 vlog("# Ret was:", self._ret, '(%s)' % self.name)
1324
1324
1325 def _run(self, env):
1325 def _run(self, env):
1326 # This should be implemented in child classes to run tests.
1326 # This should be implemented in child classes to run tests.
1327 raise unittest.SkipTest('unknown test type')
1327 raise unittest.SkipTest('unknown test type')
1328
1328
1329 def abort(self):
1329 def abort(self):
1330 """Terminate execution of this test."""
1330 """Terminate execution of this test."""
1331 self._aborted = True
1331 self._aborted = True
1332
1332
1333 def _portmap(self, i):
1333 def _portmap(self, i):
1334 offset = b'' if i == 0 else b'%d' % i
1334 offset = b'' if i == 0 else b'%d' % i
1335 return (br':%d\b' % (self._startport + i), b':$HGPORT%s' % offset)
1335 return (br':%d\b' % (self._startport + i), b':$HGPORT%s' % offset)
1336
1336
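# Illustrative sketch of how the _portmap() pairs above normalize test output:
# raw port numbers are rewritten to stable $HGPORT placeholders. The start
# port (20000) is a hypothetical value; the real one comes from the runner.
import re

startport = 20000
pairs = [
    (br':%d\b' % (startport + i), b':$HGPORT%s' % (b'' if i == 0 else b'%d' % i))
    for i in range(3)
]
line = b'listening at http://localhost:20001/'
for pat, repl in pairs:
    line = re.sub(pat, repl, line)
assert line == b'listening at http://localhost:$HGPORT1/'
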
1337 def _getreplacements(self):
1337 def _getreplacements(self):
1338 """Obtain a mapping of text replacements to apply to test output.
1338 """Obtain a mapping of text replacements to apply to test output.
1339
1339
1340 Test output needs to be normalized so it can be compared to expected
1340 Test output needs to be normalized so it can be compared to expected
1341 output. This function defines how some of that normalization will
1341 output. This function defines how some of that normalization will
1342 occur.
1342 occur.
1343 """
1343 """
1344 r = [
1344 r = [
1345 # This list should be parallel to defineport in _getenv
1345 # This list should be parallel to defineport in _getenv
1346 self._portmap(0),
1346 self._portmap(0),
1347 self._portmap(1),
1347 self._portmap(1),
1348 self._portmap(2),
1348 self._portmap(2),
1349 (br'([^0-9])%s' % re.escape(self._localip()), br'\1$LOCALIP'),
1349 (br'([^0-9])%s' % re.escape(self._localip()), br'\1$LOCALIP'),
1350 (br'\bHG_TXNID=TXN:[a-f0-9]{40}\b', br'HG_TXNID=TXN:$ID$'),
1350 (br'\bHG_TXNID=TXN:[a-f0-9]{40}\b', br'HG_TXNID=TXN:$ID$'),
1351 ]
1351 ]
1352 r.append((self._escapepath(self._testtmp), b'$TESTTMP'))
1352 r.append((self._escapepath(self._testtmp), b'$TESTTMP'))
1353 if WINDOWS:
1353 if WINDOWS:
1354 # JSON output escapes backslashes in Windows paths, so also catch a
1354 # JSON output escapes backslashes in Windows paths, so also catch a
1355 # double-escape.
1355 # double-escape.
1356 replaced = self._testtmp.replace(b'\\', br'\\')
1356 replaced = self._testtmp.replace(b'\\', br'\\')
1357 r.append((self._escapepath(replaced), b'$STR_REPR_TESTTMP'))
1357 r.append((self._escapepath(replaced), b'$STR_REPR_TESTTMP'))
1358
1358
1359 replacementfile = os.path.join(self._testdir, b'common-pattern.py')
1359 replacementfile = os.path.join(self._testdir, b'common-pattern.py')
1360
1360
1361 if os.path.exists(replacementfile):
1361 if os.path.exists(replacementfile):
1362 data = {}
1362 data = {}
1363 with open(replacementfile, mode='rb') as source:
1363 with open(replacementfile, mode='rb') as source:
1364 # the intermediate 'compile' step helps with debugging
1364 # the intermediate 'compile' step helps with debugging
1365 code = compile(source.read(), replacementfile, 'exec')
1365 code = compile(source.read(), replacementfile, 'exec')
1366 exec(code, data)
1366 exec(code, data)
1367 for value in data.get('substitutions', ()):
1367 for value in data.get('substitutions', ()):
1368 if len(value) != 2:
1368 if len(value) != 2:
1369 msg = 'malformed substitution in %s: %r'
1369 msg = 'malformed substitution in %s: %r'
1370 msg %= (replacementfile, value)
1370 msg %= (replacementfile, value)
1371 raise ValueError(msg)
1371 raise ValueError(msg)
1372 r.append(value)
1372 r.append(value)
1373 return r
1373 return r
1374
1374
1375 def _escapepath(self, p):
1375 def _escapepath(self, p):
1376 if WINDOWS:
1376 if WINDOWS:
1377 return b''.join(
1377 return b''.join(
1378 c.isalpha()
1378 c.isalpha()
1379 and b'[%s%s]' % (c.lower(), c.upper())
1379 and b'[%s%s]' % (c.lower(), c.upper())
1380 or c in b'/\\'
1380 or c in b'/\\'
1381 and br'[/\\]'
1381 and br'[/\\]'
1382 or c.isdigit()
1382 or c.isdigit()
1383 and c
1383 and c
1384 or b'\\' + c
1384 or b'\\' + c
1385 for c in [p[i : i + 1] for i in range(len(p))]
1385 for c in [p[i : i + 1] for i in range(len(p))]
1386 )
1386 )
1387 else:
1387 else:
1388 return re.escape(p)
1388 return re.escape(p)
1389
1389
1390 def _localip(self):
1390 def _localip(self):
1391 if self._useipv6:
1391 if self._useipv6:
1392 return b'::1'
1392 return b'::1'
1393 else:
1393 else:
1394 return b'127.0.0.1'
1394 return b'127.0.0.1'
1395
1395
1396 def _genrestoreenv(self, testenv):
1396 def _genrestoreenv(self, testenv):
1397 """Generate a script that can be used by tests to restore the original
1397 """Generate a script that can be used by tests to restore the original
1398 environment."""
1398 environment."""
1399 # Put the restoreenv script inside self._threadtmp
1399 # Put the restoreenv script inside self._threadtmp
1400 scriptpath = os.path.join(self._threadtmp, b'restoreenv.sh')
1400 scriptpath = os.path.join(self._threadtmp, b'restoreenv.sh')
1401 testenv['HGTEST_RESTOREENV'] = _bytes2sys(scriptpath)
1401 testenv['HGTEST_RESTOREENV'] = _bytes2sys(scriptpath)
1402
1402
1403 # Only restore environment variable names that the shell allows
1403 # Only restore environment variable names that the shell allows
1404 # us to export.
1404 # us to export.
1405 name_regex = re.compile('^[a-zA-Z][a-zA-Z0-9_]*$')
1405 name_regex = re.compile('^[a-zA-Z][a-zA-Z0-9_]*$')
1406
1406
1407 # Do not restore these variables; otherwise tests would fail.
1407 # Do not restore these variables; otherwise tests would fail.
1408 reqnames = {'PYTHON', 'TESTDIR', 'TESTTMP'}
1408 reqnames = {'PYTHON', 'TESTDIR', 'TESTTMP'}
1409
1409
1410 with open(scriptpath, 'w') as envf:
1410 with open(scriptpath, 'w') as envf:
1411 for name, value in origenviron.items():
1411 for name, value in origenviron.items():
1412 if not name_regex.match(name):
1412 if not name_regex.match(name):
1413 # Skip environment variables with unusual names not
1413 # Skip environment variables with unusual names not
1414 # allowed by most shells.
1414 # allowed by most shells.
1415 continue
1415 continue
1416 if name in reqnames:
1416 if name in reqnames:
1417 continue
1417 continue
1418 envf.write('%s=%s\n' % (name, shellquote(value)))
1418 envf.write('%s=%s\n' % (name, shellquote(value)))
1419
1419
1420 for name in testenv:
1420 for name in testenv:
1421 if name in origenviron or name in reqnames:
1421 if name in origenviron or name in reqnames:
1422 continue
1422 continue
1423 envf.write('unset %s\n' % (name,))
1423 envf.write('unset %s\n' % (name,))
1424
1424
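# Hedged sketch of what the generated restoreenv.sh amounts to: one shell-
# quoted NAME=value line per saved variable, plus "unset NAME" for variables
# the harness introduced. A test can roll the environment back by sourcing
# "$HGTEST_RESTOREENV". The dictionaries below are assumed example data.
from shlex import quote

saved_environ = {'PATH': '/usr/bin', 'LANG': 'en_US.UTF-8'}
test_environ = {'PATH': '/usr/bin', 'HGPORT': '20000'}

script = ['%s=%s' % (k, quote(v)) for k, v in saved_environ.items()]
script += ['unset %s' % k for k in test_environ if k not in saved_environ]
assert 'unset HGPORT' in script
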
1425 def _getenv(self):
1425 def _getenv(self):
1426 """Obtain environment variables to use during test execution."""
1426 """Obtain environment variables to use during test execution."""
1427
1427
1428 def defineport(i):
1428 def defineport(i):
1429 offset = '' if i == 0 else '%s' % i
1429 offset = '' if i == 0 else '%s' % i
1430 env["HGPORT%s" % offset] = '%s' % (self._startport + i)
1430 env["HGPORT%s" % offset] = '%s' % (self._startport + i)
1431
1431
1432 env = os.environ.copy()
1432 env = os.environ.copy()
1433 env['PYTHONUSERBASE'] = sysconfig.get_config_var('userbase') or ''
1433 env['PYTHONUSERBASE'] = sysconfig.get_config_var('userbase') or ''
1434 env['HGEMITWARNINGS'] = '1'
1434 env['HGEMITWARNINGS'] = '1'
1435 env['TESTTMP'] = _bytes2sys(self._testtmp)
1435 env['TESTTMP'] = _bytes2sys(self._testtmp)
1436 uid_file = os.path.join(_bytes2sys(self._testtmp), 'UID')
1436 uid_file = os.path.join(_bytes2sys(self._testtmp), 'UID')
1437 env['HGTEST_UUIDFILE'] = uid_file
1437 env['HGTEST_UUIDFILE'] = uid_file
1438 env['TESTNAME'] = self.name
1438 env['TESTNAME'] = self.name
1439 env['HOME'] = _bytes2sys(self._testtmp)
1439 env['HOME'] = _bytes2sys(self._testtmp)
1440 if WINDOWS:
1440 if WINDOWS:
1441 env['REALUSERPROFILE'] = env['USERPROFILE']
1441 env['REALUSERPROFILE'] = env['USERPROFILE']
1442 # py3.8+ ignores HOME: https://bugs.python.org/issue36264
1442 # py3.8+ ignores HOME: https://bugs.python.org/issue36264
1443 env['USERPROFILE'] = env['HOME']
1443 env['USERPROFILE'] = env['HOME']
1444 formated_timeout = _bytes2sys(b"%d" % default_defaults['timeout'][1])
1444 formated_timeout = _bytes2sys(b"%d" % default_defaults['timeout'][1])
1445 env['HGTEST_TIMEOUT_DEFAULT'] = formated_timeout
1445 env['HGTEST_TIMEOUT_DEFAULT'] = formated_timeout
1446 env['HGTEST_TIMEOUT'] = _bytes2sys(b"%d" % self._timeout)
1446 env['HGTEST_TIMEOUT'] = _bytes2sys(b"%d" % self._timeout)
1447 # This number should match portneeded in _getport
1447 # This number should match portneeded in _getport
1448 for port in xrange(3):
1448 for port in xrange(3):
1449 # This list should be parallel to _portmap in _getreplacements
1449 # This list should be parallel to _portmap in _getreplacements
1450 defineport(port)
1450 defineport(port)
1451 env["HGRCPATH"] = _bytes2sys(os.path.join(self._threadtmp, b'.hgrc'))
1451 env["HGRCPATH"] = _bytes2sys(os.path.join(self._threadtmp, b'.hgrc'))
1452 env["DAEMON_PIDS"] = _bytes2sys(
1452 env["DAEMON_PIDS"] = _bytes2sys(
1453 os.path.join(self._threadtmp, b'daemon.pids')
1453 os.path.join(self._threadtmp, b'daemon.pids')
1454 )
1454 )
1455 env["HGEDITOR"] = (
1455 env["HGEDITOR"] = (
1456 '"' + sysexecutable + '"' + ' -c "import sys; sys.exit(0)"'
1456 '"' + sysexecutable + '"' + ' -c "import sys; sys.exit(0)"'
1457 )
1457 )
1458 env["HGUSER"] = "test"
1458 env["HGUSER"] = "test"
1459 env["HGENCODING"] = "ascii"
1459 env["HGENCODING"] = "ascii"
1460 env["HGENCODINGMODE"] = "strict"
1460 env["HGENCODINGMODE"] = "strict"
1461 env["HGHOSTNAME"] = "test-hostname"
1461 env["HGHOSTNAME"] = "test-hostname"
1462 env['HGIPV6'] = str(int(self._useipv6))
1462 env['HGIPV6'] = str(int(self._useipv6))
1463 # See contrib/catapipe.py for how to use this functionality.
1463 # See contrib/catapipe.py for how to use this functionality.
1464 if 'HGTESTCATAPULTSERVERPIPE' not in env:
1464 if 'HGTESTCATAPULTSERVERPIPE' not in env:
1465 # If we don't have HGTESTCATAPULTSERVERPIPE explicitly set, pull the
1465 # If we don't have HGTESTCATAPULTSERVERPIPE explicitly set, pull the
1466 # non-test one in as a default, otherwise set to devnull
1466 # non-test one in as a default, otherwise set to devnull
1467 env['HGTESTCATAPULTSERVERPIPE'] = env.get(
1467 env['HGTESTCATAPULTSERVERPIPE'] = env.get(
1468 'HGCATAPULTSERVERPIPE', os.devnull
1468 'HGCATAPULTSERVERPIPE', os.devnull
1469 )
1469 )
1470
1470
1471 extraextensions = []
1471 extraextensions = []
1472 for opt in self._extraconfigopts:
1472 for opt in self._extraconfigopts:
1473 section, key = opt.split('.', 1)
1473 section, key = opt.split('.', 1)
1474 if section != 'extensions':
1474 if section != 'extensions':
1475 continue
1475 continue
1476 name = key.split('=', 1)[0]
1476 name = key.split('=', 1)[0]
1477 extraextensions.append(name)
1477 extraextensions.append(name)
1478
1478
1479 if extraextensions:
1479 if extraextensions:
1480 env['HGTESTEXTRAEXTENSIONS'] = ' '.join(extraextensions)
1480 env['HGTESTEXTRAEXTENSIONS'] = ' '.join(extraextensions)
1481
1481
1482 # LOCALIP could be ::1 or 127.0.0.1. Useful for tests that require raw
1482 # LOCALIP could be ::1 or 127.0.0.1. Useful for tests that require raw
1483 # IP addresses.
1483 # IP addresses.
1484 env['LOCALIP'] = _bytes2sys(self._localip())
1484 env['LOCALIP'] = _bytes2sys(self._localip())
1485
1485
1486 # This has the same effect as Py_LegacyWindowsStdioFlag in exewrapper.c,
1486 # This has the same effect as Py_LegacyWindowsStdioFlag in exewrapper.c,
1487 # but this is needed for testing Python scripts like dummyssh,
1487 # but this is needed for testing Python scripts like dummyssh,
1488 # dummysmtpd.py, and dumbhttp.py.
1488 # dummysmtpd.py, and dumbhttp.py.
1489 if PYTHON3 and WINDOWS:
1489 if PYTHON3 and WINDOWS:
1490 env['PYTHONLEGACYWINDOWSSTDIO'] = '1'
1490 env['PYTHONLEGACYWINDOWSSTDIO'] = '1'
1491
1491
1492 # Modified HOME in test environment can confuse Rust tools. So set
1492 # Modified HOME in test environment can confuse Rust tools. So set
1493 # CARGO_HOME and RUSTUP_HOME automatically if a Rust toolchain is
1493 # CARGO_HOME and RUSTUP_HOME automatically if a Rust toolchain is
1494 # present and these variables aren't already defined.
1494 # present and these variables aren't already defined.
1495 cargo_home_path = os.path.expanduser('~/.cargo')
1495 cargo_home_path = os.path.expanduser('~/.cargo')
1496 rustup_home_path = os.path.expanduser('~/.rustup')
1496 rustup_home_path = os.path.expanduser('~/.rustup')
1497
1497
1498 if os.path.exists(cargo_home_path) and b'CARGO_HOME' not in osenvironb:
1498 if os.path.exists(cargo_home_path) and b'CARGO_HOME' not in osenvironb:
1499 env['CARGO_HOME'] = cargo_home_path
1499 env['CARGO_HOME'] = cargo_home_path
1500 if (
1500 if (
1501 os.path.exists(rustup_home_path)
1501 os.path.exists(rustup_home_path)
1502 and b'RUSTUP_HOME' not in osenvironb
1502 and b'RUSTUP_HOME' not in osenvironb
1503 ):
1503 ):
1504 env['RUSTUP_HOME'] = rustup_home_path
1504 env['RUSTUP_HOME'] = rustup_home_path
1505
1505
1506 # Reset some environment variables to well-known values so that
1506 # Reset some environment variables to well-known values so that
1507 # the tests produce repeatable output.
1507 # the tests produce repeatable output.
1508 env['LANG'] = env['LC_ALL'] = env['LANGUAGE'] = 'C'
1508 env['LANG'] = env['LC_ALL'] = env['LANGUAGE'] = 'C'
1509 env['TZ'] = 'GMT'
1509 env['TZ'] = 'GMT'
1510 env["EMAIL"] = "Foo Bar <foo.bar@example.com>"
1510 env["EMAIL"] = "Foo Bar <foo.bar@example.com>"
1511 env['COLUMNS'] = '80'
1511 env['COLUMNS'] = '80'
1512 env['TERM'] = 'xterm'
1512 env['TERM'] = 'xterm'
1513
1513
1514 dropped = [
1514 dropped = [
1515 'CDPATH',
1515 'CDPATH',
1516 'CHGDEBUG',
1516 'CHGDEBUG',
1517 'EDITOR',
1517 'EDITOR',
1518 'GREP_OPTIONS',
1518 'GREP_OPTIONS',
1519 'HG',
1519 'HG',
1520 'HGMERGE',
1520 'HGMERGE',
1521 'HGPLAIN',
1521 'HGPLAIN',
1522 'HGPLAINEXCEPT',
1522 'HGPLAINEXCEPT',
1523 'HGPROF',
1523 'HGPROF',
1524 'http_proxy',
1524 'http_proxy',
1525 'no_proxy',
1525 'no_proxy',
1526 'NO_PROXY',
1526 'NO_PROXY',
1527 'PAGER',
1527 'PAGER',
1528 'VISUAL',
1528 'VISUAL',
1529 ]
1529 ]
1530
1530
1531 for k in dropped:
1531 for k in dropped:
1532 if k in env:
1532 if k in env:
1533 del env[k]
1533 del env[k]
1534
1534
1535 # unset environment variables related to hooks
1535 # unset environment variables related to hooks
1536 for k in list(env):
1536 for k in list(env):
1537 if k.startswith('HG_'):
1537 if k.startswith('HG_'):
1538 del env[k]
1538 del env[k]
1539
1539
1540 if self._usechg:
1540 if self._usechg:
1541 env['CHGSOCKNAME'] = os.path.join(self._chgsockdir, b'server')
1541 env['CHGSOCKNAME'] = os.path.join(self._chgsockdir, b'server')
1542 if self._chgdebug:
1542 if self._chgdebug:
1543 env['CHGDEBUG'] = 'true'
1543 env['CHGDEBUG'] = 'true'
1544
1544
1545 return env
1545 return env
1546
1546
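# Sketch of the defineport() naming scheme used in _getenv() above: the first
# port gets no suffix, later ones are numbered, matching _portmap() in
# _getreplacements(). The start port is a hypothetical example value.
env = {}
startport = 20000
for i in range(3):
    offset = '' if i == 0 else '%s' % i
    env['HGPORT%s' % offset] = '%s' % (startport + i)
assert env == {'HGPORT': '20000', 'HGPORT1': '20001', 'HGPORT2': '20002'}
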
1547 def _createhgrc(self, path):
1547 def _createhgrc(self, path):
1548 """Create an hgrc file for this test."""
1548 """Create an hgrc file for this test."""
1549 with open(path, 'wb') as hgrc:
1549 with open(path, 'wb') as hgrc:
1550 hgrc.write(b'[ui]\n')
1550 hgrc.write(b'[ui]\n')
1551 hgrc.write(b'slash = True\n')
1551 hgrc.write(b'slash = True\n')
1552 hgrc.write(b'interactive = False\n')
1552 hgrc.write(b'interactive = False\n')
1553 hgrc.write(b'detailed-exit-code = True\n')
1553 hgrc.write(b'detailed-exit-code = True\n')
1554 hgrc.write(b'merge = internal:merge\n')
1554 hgrc.write(b'merge = internal:merge\n')
1555 hgrc.write(b'mergemarkers = detailed\n')
1555 hgrc.write(b'mergemarkers = detailed\n')
1556 hgrc.write(b'promptecho = True\n')
1556 hgrc.write(b'promptecho = True\n')
1557 dummyssh = os.path.join(self._testdir, b'dummyssh')
1557 dummyssh = os.path.join(self._testdir, b'dummyssh')
1558 hgrc.write(b'ssh = "%s" "%s"\n' % (PYTHON, dummyssh))
1558 hgrc.write(b'ssh = "%s" "%s"\n' % (PYTHON, dummyssh))
1559 hgrc.write(b'timeout.warn=15\n')
1559 hgrc.write(b'timeout.warn=15\n')
1560 hgrc.write(b'[chgserver]\n')
1560 hgrc.write(b'[chgserver]\n')
1561 hgrc.write(b'idletimeout=60\n')
1561 hgrc.write(b'idletimeout=60\n')
1562 hgrc.write(b'[defaults]\n')
1562 hgrc.write(b'[defaults]\n')
1563 hgrc.write(b'[devel]\n')
1563 hgrc.write(b'[devel]\n')
1564 hgrc.write(b'all-warnings = true\n')
1564 hgrc.write(b'all-warnings = true\n')
1565 hgrc.write(b'default-date = 0 0\n')
1565 hgrc.write(b'default-date = 0 0\n')
1566 hgrc.write(b'[largefiles]\n')
1566 hgrc.write(b'[largefiles]\n')
1567 hgrc.write(
1567 hgrc.write(
1568 b'usercache = %s\n'
1568 b'usercache = %s\n'
1569 % (os.path.join(self._testtmp, b'.cache/largefiles'))
1569 % (os.path.join(self._testtmp, b'.cache/largefiles'))
1570 )
1570 )
1571 hgrc.write(b'[lfs]\n')
1571 hgrc.write(b'[lfs]\n')
1572 hgrc.write(
1572 hgrc.write(
1573 b'usercache = %s\n'
1573 b'usercache = %s\n'
1574 % (os.path.join(self._testtmp, b'.cache/lfs'))
1574 % (os.path.join(self._testtmp, b'.cache/lfs'))
1575 )
1575 )
1576 hgrc.write(b'[web]\n')
1576 hgrc.write(b'[web]\n')
1577 hgrc.write(b'address = localhost\n')
1577 hgrc.write(b'address = localhost\n')
1578 hgrc.write(b'ipv6 = %r\n' % self._useipv6)
1578 hgrc.write(b'ipv6 = %r\n' % self._useipv6)
1579 hgrc.write(b'server-header = testing stub value\n')
1579 hgrc.write(b'server-header = testing stub value\n')
1580
1580
1581 for opt in self._extraconfigopts:
1581 for opt in self._extraconfigopts:
1582 section, key = _sys2bytes(opt).split(b'.', 1)
1582 section, key = _sys2bytes(opt).split(b'.', 1)
1583 assert b'=' in key, (
1583 assert b'=' in key, (
1584 'extra config opt %s must ' 'have an = for assignment' % opt
1584 'extra config opt %s must ' 'have an = for assignment' % opt
1585 )
1585 )
1586 hgrc.write(b'[%s]\n%s\n' % (section, key))
1586 hgrc.write(b'[%s]\n%s\n' % (section, key))
1587
1587
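# Sketch of how one extra config option is appended to the generated hgrc
# above: "section.key=value" becomes an ini section header followed by the
# key=value line. The option string is a hypothetical example.
opt = b'web.allow-push=*'
section, key = opt.split(b'.', 1)
assert b'[%s]\n%s\n' % (section, key) == b'[web]\nallow-push=*\n'
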
1588 def fail(self, msg):
1588 def fail(self, msg):
1589 # unittest differentiates between errored and failed.
1589 # unittest differentiates between errored and failed.
1590 # Failed is denoted by AssertionError (by default at least).
1590 # Failed is denoted by AssertionError (by default at least).
1591 raise AssertionError(msg)
1591 raise AssertionError(msg)
1592
1592
1593 def _runcommand(self, cmd, env, normalizenewlines=False):
1593 def _runcommand(self, cmd, env, normalizenewlines=False):
1594 """Run command in a sub-process, capturing the output (stdout and
1594 """Run command in a sub-process, capturing the output (stdout and
1595 stderr).
1595 stderr).
1596
1596
1597 Return a tuple (exitcode, output). output is None in debug mode.
1597 Return a tuple (exitcode, output). output is None in debug mode.
1598 """
1598 """
1599 if self._debug:
1599 if self._debug:
1600 proc = subprocess.Popen(
1600 proc = subprocess.Popen(
1601 _bytes2sys(cmd),
1601 _bytes2sys(cmd),
1602 shell=True,
1602 shell=True,
1603 close_fds=closefds,
1603 close_fds=closefds,
1604 cwd=_bytes2sys(self._testtmp),
1604 cwd=_bytes2sys(self._testtmp),
1605 env=env,
1605 env=env,
1606 )
1606 )
1607 ret = proc.wait()
1607 ret = proc.wait()
1608 return (ret, None)
1608 return (ret, None)
1609
1609
1610 proc = Popen4(cmd, self._testtmp, self._timeout, env)
1610 proc = Popen4(cmd, self._testtmp, self._timeout, env)
1611
1611
1612 def cleanup():
1612 def cleanup():
1613 terminate(proc)
1613 terminate(proc)
1614 ret = proc.wait()
1614 ret = proc.wait()
1615 if ret == 0:
1615 if ret == 0:
1616 ret = signal.SIGTERM << 8
1616 ret = signal.SIGTERM << 8
1617 killdaemons(env['DAEMON_PIDS'])
1617 killdaemons(env['DAEMON_PIDS'])
1618 return ret
1618 return ret
1619
1619
1620 proc.tochild.close()
1620 proc.tochild.close()
1621
1621
1622 try:
1622 try:
1623 output = proc.fromchild.read()
1623 output = proc.fromchild.read()
1624 except KeyboardInterrupt:
1624 except KeyboardInterrupt:
1625 vlog('# Handling keyboard interrupt')
1625 vlog('# Handling keyboard interrupt')
1626 cleanup()
1626 cleanup()
1627 raise
1627 raise
1628
1628
1629 ret = proc.wait()
1629 ret = proc.wait()
1630 if wifexited(ret):
1630 if wifexited(ret):
1631 ret = os.WEXITSTATUS(ret)
1631 ret = os.WEXITSTATUS(ret)
1632
1632
1633 if proc.timeout:
1633 if proc.timeout:
1634 ret = 'timeout'
1634 ret = 'timeout'
1635
1635
1636 if ret:
1636 if ret:
1637 killdaemons(env['DAEMON_PIDS'])
1637 killdaemons(env['DAEMON_PIDS'])
1638
1638
1639 for s, r in self._getreplacements():
1639 for s, r in self._getreplacements():
1640 output = re.sub(s, r, output)
1640 output = re.sub(s, r, output)
1641
1641
1642 if normalizenewlines:
1642 if normalizenewlines:
1643 output = output.replace(b'\r\n', b'\n')
1643 output = output.replace(b'\r\n', b'\n')
1644
1644
1645 return ret, output.splitlines(True)
1645 return ret, output.splitlines(True)
1646
1646
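# Sketch (POSIX wait-status semantics assumed; not available on Windows) of
# why cleanup() above synthesizes "signal.SIGTERM << 8": raw status words
# pack the exit code into the high byte, and wifexited()/os.WEXITSTATUS()
# unpack it again.
import os

status = 3 << 8  # a child that exited normally with code 3
if hasattr(os, 'WIFEXITED') and os.WIFEXITED(status):
    assert os.WEXITSTATUS(status) == 3
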
1647
1647
1648 class PythonTest(Test):
1648 class PythonTest(Test):
1649 """A Python-based test."""
1649 """A Python-based test."""
1650
1650
1651 @property
1651 @property
1652 def refpath(self):
1652 def refpath(self):
1653 return os.path.join(self._testdir, b'%s.out' % self.bname)
1653 return os.path.join(self._testdir, b'%s.out' % self.bname)
1654
1654
1655 def _run(self, env):
1655 def _run(self, env):
1656 # Quote the python(3) executable for Windows
1656 # Quote the python(3) executable for Windows
1657 cmd = b'"%s" "%s"' % (PYTHON, self.path)
1657 cmd = b'"%s" "%s"' % (PYTHON, self.path)
1658 vlog("# Running", cmd.decode("utf-8"))
1658 vlog("# Running", cmd.decode("utf-8"))
1659 result = self._runcommand(cmd, env, normalizenewlines=WINDOWS)
1659 result = self._runcommand(cmd, env, normalizenewlines=WINDOWS)
1660 if self._aborted:
1660 if self._aborted:
1661 raise KeyboardInterrupt()
1661 raise KeyboardInterrupt()
1662
1662
1663 return result
1663 return result
1664
1664
1665
1665
1666 # Some glob patterns apply only in some circumstances, so the script
1666 # Some glob patterns apply only in some circumstances, so the script
1667 # might want to remove (glob) annotations that otherwise should be
1667 # might want to remove (glob) annotations that otherwise should be
1668 # retained.
1668 # retained.
1669 checkcodeglobpats = [
1669 checkcodeglobpats = [
1670 # On Windows it looks like \ doesn't require a (glob), but we know
1670 # On Windows it looks like \ doesn't require a (glob), but we know
1671 # better.
1671 # better.
1672 re.compile(br'^pushing to \$TESTTMP/.*[^)]$'),
1672 re.compile(br'^pushing to \$TESTTMP/.*[^)]$'),
1673 re.compile(br'^moving \S+/.*[^)]$'),
1673 re.compile(br'^moving \S+/.*[^)]$'),
1674 re.compile(br'^pulling from \$TESTTMP/.*[^)]$'),
1674 re.compile(br'^pulling from \$TESTTMP/.*[^)]$'),
1675 # Not all platforms have 127.0.0.1 as loopback (though most do),
1675 # Not all platforms have 127.0.0.1 as loopback (though most do),
1676 # so we always glob that too.
1676 # so we always glob that too.
1677 re.compile(br'.*\$LOCALIP.*$'),
1677 re.compile(br'.*\$LOCALIP.*$'),
1678 ]
1678 ]
1679
1679
1680 bchr = chr
1680 bchr = chr
1681 if PYTHON3:
1681 if PYTHON3:
1682 bchr = lambda x: bytes([x])
1682 bchr = lambda x: bytes([x])
1683
1683
1684 WARN_UNDEFINED = 1
1684 WARN_UNDEFINED = 1
1685 WARN_YES = 2
1685 WARN_YES = 2
1686 WARN_NO = 3
1686 WARN_NO = 3
1687
1687
1688 MARK_OPTIONAL = b" (?)\n"
1688 MARK_OPTIONAL = b" (?)\n"
1689
1689
1690
1690
1691 def isoptional(line):
1691 def isoptional(line):
1692 return line.endswith(MARK_OPTIONAL)
1692 return line.endswith(MARK_OPTIONAL)
1693
1693
1694
1694
1695 class TTest(Test):
1695 class TTest(Test):
1696 """A "t test" is a test backed by a .t file."""
1696 """A "t test" is a test backed by a .t file."""
1697
1697
1698 SKIPPED_PREFIX = b'skipped: '
1698 SKIPPED_PREFIX = b'skipped: '
1699 FAILED_PREFIX = b'hghave check failed: '
1699 FAILED_PREFIX = b'hghave check failed: '
1700 NEEDESCAPE = re.compile(br'[\x00-\x08\x0b-\x1f\x7f-\xff]').search
1700 NEEDESCAPE = re.compile(br'[\x00-\x08\x0b-\x1f\x7f-\xff]').search
1701
1701
1702 ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub
1702 ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub
1703 ESCAPEMAP = {bchr(i): br'\x%02x' % i for i in range(256)}
1703 ESCAPEMAP = {bchr(i): br'\x%02x' % i for i in range(256)}
1704 ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'})
1704 ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'})
1705
1705
1706 def __init__(self, path, *args, **kwds):
1706 def __init__(self, path, *args, **kwds):
1707 # accept an extra "case" parameter
1707 # accept an extra "case" parameter
1708 case = kwds.pop('case', [])
1708 case = kwds.pop('case', [])
1709 self._case = case
1709 self._case = case
1710 self._allcases = {x for y in parsettestcases(path) for x in y}
1710 self._allcases = {x for y in parsettestcases(path) for x in y}
1711 super(TTest, self).__init__(path, *args, **kwds)
1711 super(TTest, self).__init__(path, *args, **kwds)
1712 if case:
1712 if case:
1713 casepath = b'#'.join(case)
1713 casepath = b'#'.join(case)
1714 self.name = '%s#%s' % (self.name, _bytes2sys(casepath))
1714 self.name = '%s#%s' % (self.name, _bytes2sys(casepath))
1715 self.errpath = b'%s#%s.err' % (self.errpath[:-4], casepath)
1715 self.errpath = b'%s#%s.err' % (self.errpath[:-4], casepath)
1716 self._tmpname += b'-%s' % casepath.replace(b'#', b'-')
1716 self._tmpname += b'-%s' % casepath.replace(b'#', b'-')
1717 self._have = {}
1717 self._have = {}
1718
1718
1719 @property
1719 @property
1720 def refpath(self):
1720 def refpath(self):
1721 return os.path.join(self._testdir, self.bname)
1721 return os.path.join(self._testdir, self.bname)
1722
1722
1723 def _run(self, env):
1723 def _run(self, env):
1724 with open(self.path, 'rb') as f:
1724 with open(self.path, 'rb') as f:
1725 lines = f.readlines()
1725 lines = f.readlines()
1726
1726
1727 # The .t file is both the reference output and the test input; keep the
1727 # The .t file is both the reference output and the test input; keep the
1728 # reference output updated with the test input. This avoids some race
1728 # reference output updated with the test input. This avoids some race
1729 # conditions where the reference output does not match the actual test.
1729 # conditions where the reference output does not match the actual test.
1730 if self._refout is not None:
1730 if self._refout is not None:
1731 self._refout = lines
1731 self._refout = lines
1732
1732
1733 salt, script, after, expected = self._parsetest(lines)
1733 salt, script, after, expected = self._parsetest(lines)
1734
1734
1735 # Write out the generated script.
1735 # Write out the generated script.
1736 fname = b'%s.sh' % self._testtmp
1736 fname = b'%s.sh' % self._testtmp
1737 with open(fname, 'wb') as f:
1737 with open(fname, 'wb') as f:
1738 for l in script:
1738 for l in script:
1739 f.write(l)
1739 f.write(l)
1740
1740
1741 cmd = b'%s "%s"' % (self._shell, fname)
1741 cmd = b'%s "%s"' % (self._shell, fname)
1742 vlog("# Running", cmd.decode("utf-8"))
1742 vlog("# Running", cmd.decode("utf-8"))
1743
1743
1744 exitcode, output = self._runcommand(cmd, env)
1744 exitcode, output = self._runcommand(cmd, env)
1745
1745
1746 if self._aborted:
1746 if self._aborted:
1747 raise KeyboardInterrupt()
1747 raise KeyboardInterrupt()
1748
1748
1749 # Do not merge output if skipped. Return hghave message instead.
1749 # Do not merge output if skipped. Return hghave message instead.
1750 # Similarly, with --debug, output is None.
1750 # Similarly, with --debug, output is None.
1751 if exitcode == self.SKIPPED_STATUS or output is None:
1751 if exitcode == self.SKIPPED_STATUS or output is None:
1752 return exitcode, output
1752 return exitcode, output
1753
1753
1754 return self._processoutput(exitcode, output, salt, after, expected)
1754 return self._processoutput(exitcode, output, salt, after, expected)
1755
1755
1756 def _hghave(self, reqs):
1756 def _hghave(self, reqs):
1757 allreqs = b' '.join(reqs)
1757 allreqs = b' '.join(reqs)
1758
1758
1759 self._detectslow(reqs)
1759 self._detectslow(reqs)
1760
1760
1761 if allreqs in self._have:
1761 if allreqs in self._have:
1762 return self._have.get(allreqs)
1762 return self._have.get(allreqs)
1763
1763
1764 # TODO do something smarter when all other uses of hghave are gone.
1764 # TODO do something smarter when all other uses of hghave are gone.
1765 runtestdir = osenvironb[b'RUNTESTDIR']
1765 runtestdir = osenvironb[b'RUNTESTDIR']
1766 tdir = runtestdir.replace(b'\\', b'/')
1766 tdir = runtestdir.replace(b'\\', b'/')
1767 proc = Popen4(
1767 proc = Popen4(
1768 b'%s -c "%s/hghave %s"' % (self._shell, tdir, allreqs),
1768 b'%s -c "%s/hghave %s"' % (self._shell, tdir, allreqs),
1769 self._testtmp,
1769 self._testtmp,
1770 0,
1770 0,
1771 self._getenv(),
1771 self._getenv(),
1772 )
1772 )
1773 stdout, stderr = proc.communicate()
1773 stdout, stderr = proc.communicate()
1774 ret = proc.wait()
1774 ret = proc.wait()
1775 if wifexited(ret):
1775 if wifexited(ret):
1776 ret = os.WEXITSTATUS(ret)
1776 ret = os.WEXITSTATUS(ret)
1777 if ret == 2:
1777 if ret == 2:
1778 print(stdout.decode('utf-8'))
1778 print(stdout.decode('utf-8'))
1779 sys.exit(1)
1779 sys.exit(1)
1780
1780
1781 if ret != 0:
1781 if ret != 0:
1782 self._have[allreqs] = (False, stdout)
1782 self._have[allreqs] = (False, stdout)
1783 return False, stdout
1783 return False, stdout
1784
1784
1785 self._have[allreqs] = (True, None)
1785 self._have[allreqs] = (True, None)
1786 return True, None
1786 return True, None
1787
1787
1788 def _detectslow(self, reqs):
1788 def _detectslow(self, reqs):
1789 """update the timeout of slow test when appropriate"""
1789 """update the timeout of slow test when appropriate"""
1790 if b'slow' in reqs:
1790 if b'slow' in reqs:
1791 self._timeout = self._slowtimeout
1791 self._timeout = self._slowtimeout
1792
1792
1793 def _iftest(self, args):
1793 def _iftest(self, args):
1794 # implements "#if"
1794 # implements "#if"
1795 reqs = []
1795 reqs = []
1796 for arg in args:
1796 for arg in args:
1797 if arg.startswith(b'no-') and arg[3:] in self._allcases:
1797 if arg.startswith(b'no-') and arg[3:] in self._allcases:
1798 if arg[3:] in self._case:
1798 if arg[3:] in self._case:
1799 return False
1799 return False
1800 elif arg in self._allcases:
1800 elif arg in self._allcases:
1801 if arg not in self._case:
1801 if arg not in self._case:
1802 return False
1802 return False
1803 else:
1803 else:
1804 reqs.append(arg)
1804 reqs.append(arg)
1805 self._detectslow(reqs)
1805 self._detectslow(reqs)
1806 return self._hghave(reqs)[0]
1806 return self._hghave(reqs)[0]
1807
1807
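# Simplified sketch of the "#if" evaluation above, ignoring the hghave
# fallback for non-case arguments. The case names are hypothetical; here the
# test file defines cases {case-a, case-b} and runs as case-a.
allcases = {b'case-a', b'case-b'}
current = [b'case-a']

def case_args_match(args):
    for arg in args:
        if arg.startswith(b'no-') and arg[3:] in allcases:
            if arg[3:] in current:
                return False
        elif arg in allcases:
            if arg not in current:
                return False
    return True

assert case_args_match([b'case-a'])
assert not case_args_match([b'no-case-a'])
assert not case_args_match([b'case-b'])
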
1808 def _parsetest(self, lines):
1808 def _parsetest(self, lines):
1809 # We generate a shell script which outputs unique markers to line
1809 # We generate a shell script which outputs unique markers to line
1810 # up script results with our source. These markers include input
1810 # up script results with our source. These markers include input
1811 # line number and the last return code.
1811 # line number and the last return code.
1812 salt = b"SALT%d" % time.time()
1812 salt = b"SALT%d" % time.time()
1813
1813
1814 def addsalt(line, inpython):
1814 def addsalt(line, inpython):
1815 if inpython:
1815 if inpython:
1816 script.append(b'%s %d 0\n' % (salt, line))
1816 script.append(b'%s %d 0\n' % (salt, line))
1817 else:
1817 else:
1818 script.append(b'echo %s %d $?\n' % (salt, line))
1818 script.append(b'echo %s %d $?\n' % (salt, line))
1819
1819
1820 activetrace = []
1820 activetrace = []
1821 session = str(uuid.uuid4())
1821 session = str(uuid.uuid4())
1822 if PYTHON3:
1822 if PYTHON3:
1823 session = session.encode('ascii')
1823 session = session.encode('ascii')
1824 hgcatapult = os.getenv('HGTESTCATAPULTSERVERPIPE') or os.getenv(
1824 hgcatapult = os.getenv('HGTESTCATAPULTSERVERPIPE') or os.getenv(
1825 'HGCATAPULTSERVERPIPE'
1825 'HGCATAPULTSERVERPIPE'
1826 )
1826 )
1827
1827
1828 def toggletrace(cmd=None):
1828 def toggletrace(cmd=None):
1829 if not hgcatapult or hgcatapult == os.devnull:
1829 if not hgcatapult or hgcatapult == os.devnull:
1830 return
1830 return
1831
1831
1832 if activetrace:
1832 if activetrace:
1833 script.append(
1833 script.append(
1834 b'echo END %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n'
1834 b'echo END %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n'
1835 % (session, activetrace[0])
1835 % (session, activetrace[0])
1836 )
1836 )
1837 if cmd is None:
1837 if cmd is None:
1838 return
1838 return
1839
1839
1840 if isinstance(cmd, str):
1840 if isinstance(cmd, str):
1841 quoted = shellquote(cmd.strip())
1841 quoted = shellquote(cmd.strip())
1842 else:
1842 else:
1843 quoted = shellquote(cmd.strip().decode('utf8')).encode('utf8')
1843 quoted = shellquote(cmd.strip().decode('utf8')).encode('utf8')
1844 quoted = quoted.replace(b'\\', b'\\\\')
1844 quoted = quoted.replace(b'\\', b'\\\\')
1845 script.append(
1845 script.append(
1846 b'echo START %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n'
1846 b'echo START %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n'
1847 % (session, quoted)
1847 % (session, quoted)
1848 )
1848 )
1849 activetrace[0:] = [quoted]
1849 activetrace[0:] = [quoted]
1850
1850
1851 script = []
1851 script = []
1852
1852
1853 # After we run the shell script, we re-unify the script output
1853 # After we run the shell script, we re-unify the script output
1854 # with non-active parts of the source, with synchronization by our
1854 # with non-active parts of the source, with synchronization by our
1855 # SALT line number markers. The after table contains the non-active
1855 # SALT line number markers. The after table contains the non-active
1856 # components, ordered by line number.
1856 # components, ordered by line number.
1857 after = {}
1857 after = {}
1858
1858
1859 # Expected shell script output.
1859 # Expected shell script output.
1860 expected = {}
1860 expected = {}
1861
1861
1862 pos = prepos = -1
1862 pos = prepos = -1
1863
1863
1864 # True or False when in a true or false conditional section
1864 # True or False when in a true or false conditional section
1865 skipping = None
1865 skipping = None
1866
1866
1867 # We keep track of whether or not we're in a Python block so we
1867 # We keep track of whether or not we're in a Python block so we
1868 # can generate the surrounding doctest magic.
1868 # can generate the surrounding doctest magic.
1869 inpython = False
1869 inpython = False
1870
1870
1871 if self._debug:
1871 if self._debug:
1872 script.append(b'set -x\n')
1872 script.append(b'set -x\n')
1873 if os.getenv('MSYSTEM'):
1873 if os.getenv('MSYSTEM'):
1874 script.append(b'alias pwd="pwd -W"\n')
1874 script.append(b'alias pwd="pwd -W"\n')
1875
1875
1876 if hgcatapult and hgcatapult != os.devnull:
1876 if hgcatapult and hgcatapult != os.devnull:
1877 if PYTHON3:
1877 if PYTHON3:
1878 hgcatapult = hgcatapult.encode('utf8')
1878 hgcatapult = hgcatapult.encode('utf8')
1879 cataname = self.name.encode('utf8')
1879 cataname = self.name.encode('utf8')
1880 else:
1880 else:
1881 cataname = self.name
1881 cataname = self.name
1882
1882
1883 # Kludge: use a while loop to keep the pipe from getting
1883 # Kludge: use a while loop to keep the pipe from getting
1884 # closed by our echo commands. The still-running file gets
1884 # closed by our echo commands. The still-running file gets
1885 # reaped at the end of the script, which causes the while
1885 # reaped at the end of the script, which causes the while
1886 # loop to exit and closes the pipe. Sigh.
1886 # loop to exit and closes the pipe. Sigh.
1887 script.append(
1887 script.append(
1888 b'rtendtracing() {\n'
1888 b'rtendtracing() {\n'
1889 b' echo END %(session)s %(name)s >> %(catapult)s\n'
1889 b' echo END %(session)s %(name)s >> %(catapult)s\n'
1890 b' rm -f "$TESTTMP/.still-running"\n'
1890 b' rm -f "$TESTTMP/.still-running"\n'
1891 b'}\n'
1891 b'}\n'
1892 b'trap "rtendtracing" 0\n'
1892 b'trap "rtendtracing" 0\n'
1893 b'touch "$TESTTMP/.still-running"\n'
1893 b'touch "$TESTTMP/.still-running"\n'
1894 b'while [ -f "$TESTTMP/.still-running" ]; do sleep 1; done '
1894 b'while [ -f "$TESTTMP/.still-running" ]; do sleep 1; done '
1895 b'> %(catapult)s &\n'
1895 b'> %(catapult)s &\n'
1896 b'HGCATAPULTSESSION=%(session)s ; export HGCATAPULTSESSION\n'
1896 b'HGCATAPULTSESSION=%(session)s ; export HGCATAPULTSESSION\n'
1897 b'echo START %(session)s %(name)s >> %(catapult)s\n'
1897 b'echo START %(session)s %(name)s >> %(catapult)s\n'
1898 % {
1898 % {
1899 b'name': cataname,
1899 b'name': cataname,
1900 b'session': session,
1900 b'session': session,
1901 b'catapult': hgcatapult,
1901 b'catapult': hgcatapult,
1902 }
1902 }
1903 )
1903 )
1904
1904
1905 if self._case:
1905 if self._case:
1906 casestr = b'#'.join(self._case)
1906 casestr = b'#'.join(self._case)
1907 if isinstance(casestr, str):
1907 if isinstance(casestr, str):
1908 quoted = shellquote(casestr)
1908 quoted = shellquote(casestr)
1909 else:
1909 else:
1910 quoted = shellquote(casestr.decode('utf8')).encode('utf8')
1910 quoted = shellquote(casestr.decode('utf8')).encode('utf8')
1911 script.append(b'TESTCASE=%s\n' % quoted)
1911 script.append(b'TESTCASE=%s\n' % quoted)
1912 script.append(b'export TESTCASE\n')
1912 script.append(b'export TESTCASE\n')
1913
1913
1914 n = 0
1914 n = 0
1915 for n, l in enumerate(lines):
1915 for n, l in enumerate(lines):
1916 if not l.endswith(b'\n'):
1916 if not l.endswith(b'\n'):
1917 l += b'\n'
1917 l += b'\n'
1918 if l.startswith(b'#require'):
1918 if l.startswith(b'#require'):
1919 lsplit = l.split()
1919 lsplit = l.split()
1920 if len(lsplit) < 2 or lsplit[0] != b'#require':
1920 if len(lsplit) < 2 or lsplit[0] != b'#require':
1921 after.setdefault(pos, []).append(
1921 after.setdefault(pos, []).append(
1922 b' !!! invalid #require\n'
1922 b' !!! invalid #require\n'
1923 )
1923 )
1924 if not skipping:
1924 if not skipping:
1925 haveresult, message = self._hghave(lsplit[1:])
1925 haveresult, message = self._hghave(lsplit[1:])
1926 if not haveresult:
1926 if not haveresult:
1927 script = [b'echo "%s"\nexit 80\n' % message]
1927 script = [b'echo "%s"\nexit 80\n' % message]
1928 break
1928 break
1929 after.setdefault(pos, []).append(l)
1929 after.setdefault(pos, []).append(l)
1930 elif l.startswith(b'#if'):
1930 elif l.startswith(b'#if'):
1931 lsplit = l.split()
1931 lsplit = l.split()
1932 if len(lsplit) < 2 or lsplit[0] != b'#if':
1932 if len(lsplit) < 2 or lsplit[0] != b'#if':
1933 after.setdefault(pos, []).append(b' !!! invalid #if\n')
1933 after.setdefault(pos, []).append(b' !!! invalid #if\n')
1934 if skipping is not None:
1934 if skipping is not None:
1935 after.setdefault(pos, []).append(b' !!! nested #if\n')
1935 after.setdefault(pos, []).append(b' !!! nested #if\n')
1936 skipping = not self._iftest(lsplit[1:])
1936 skipping = not self._iftest(lsplit[1:])
1937 after.setdefault(pos, []).append(l)
1937 after.setdefault(pos, []).append(l)
1938 elif l.startswith(b'#else'):
1938 elif l.startswith(b'#else'):
1939 if skipping is None:
1939 if skipping is None:
1940 after.setdefault(pos, []).append(b' !!! missing #if\n')
1940 after.setdefault(pos, []).append(b' !!! missing #if\n')
1941 skipping = not skipping
1941 skipping = not skipping
1942 after.setdefault(pos, []).append(l)
1942 after.setdefault(pos, []).append(l)
1943 elif l.startswith(b'#endif'):
1943 elif l.startswith(b'#endif'):
1944 if skipping is None:
1944 if skipping is None:
1945 after.setdefault(pos, []).append(b' !!! missing #if\n')
1945 after.setdefault(pos, []).append(b' !!! missing #if\n')
1946 skipping = None
1946 skipping = None
1947 after.setdefault(pos, []).append(l)
1947 after.setdefault(pos, []).append(l)
1948 elif skipping:
1948 elif skipping:
1949 after.setdefault(pos, []).append(l)
1949 after.setdefault(pos, []).append(l)
1950 elif l.startswith(b' >>> '): # python inlines
1950 elif l.startswith(b' >>> '): # python inlines
1951 after.setdefault(pos, []).append(l)
1951 after.setdefault(pos, []).append(l)
1952 prepos = pos
1952 prepos = pos
1953 pos = n
1953 pos = n
1954 if not inpython:
1954 if not inpython:
1955 # We've just entered a Python block. Add the header.
1955 # We've just entered a Python block. Add the header.
1956 inpython = True
1956 inpython = True
1957 addsalt(prepos, False) # Make sure we report the exit code.
1957 addsalt(prepos, False) # Make sure we report the exit code.
1958 script.append(b'"%s" -m heredoctest <<EOF\n' % PYTHON)
1958 script.append(b'"%s" -m heredoctest <<EOF\n' % PYTHON)
1959 addsalt(n, True)
1959 addsalt(n, True)
1960 script.append(l[2:])
1960 script.append(l[2:])
1961 elif l.startswith(b' ... '): # python inlines
1961 elif l.startswith(b' ... '): # python inlines
1962 after.setdefault(prepos, []).append(l)
1962 after.setdefault(prepos, []).append(l)
1963 script.append(l[2:])
1963 script.append(l[2:])
1964 elif l.startswith(b' $ '): # commands
1964 elif l.startswith(b' $ '): # commands
1965 if inpython:
1965 if inpython:
1966 script.append(b'EOF\n')
1966 script.append(b'EOF\n')
1967 inpython = False
1967 inpython = False
1968 after.setdefault(pos, []).append(l)
1968 after.setdefault(pos, []).append(l)
1969 prepos = pos
1969 prepos = pos
1970 pos = n
1970 pos = n
1971 addsalt(n, False)
1971 addsalt(n, False)
1972 rawcmd = l[4:]
1972 rawcmd = l[4:]
1973 cmd = rawcmd.split()
1973 cmd = rawcmd.split()
1974 toggletrace(rawcmd)
1974 toggletrace(rawcmd)
1975 if len(cmd) == 2 and cmd[0] == b'cd':
1975 if len(cmd) == 2 and cmd[0] == b'cd':
1976 rawcmd = b'cd %s || exit 1\n' % cmd[1]
1976 rawcmd = b'cd %s || exit 1\n' % cmd[1]
1977 script.append(rawcmd)
1977 script.append(rawcmd)
1978 elif l.startswith(b' > '): # continuations
1978 elif l.startswith(b' > '): # continuations
1979 after.setdefault(prepos, []).append(l)
1979 after.setdefault(prepos, []).append(l)
1980 script.append(l[4:])
1980 script.append(l[4:])
1981 elif l.startswith(b' '): # results
1981 elif l.startswith(b' '): # results
1982 # Queue up a list of expected results.
1982 # Queue up a list of expected results.
1983 expected.setdefault(pos, []).append(l[2:])
1983 expected.setdefault(pos, []).append(l[2:])
1984 else:
1984 else:
1985 if inpython:
1985 if inpython:
1986 script.append(b'EOF\n')
1986 script.append(b'EOF\n')
1987 inpython = False
1987 inpython = False
1988 # Non-command/result. Queue up for merged output.
1988 # Non-command/result. Queue up for merged output.
1989 after.setdefault(pos, []).append(l)
1989 after.setdefault(pos, []).append(l)
1990
1990
1991 if inpython:
1991 if inpython:
1992 script.append(b'EOF\n')
1992 script.append(b'EOF\n')
1993 if skipping is not None:
1993 if skipping is not None:
1994 after.setdefault(pos, []).append(b' !!! missing #endif\n')
1994 after.setdefault(pos, []).append(b' !!! missing #endif\n')
1995 addsalt(n + 1, False)
1995 addsalt(n + 1, False)
1996 # Need to end any current per-command trace
1996 # Need to end any current per-command trace
1997 if activetrace:
1997 if activetrace:
1998 toggletrace()
1998 toggletrace()
1999 return salt, script, after, expected
1999 return salt, script, after, expected
2000
2000
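# Sketch of the salt markers emitted by addsalt() above. For a shell command
# at source line 12, the generated script echoes the line number and the $?
# of the command that just ran; the salt value below is a hypothetical
# example of the time-based one built in _parsetest().
salt = b'SALT1700000000'
marker = b'echo %s %d $?\n' % (salt, 12)
assert marker == b'echo SALT1700000000 12 $?\n'
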
2001 def _processoutput(self, exitcode, output, salt, after, expected):
2001 def _processoutput(self, exitcode, output, salt, after, expected):
2002 # Merge the script output back into a unified test.
2002 # Merge the script output back into a unified test.
2003 warnonly = WARN_UNDEFINED # 1: not yet; 2: yes; 3: for sure not
2003 warnonly = WARN_UNDEFINED # 1: not yet; 2: yes; 3: for sure not
2004 if exitcode != 0:
2004 if exitcode != 0:
2005 warnonly = WARN_NO
2005 warnonly = WARN_NO
2006
2006
2007 pos = -1
2007 pos = -1
2008 postout = []
2008 postout = []
2009 for out_rawline in output:
2009 for out_rawline in output:
2010 out_line, cmd_line = out_rawline, None
2010 out_line, cmd_line = out_rawline, None
2011 if salt in out_rawline:
2011 if salt in out_rawline:
2012 out_line, cmd_line = out_rawline.split(salt, 1)
2012 out_line, cmd_line = out_rawline.split(salt, 1)
2013
2013
2014 pos, postout, warnonly = self._process_out_line(
2014 pos, postout, warnonly = self._process_out_line(
2015 out_line, pos, postout, expected, warnonly
2015 out_line, pos, postout, expected, warnonly
2016 )
2016 )
2017 pos, postout = self._process_cmd_line(cmd_line, pos, postout, after)
2017 pos, postout = self._process_cmd_line(cmd_line, pos, postout, after)
2018
2018
2019 if pos in after:
2019 if pos in after:
2020 postout += after.pop(pos)
2020 postout += after.pop(pos)
2021
2021
2022 if warnonly == WARN_YES:
2022 if warnonly == WARN_YES:
2023 exitcode = False # Set exitcode to warned.
2023 exitcode = False # Set exitcode to warned.
2024
2024
2025 return exitcode, postout
2025 return exitcode, postout
2026
2026
2027 def _process_out_line(self, out_line, pos, postout, expected, warnonly):
2027 def _process_out_line(self, out_line, pos, postout, expected, warnonly):
2028 while out_line:
2028 while out_line:
2029 if not out_line.endswith(b'\n'):
2029 if not out_line.endswith(b'\n'):
2030 out_line += b' (no-eol)\n'
2030 out_line += b' (no-eol)\n'
2031
2031
2032 # Find the expected output at the current position.
2032 # Find the expected output at the current position.
2033 els = [None]
2033 els = [None]
2034 if expected.get(pos, None):
2034 if expected.get(pos, None):
2035 els = expected[pos]
2035 els = expected[pos]
2036
2036
2037 optional = []
2037 optional = []
2038 for i, el in enumerate(els):
2038 for i, el in enumerate(els):
2039 r = False
2039 r = False
2040 if el:
2040 if el:
2041 r, exact = self.linematch(el, out_line)
2041 r, exact = self.linematch(el, out_line)
2042 if isinstance(r, str):
2042 if isinstance(r, str):
2043 if r == '-glob':
2043 if r == '-glob':
2044 out_line = ''.join(el.rsplit(' (glob)', 1))
2044 out_line = ''.join(el.rsplit(' (glob)', 1))
2045 r = '' # Warn only this line.
2045 r = '' # Warn only this line.
2046 elif r == "retry":
2046 elif r == "retry":
2047 postout.append(b' ' + el)
2047 postout.append(b' ' + el)
2048 else:
2048 else:
2049 log('\ninfo, unknown linematch result: %r\n' % r)
2049 log('\ninfo, unknown linematch result: %r\n' % r)
2050 r = False
2050 r = False
2051 if r:
2051 if r:
2052 els.pop(i)
2052 els.pop(i)
2053 break
2053 break
2054 if el:
2054 if el:
2055 if isoptional(el):
2055 if isoptional(el):
2056 optional.append(i)
2056 optional.append(i)
2057 else:
2057 else:
2058 m = optline.match(el)
2058 m = optline.match(el)
2059 if m:
2059 if m:
2060 conditions = [c for c in m.group(2).split(b' ')]
2060 conditions = [c for c in m.group(2).split(b' ')]
2061
2061
2062 if not self._iftest(conditions):
2062 if not self._iftest(conditions):
2063 optional.append(i)
2063 optional.append(i)
2064 if exact:
2064 if exact:
2065 # Don't allow the line to be matched against a later
2065 # Don't allow the line to be matched against a later
2066 # line in the output
2066 # line in the output
2067 els.pop(i)
2067 els.pop(i)
2068 break
2068 break
2069
2069
2070 if r:
2070 if r:
2071 if r == "retry":
2071 if r == "retry":
2072 continue
2072 continue
2073 # clean up any optional leftovers
2073 # clean up any optional leftovers
2074 for i in optional:
2074 for i in optional:
2075 postout.append(b' ' + els[i])
2075 postout.append(b' ' + els[i])
2076 for i in reversed(optional):
2076 for i in reversed(optional):
2077 del els[i]
2077 del els[i]
2078 postout.append(b' ' + el)
2078 postout.append(b' ' + el)
2079 else:
2079 else:
2080 if self.NEEDESCAPE(out_line):
2080 if self.NEEDESCAPE(out_line):
2081 out_line = TTest._stringescape(
2081 out_line = TTest._stringescape(
2082 b'%s (esc)\n' % out_line.rstrip(b'\n')
2082 b'%s (esc)\n' % out_line.rstrip(b'\n')
2083 )
2083 )
2084 postout.append(b' ' + out_line) # Let diff deal with it.
2084 postout.append(b' ' + out_line) # Let diff deal with it.
2085 if r != '': # If line failed.
2085 if r != '': # If line failed.
2086 warnonly = WARN_NO
2086 warnonly = WARN_NO
2087 elif warnonly == WARN_UNDEFINED:
2087 elif warnonly == WARN_UNDEFINED:
2088 warnonly = WARN_YES
2088 warnonly = WARN_YES
2089 break
2089 break
2090 else:
2090 else:
2091 # clean up any optional leftovers
2091 # clean up any optional leftovers
2092 while expected.get(pos, None):
2092 while expected.get(pos, None):
2093 el = expected[pos].pop(0)
2093 el = expected[pos].pop(0)
2094 if el:
2094 if el:
2095 if not isoptional(el):
2095 if not isoptional(el):
2096 m = optline.match(el)
2096 m = optline.match(el)
2097 if m:
2097 if m:
2098 conditions = [c for c in m.group(2).split(b' ')]
2098 conditions = [c for c in m.group(2).split(b' ')]
2099
2099
2100 if self._iftest(conditions):
2100 if self._iftest(conditions):
2101 # Don't append as optional line
2101 # Don't append as optional line
2102 continue
2102 continue
2103 else:
2103 else:
2104 continue
2104 continue
2105 postout.append(b' ' + el)
2105 postout.append(b' ' + el)
2106 return pos, postout, warnonly
2106 return pos, postout, warnonly
2107
2107
2108 def _process_cmd_line(self, cmd_line, pos, postout, after):
2108 def _process_cmd_line(self, cmd_line, pos, postout, after):
2109 """process a "command" part of a line from unified test output"""
2109 """process a "command" part of a line from unified test output"""
2110 if cmd_line:
2110 if cmd_line:
2111 # Add on last return code.
2111 # Add on last return code.
2112 ret = int(cmd_line.split()[1])
2112 ret = int(cmd_line.split()[1])
2113 if ret != 0:
2113 if ret != 0:
2114 postout.append(b' [%d]\n' % ret)
2114 postout.append(b' [%d]\n' % ret)
2115 if pos in after:
2115 if pos in after:
2116 # Merge in non-active test bits.
2116 # Merge in non-active test bits.
2117 postout += after.pop(pos)
2117 postout += after.pop(pos)
2118 pos = int(cmd_line.split()[0])
2118 pos = int(cmd_line.split()[0])
2119 return pos, postout
2119 return pos, postout
2120
2120
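# Sketch of the marker format consumed above: after splitting on the salt,
# the command part is b" <line> <ret>", read back as integers.
cmd_line = b' 12 0\n'
ret = int(cmd_line.split()[1])
pos = int(cmd_line.split()[0])
assert (pos, ret) == (12, 0)
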
2121 @staticmethod
2121 @staticmethod
2122 def rematch(el, l):
2122 def rematch(el, l):
2123 try:
2123 try:
2124 # parse any flags at the beginning of the regex. Only 'i' is
2124 # parse any flags at the beginning of the regex. Only 'i' is
2125 # supported right now, but this should be easy to extend.
2125 # supported right now, but this should be easy to extend.
2126 flags, el = re.match(br'^(\(\?i\))?(.*)', el).groups()[0:2]
2126 flags, el = re.match(br'^(\(\?i\))?(.*)', el).groups()[0:2]
2127 flags = flags or b''
2127 flags = flags or b''
2128 el = flags + b'(?:' + el + b')'
2128 el = flags + b'(?:' + el + b')'
2129 # use \Z to ensure that the regex matches to the end of the string
2129 # use \Z to ensure that the regex matches to the end of the string
2130 if WINDOWS:
2130 if WINDOWS:
2131 return re.match(el + br'\r?\n\Z', l)
2131 return re.match(el + br'\r?\n\Z', l)
2132 return re.match(el + br'\n\Z', l)
2132 return re.match(el + br'\n\Z', l)
2133 except re.error:
2133 except re.error:
2134 # el is an invalid regex
2134 # el is an invalid regex
2135 return False
2135 return False
2136
2136
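# Sketch of the "(re)" handling above: an optional leading (?i) flag is kept
# in front, the body is wrapped in a non-capturing group, and \Z anchors the
# match to the end of the output line. The pattern and line are assumed
# example data.
import re

el, line = br'(?i)ERROR: .*', b'error: disk full\n'
flags, body = re.match(br'^(\(\?i\))?(.*)', el).groups()[0:2]
pattern = (flags or b'') + b'(?:' + body + b')'
assert re.match(pattern + br'\n\Z', line)
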
2137 @staticmethod
2137 @staticmethod
2138 def globmatch(el, l):
2138 def globmatch(el, l):
2139 # The only supported special characters are * and ?, plus /, which also
2139 # The only supported special characters are * and ?, plus /, which also
2140 # matches \ on Windows. Escaping of these characters is supported.
2140 # matches \ on Windows. Escaping of these characters is supported.
2141 if el + b'\n' == l:
2141 if el + b'\n' == l:
2142 if os.altsep:
2142 if os.altsep:
2143 # matching on "/" is not needed for this line
2143 # matching on "/" is not needed for this line
2144 for pat in checkcodeglobpats:
2144 for pat in checkcodeglobpats:
2145 if pat.match(el):
2145 if pat.match(el):
2146 return True
2146 return True
2147 return b'-glob'
2147 return b'-glob'
2148 return True
2148 return True
2149 el = el.replace(b'$LOCALIP', b'*')
2149 el = el.replace(b'$LOCALIP', b'*')
2150 i, n = 0, len(el)
2150 i, n = 0, len(el)
2151 res = b''
2151 res = b''
2152 while i < n:
2152 while i < n:
2153 c = el[i : i + 1]
2153 c = el[i : i + 1]
2154 i += 1
2154 i += 1
2155 if c == b'\\' and i < n and el[i : i + 1] in b'*?\\/':
2155 if c == b'\\' and i < n and el[i : i + 1] in b'*?\\/':
2156 res += el[i - 1 : i + 1]
2156 res += el[i - 1 : i + 1]
2157 i += 1
2157 i += 1
2158 elif c == b'*':
2158 elif c == b'*':
2159 res += b'.*'
2159 res += b'.*'
2160 elif c == b'?':
2160 elif c == b'?':
2161 res += b'.'
2161 res += b'.'
2162 elif c == b'/' and os.altsep:
2162 elif c == b'/' and os.altsep:
2163 res += b'[/\\\\]'
2163 res += b'[/\\\\]'
2164 else:
2164 else:
2165 res += re.escape(c)
2165 res += re.escape(c)
2166 return TTest.rematch(res, l)
2166 return TTest.rematch(res, l)
2167
2167
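# Standalone sketch of the glob-to-regex translation implemented above:
# "*" becomes ".*", "?" becomes ".", and "/" also matches "\" when altsep is
# in effect (Windows); everything else is escaped literally.
import re

def glob2re(el, altsep=False):
    i, n, res = 0, len(el), b''
    while i < n:
        c = el[i:i + 1]
        i += 1
        if c == b'\\' and i < n and el[i:i + 1] in b'*?\\/':
            res += el[i - 1:i + 1]
            i += 1
        elif c == b'*':
            res += b'.*'
        elif c == b'?':
            res += b'.'
        elif c == b'/' and altsep:
            res += b'[/\\\\]'
        else:
            res += re.escape(c)
    return res

assert re.match(glob2re(b'saved backup bundle to $TESTTMP/*.hg') + br'\n\Z',
                b'saved backup bundle to $TESTTMP/foo.hg\n')
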
    def linematch(self, el, l):
        if el == l:  # perfect match (fast)
            return True, True
        retry = False
        if isoptional(el):
            retry = "retry"
            el = el[: -len(MARK_OPTIONAL)] + b"\n"
        else:
            m = optline.match(el)
            if m:
                conditions = [c for c in m.group(2).split(b' ')]

                el = m.group(1) + b"\n"
                if not self._iftest(conditions):
                    # listed feature missing, should not match
                    return "retry", False

        if el.endswith(b" (esc)\n"):
            if PYTHON3:
                el = el[:-7].decode('unicode_escape') + '\n'
                el = el.encode('latin-1')
            else:
                el = el[:-7].decode('string-escape') + '\n'
        if el == l or WINDOWS and el[:-1] + b'\r\n' == l:
            return True, True
        if el.endswith(b" (re)\n"):
            return (TTest.rematch(el[:-6], l) or retry), False
        if el.endswith(b" (glob)\n"):
            # ignore '(glob)' added to l by 'replacements'
            if l.endswith(b" (glob)\n"):
                l = l[:-8] + b"\n"
            return (TTest.globmatch(el[:-8], l) or retry), False
        if os.altsep:
            _l = l.replace(b'\\', b'/')
            if el == _l or WINDOWS and el[:-1] + b'\r\n' == _l:
                return True, True
        return retry, True

    @staticmethod
    def parsehghaveoutput(lines):
        """Parse hghave log lines.

        Return tuple of lists (missing, failed):
          * the missing/unknown features
          * the features for which existence check failed"""
        missing = []
        failed = []
        for line in lines:
            if line.startswith(TTest.SKIPPED_PREFIX):
                line = line.splitlines()[0]
                missing.append(_bytes2sys(line[len(TTest.SKIPPED_PREFIX) :]))
            elif line.startswith(TTest.FAILED_PREFIX):
                line = line.splitlines()[0]
                failed.append(_bytes2sys(line[len(TTest.FAILED_PREFIX) :]))

        return missing, failed

    @staticmethod
    def _escapef(m):
        return TTest.ESCAPEMAP[m.group(0)]

    @staticmethod
    def _stringescape(s):
        return TTest.ESCAPESUB(TTest._escapef, s)


iolock = threading.RLock()
firstlock = threading.RLock()
firsterror = False

if PYTHON3:
    base_class = unittest.TextTestResult
else:
    base_class = unittest._TextTestResult


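# Python 3 exposes TextTestResult as a public name; Python 2 only has the
# private unittest._TextTestResult, so pick whichever this interpreter
# provides as the base class.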
class TestResult(base_class):
    """Holds results when executing via unittest."""

    def __init__(self, options, *args, **kwargs):
        super(TestResult, self).__init__(*args, **kwargs)

        self._options = options

        # unittest.TestResult didn't have skipped until 2.7. We need to
        # polyfill it.
        self.skipped = []

        # We have a custom "ignored" result that isn't present in any Python
        # unittest implementation. It is very similar to skipped. It may make
        # sense to map it into skip some day.
        self.ignored = []

        self.times = []
        self._firststarttime = None
        # Data stored for the benefit of generating xunit reports.
        self.successes = []
        self.faildata = {}

        if options.color == 'auto':
            isatty = self.stream.isatty()
            # For some reason, redirecting stdout on Windows disables the ANSI
            # color processing of stderr, which is what is used to print the
            # output. Therefore, both must be tty on Windows to enable color.
            if WINDOWS:
                isatty = isatty and sys.stdout.isatty()
            self.color = pygmentspresent and isatty
        elif options.color == 'never':
            self.color = False
        else:  # 'always', for testing purposes
            self.color = pygmentspresent

    def onStart(self, test):
        """Can be overridden by custom TestResult"""

    def onEnd(self):
        """Can be overridden by custom TestResult"""

    def addFailure(self, test, reason):
        self.failures.append((test, reason))

        if self._options.first:
            self.stop()
        else:
            with iolock:
                if reason == "timed out":
                    self.stream.write('t')
                else:
                    if not self._options.nodiff:
                        self.stream.write('\n')
                        # Exclude the '\n' from highlighting to lex correctly
                        formatted = 'ERROR: %s output changed\n' % test
                        self.stream.write(highlightmsg(formatted, self.color))
                    self.stream.write('!')

                self.stream.flush()

    def addSuccess(self, test):
        with iolock:
            super(TestResult, self).addSuccess(test)
        self.successes.append(test)

    def addError(self, test, err):
        super(TestResult, self).addError(test, err)
        if self._options.first:
            self.stop()

    # Polyfill.
    def addSkip(self, test, reason):
        self.skipped.append((test, reason))
        with iolock:
            if self.showAll:
                self.stream.writeln('skipped %s' % reason)
            else:
                self.stream.write('s')
                self.stream.flush()

    def addIgnore(self, test, reason):
        self.ignored.append((test, reason))
        with iolock:
            if self.showAll:
                self.stream.writeln('ignored %s' % reason)
            else:
                if reason not in ('not retesting', "doesn't match keyword"):
                    self.stream.write('i')
                else:
                    self.testsRun += 1
                self.stream.flush()

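    # addOutputMismatch prints the diff (or hands it to the --view program),
    # optionally offers to accept the new output under --interactive, and
    # returns whether the change was accepted into the reference file.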
    def addOutputMismatch(self, test, ret, got, expected):
        """Record a mismatch in test output for a particular test."""
        if self.shouldStop or firsterror:
            # don't print, some other test case already failed and
            # printed, we're just stale and probably failed due to our
            # temp dir getting cleaned up.
            return

        accepted = False
        lines = []

        with iolock:
            if self._options.nodiff:
                pass
            elif self._options.view:
                v = self._options.view
                subprocess.call(
                    r'"%s" "%s" "%s"'
                    % (v, _bytes2sys(test.refpath), _bytes2sys(test.errpath)),
                    shell=True,
                )
            else:
                servefail, lines = getdiff(
                    expected, got, test.refpath, test.errpath
                )
                self.stream.write('\n')
                for line in lines:
                    line = highlightdiff(line, self.color)
                    if PYTHON3:
                        self.stream.flush()
                        self.stream.buffer.write(line)
                        self.stream.buffer.flush()
                    else:
                        self.stream.write(line)
                        self.stream.flush()

                if servefail:
                    raise test.failureException(
                        'server failed to start (HGPORT=%s)' % test._startport
                    )

            # handle interactive prompt without releasing iolock
            if self._options.interactive:
                if test.readrefout() != expected:
                    self.stream.write(
                        'Reference output has changed (run again to prompt '
                        'changes)'
                    )
                else:
                    self.stream.write('Accept this change? [y/N] ')
                    self.stream.flush()
                    answer = sys.stdin.readline().strip()
                    if answer.lower() in ('y', 'yes'):
                        if test.path.endswith(b'.t'):
                            rename(test.errpath, test.path)
                        else:
                            rename(test.errpath, b'%s.out' % test.path)
                        accepted = True
            if not accepted:
                self.faildata[test.name] = b''.join(lines)

        return accepted

    def startTest(self, test):
        super(TestResult, self).startTest(test)

        # os.times() reports the user and system time spent by a process and
        # its child processes, along with the real elapsed time. Its one
        # limitation: the child-process figures work on Linux but not on
        # Windows, hence we fall back to another function for wall time
        # calculations.
        test.started_times = os.times()
        # TODO use a monotonic clock once support for Python 2.7 is dropped.
        test.started_time = time.time()
        if self._firststarttime is None:  # thread racy but irrelevant
            self._firststarttime = test.started_time

    def stopTest(self, test, interrupted=False):
        super(TestResult, self).stopTest(test)

        test.stopped_times = os.times()
        stopped_time = time.time()

        starttime = test.started_times
        endtime = test.stopped_times
        origin = self._firststarttime
        self.times.append(
            (
                test.name,
                endtime[2] - starttime[2],  # user space CPU time
                endtime[3] - starttime[3],  # sys space CPU time
                stopped_time - test.started_time,  # real time
                test.started_time - origin,  # start date in run context
                stopped_time - origin,  # end date in run context
            )
        )

        if interrupted:
            with iolock:
                self.stream.writeln(
                    'INTERRUPTED: %s (after %d seconds)'
                    % (test.name, self.times[-1][3])
                )


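# The result class can be swapped out wholesale: if CUSTOM_TEST_RESULT names
# an importable module, its TestResult attribute is used instead of the
# class defined above.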
def getTestResult():
    """
    Returns the relevant test result
    """
    if "CUSTOM_TEST_RESULT" in os.environ:
        testresultmodule = __import__(os.environ["CUSTOM_TEST_RESULT"])
        return testresultmodule.TestResult
    else:
        return TestResult


class TestSuite(unittest.TestSuite):
    """Custom unittest TestSuite that knows how to execute Mercurial tests."""

    def __init__(
        self,
        testdir,
        jobs=1,
        whitelist=None,
        blacklist=None,
        keywords=None,
        loop=False,
        runs_per_test=1,
        loadtest=None,
        showchannels=False,
        *args,
        **kwargs
    ):
        """Create a new instance that can run tests with a configuration.

        testdir specifies the directory where tests are executed from. This
        is typically the ``tests`` directory from Mercurial's source
        repository.

        jobs specifies the number of jobs to run concurrently. Each test
        executes on its own thread. Tests actually spawn new processes, so
        state mutation should not be an issue.

        If there is only one job, it will use the main thread.

        whitelist and blacklist denote tests that have been whitelisted and
        blacklisted, respectively. These arguments don't belong in TestSuite.
        Instead, whitelist and blacklist should be handled by the thing that
        populates the TestSuite with tests. They are present to preserve
        backwards compatible behavior which reports skipped tests as part
        of the results.

        keywords denotes key words that will be used to filter which tests
        to execute. This arguably belongs outside of TestSuite.

        loop denotes whether to loop over tests forever.
        """
        super(TestSuite, self).__init__(*args, **kwargs)

        self._jobs = jobs
        self._whitelist = whitelist
        self._blacklist = blacklist
        self._keywords = keywords
        self._loop = loop
        self._runs_per_test = runs_per_test
        self._loadtest = loadtest
        self._showchannels = showchannels

    def run(self, result):
        # We have a number of filters that need to be applied. We do this
        # here instead of inside Test because it makes the running logic for
        # Test simpler.
        tests = []
        num_tests = [0]
        for test in self._tests:

            def get():
                num_tests[0] += 1
                if getattr(test, 'should_reload', False):
                    return self._loadtest(test, num_tests[0])
                return test

            if not os.path.exists(test.path):
                result.addSkip(test, "Doesn't exist")
                continue

            is_whitelisted = self._whitelist and (
                test.relpath in self._whitelist or test.bname in self._whitelist
            )
            if not is_whitelisted:
                is_blacklisted = self._blacklist and (
                    test.relpath in self._blacklist
                    or test.bname in self._blacklist
                )
                if is_blacklisted:
                    result.addSkip(test, 'blacklisted')
                    continue
                if self._keywords:
                    with open(test.path, 'rb') as f:
                        t = f.read().lower() + test.bname.lower()
                    ignored = False
                    for k in self._keywords.lower().split():
                        if k not in t:
                            result.addIgnore(test, "doesn't match keyword")
                            ignored = True
                            break

                    if ignored:
                        continue
            for _ in xrange(self._runs_per_test):
                tests.append(get())

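        # What follows is a small thread-pool scheduler: job() claims a free
        # output channel and runs one test, signalling completion through the
        # `done` queue; stat() optionally paints per-channel progress; and the
        # loop below keeps at most self._jobs tests in flight at once.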
        runtests = list(tests)
        done = queue.Queue()
        running = 0

        channels = [""] * self._jobs

        def job(test, result):
            for n, v in enumerate(channels):
                if not v:
                    channel = n
                    break
            else:
                raise ValueError('Could not find output channel')
            channels[channel] = "=" + test.name[5:].split(".")[0]
            try:
                test(result)
                done.put(None)
            except KeyboardInterrupt:
                pass
            except:  # re-raises
                done.put(('!', test, 'run-test raised an error, see traceback'))
                raise
            finally:
                try:
                    channels[channel] = ''
                except IndexError:
                    pass

        def stat():
            count = 0
            while channels:
                d = '\n%03s ' % count
                for n, v in enumerate(channels):
                    if v:
                        d += v[0]
                        channels[n] = v[1:] or '.'
                    else:
                        d += ' '
                    d += ' '
                with iolock:
                    sys.stdout.write(d + ' ')
                    sys.stdout.flush()
                for x in xrange(10):
                    if channels:
                        time.sleep(0.1)
                count += 1

        stoppedearly = False

        if self._showchannels:
            statthread = threading.Thread(target=stat, name="stat")
            statthread.start()

        try:
            while tests or running:
                if not done.empty() or running == self._jobs or not tests:
                    try:
                        done.get(True, 1)
                        running -= 1
                        if result and result.shouldStop:
                            stoppedearly = True
                            break
                    except queue.Empty:
                        continue
                if tests and not running == self._jobs:
                    test = tests.pop(0)
                    if self._loop:
                        if getattr(test, 'should_reload', False):
                            num_tests[0] += 1
                            tests.append(self._loadtest(test, num_tests[0]))
                        else:
                            tests.append(test)
                    if self._jobs == 1:
                        job(test, result)
                    else:
                        t = threading.Thread(
                            target=job, name=test.name, args=(test, result)
                        )
                        t.start()
                    running += 1

            # If we stop early we still need to wait on started tests to
            # finish. Otherwise, there is a race between the test completing
            # and the test's cleanup code running. This could result in the
            # test reporting incorrect results.
            if stoppedearly:
                while running:
                    try:
                        done.get(True, 1)
                        running -= 1
                    except queue.Empty:
                        continue
        except KeyboardInterrupt:
            for test in runtests:
                test.abort()

        channels = []

        return result


# Save the most recent 5 wall-clock runtimes of each test to a
# human-readable text file named .testtimes. Tests are sorted
# alphabetically, while times for each test are listed from oldest to
# newest.


def loadtimes(outputdir):
    times = []
    try:
        with open(os.path.join(outputdir, b'.testtimes')) as fp:
            for line in fp:
                m = re.match('(.*?) ([0-9. ]+)', line)
                times.append(
                    (m.group(1), [float(t) for t in m.group(2).split()])
                )
    except IOError as err:
        if err.errno != errno.ENOENT:
            raise
    return times


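# savetimes updates .testtimes with a write-then-rename: the merged timings
# are written to a temporary file in the same directory, the old file is
# unlinked (os.rename will not overwrite an existing file on Windows), and
# the temporary file is renamed into place.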
def savetimes(outputdir, result):
    saved = dict(loadtimes(outputdir))
    maxruns = 5
    skipped = {str(t[0]) for t in result.skipped}
    for tdata in result.times:
        test, real = tdata[0], tdata[3]
        if test not in skipped:
            ts = saved.setdefault(test, [])
            ts.append(real)
            ts[:] = ts[-maxruns:]

    fd, tmpname = tempfile.mkstemp(
        prefix=b'.testtimes', dir=outputdir, text=True
    )
    with os.fdopen(fd, 'w') as fp:
        for name, ts in sorted(saved.items()):
            fp.write('%s %s\n' % (name, ' '.join(['%.3f' % (t,) for t in ts])))
    timepath = os.path.join(outputdir, b'.testtimes')
    try:
        os.unlink(timepath)
    except OSError:
        pass
    try:
        os.rename(tmpname, timepath)
    except OSError:
        pass


class TextTestRunner(unittest.TextTestRunner):
    """Custom unittest test runner that uses appropriate settings."""

    def __init__(self, runner, *args, **kwargs):
        super(TextTestRunner, self).__init__(*args, **kwargs)

        self._runner = runner

        self._result = getTestResult()(
            self._runner.options, self.stream, self.descriptions, self.verbosity
        )

    def listtests(self, test):
        test = sorted(test, key=lambda t: t.name)

        self._result.onStart(test)

        for t in test:
            print(t.name)
            self._result.addSuccess(t)

        if self._runner.options.xunit:
            with open(self._runner.options.xunit, "wb") as xuf:
                self._writexunit(self._result, xuf)

        if self._runner.options.json:
            jsonpath = os.path.join(self._runner._outputdir, b'report.json')
            with open(jsonpath, 'w') as fp:
                self._writejson(self._result, fp)

        return self._result

    def run(self, test):
        self._result.onStart(test)
        test(self._result)

        failed = len(self._result.failures)
        skipped = len(self._result.skipped)
        ignored = len(self._result.ignored)

        with iolock:
            self.stream.writeln('')

            if not self._runner.options.noskips:
                for test, msg in sorted(
                    self._result.skipped, key=lambda s: s[0].name
                ):
                    formatted = 'Skipped %s: %s\n' % (test.name, msg)
                    msg = highlightmsg(formatted, self._result.color)
                    self.stream.write(msg)
            for test, msg in sorted(
                self._result.failures, key=lambda f: f[0].name
            ):
                formatted = 'Failed %s: %s\n' % (test.name, msg)
                self.stream.write(highlightmsg(formatted, self._result.color))
            for test, msg in sorted(
                self._result.errors, key=lambda e: e[0].name
            ):
                self.stream.writeln('Errored %s: %s' % (test.name, msg))

            if self._runner.options.xunit:
                with open(self._runner.options.xunit, "wb") as xuf:
                    self._writexunit(self._result, xuf)

            if self._runner.options.json:
                jsonpath = os.path.join(self._runner._outputdir, b'report.json')
                with open(jsonpath, 'w') as fp:
                    self._writejson(self._result, fp)

            self._runner._checkhglib('Tested')

            savetimes(self._runner._outputdir, self._result)

            if failed and self._runner.options.known_good_rev:
                self._bisecttests(t for t, m in self._result.failures)
            self.stream.writeln(
                '# Ran %d tests, %d skipped, %d failed.'
                % (self._result.testsRun, skipped + ignored, failed)
            )
            if failed:
                self.stream.writeln(
                    'python hash seed: %s' % os.environ['PYTHONHASHSEED']
                )
            if self._runner.options.time:
                self.printtimes(self._result.times)

            if self._runner.options.exceptions:
                exceptions = aggregateexceptions(
                    os.path.join(self._runner._outputdir, b'exceptions')
                )

                self.stream.writeln('Exceptions Report:')
                self.stream.writeln(
                    '%d total from %d frames'
                    % (exceptions['total'], len(exceptions['exceptioncounts']))
                )
                combined = exceptions['combined']
                for key in sorted(combined, key=combined.get, reverse=True):
                    frame, line, exc = key
                    totalcount, testcount, leastcount, leasttest = combined[key]

                    self.stream.writeln(
                        '%d (%d tests)\t%s: %s (%s - %d total)'
                        % (
                            totalcount,
                            testcount,
                            frame,
                            exc,
                            leasttest,
                            leastcount,
                        )
                    )

            self.stream.flush()

        return self._result

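    # With --known-good-rev, each failing test is handed to `hg bisect`:
    # the working revision is marked bad, the known-good revision good, and
    # `hg bisect --command` re-runs the test at each candidate revision to
    # locate the changeset that broke (or fixed) it.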
    def _bisecttests(self, tests):
        bisectcmd = ['hg', 'bisect']
        bisectrepo = self._runner.options.bisect_repo
        if bisectrepo:
            bisectcmd.extend(['-R', os.path.abspath(bisectrepo)])

        def pread(args):
            env = os.environ.copy()
            env['HGPLAIN'] = '1'
            p = subprocess.Popen(
                args, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=env
            )
            data = p.stdout.read()
            p.wait()
            return data

        for test in tests:
            pread(bisectcmd + ['--reset'])
            pread(bisectcmd + ['--bad', '.'])
            pread(bisectcmd + ['--good', self._runner.options.known_good_rev])
            # TODO: we probably need to forward more options
            # that alter hg's behavior inside the tests.
            opts = ''
            withhg = self._runner.options.with_hg
            if withhg:
                opts += ' --with-hg=%s ' % shellquote(_bytes2sys(withhg))
            rtc = '%s %s %s %s' % (sysexecutable, sys.argv[0], opts, test)
            data = pread(bisectcmd + ['--command', rtc])
            m = re.search(
                (
                    br'\nThe first (?P<goodbad>bad|good) revision '
                    br'is:\nchangeset: +\d+:(?P<node>[a-f0-9]+)\n.*\n'
                    br'summary: +(?P<summary>[^\n]+)\n'
                ),
                data,
                (re.MULTILINE | re.DOTALL),
            )
            if m is None:
                self.stream.writeln(
                    'Failed to identify failure point for %s' % test
                )
                continue
            dat = m.groupdict()
            verb = 'broken' if dat['goodbad'] == b'bad' else 'fixed'
            self.stream.writeln(
                '%s %s by %s (%s)'
                % (
                    test,
                    verb,
                    dat['node'].decode('ascii'),
                    dat['summary'].decode('utf8', 'ignore'),
                )
            )

    def printtimes(self, times):
        # iolock held by run
        self.stream.writeln('# Producing time report')
        times.sort(key=lambda t: (t[3]))
        cols = '%7.3f %7.3f %7.3f %7.3f %7.3f %s'
        self.stream.writeln(
            '%-7s %-7s %-7s %-7s %-7s %s'
            % ('start', 'end', 'cuser', 'csys', 'real', 'Test')
        )
        for tdata in times:
            test = tdata[0]
            cuser, csys, real, start, end = tdata[1:6]
            self.stream.writeln(cols % (start, end, cuser, csys, real, test))

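    # The xunit output maps run-tests outcomes onto the JUnit schema: output
    # mismatches become <failure> elements, skips and ignores are both
    # reported as skipped, and the errors attribute is left at 0 (see the
    # TODO below).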
    @staticmethod
    def _writexunit(result, outf):
        # See http://llg.cubic.org/docs/junit/ for a reference.
        timesd = {t[0]: t[3] for t in result.times}
        doc = minidom.Document()
        s = doc.createElement('testsuite')
        s.setAttribute('errors', "0")  # TODO
        s.setAttribute('failures', str(len(result.failures)))
        s.setAttribute('name', 'run-tests')
        s.setAttribute(
            'skipped', str(len(result.skipped) + len(result.ignored))
        )
        s.setAttribute('tests', str(result.testsRun))
        doc.appendChild(s)
        for tc in result.successes:
            t = doc.createElement('testcase')
            t.setAttribute('name', tc.name)
            tctime = timesd.get(tc.name)
            if tctime is not None:
                t.setAttribute('time', '%.3f' % tctime)
            s.appendChild(t)
        for tc, err in sorted(result.faildata.items()):
            t = doc.createElement('testcase')
            t.setAttribute('name', tc)
            tctime = timesd.get(tc)
            if tctime is not None:
                t.setAttribute('time', '%.3f' % tctime)
            # createCDATASection expects a unicode or it will
            # convert using default conversion rules, which will
            # fail if string isn't ASCII.
            err = cdatasafe(err).decode('utf-8', 'replace')
            cd = doc.createCDATASection(err)
            # Use 'failure' here instead of 'error' to match errors = 0,
            # failures = len(result.failures) in the testsuite element.
            failelem = doc.createElement('failure')
            failelem.setAttribute('message', 'output changed')
            failelem.setAttribute('type', 'output-mismatch')
            failelem.appendChild(cd)
            t.appendChild(failelem)
            s.appendChild(t)
        for tc, message in result.skipped:
            # According to the schema, 'skipped' has no attributes. So store
            # the skip message as a text node instead.
            t = doc.createElement('testcase')
            t.setAttribute('name', tc.name)
            binmessage = message.encode('utf-8')
            message = cdatasafe(binmessage).decode('utf-8', 'replace')
            cd = doc.createCDATASection(message)
            skipelem = doc.createElement('skipped')
            skipelem.appendChild(cd)
            t.appendChild(skipelem)
            s.appendChild(t)
        outf.write(doc.toprettyxml(indent=' ', encoding='utf-8'))

    @staticmethod
    def _writejson(result, outf):
        timesd = {}
        for tdata in result.times:
            test = tdata[0]
            timesd[test] = tdata[1:]

        outcome = {}
        groups = [
            ('success', ((tc, None) for tc in result.successes)),
            ('failure', result.failures),
            ('skip', result.skipped),
        ]
        for res, testcases in groups:
            for tc, __ in testcases:
                if tc.name in timesd:
                    diff = result.faildata.get(tc.name, b'')
                    try:
                        diff = diff.decode('unicode_escape')
                    except UnicodeDecodeError as e:
                        diff = '%r decoding diff, sorry' % e
                    tres = {
                        'result': res,
                        'time': ('%0.3f' % timesd[tc.name][2]),
                        'cuser': ('%0.3f' % timesd[tc.name][0]),
                        'csys': ('%0.3f' % timesd[tc.name][1]),
                        'start': ('%0.3f' % timesd[tc.name][3]),
                        'end': ('%0.3f' % timesd[tc.name][4]),
                        'diff': diff,
                    }
                else:
                    # blacklisted test
                    tres = {'result': res}

                outcome[tc.name] = tres
        jsonout = json.dumps(
            outcome, sort_keys=True, indent=4, separators=(',', ': ')
        )
        outf.writelines(("testreport =", jsonout))


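# Ordering heuristic: when timing data is available, the slowest tests start
# first so the parallel run's tail stays short; otherwise file size, scaled
# by the keyword table below, stands in for expected runtime.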
def sorttests(testdescs, previoustimes, shuffle=False):
    """Do an in-place sort of tests."""
    if shuffle:
        random.shuffle(testdescs)
        return

    if previoustimes:

        def sortkey(f):
            f = f['path']
            if f in previoustimes:
                # Use most recent time as estimate
                return -(previoustimes[f][-1])
            else:
                # Default to a rather arbitrary value of 1 second for new tests
                return -1.0

    else:
        # keywords for slow tests
        slow = {
            b'svn': 10,
            b'cvs': 10,
            b'hghave': 10,
            b'largefiles-update': 10,
            b'run-tests': 10,
            b'corruption': 10,
            b'race': 10,
            b'i18n': 10,
            b'check': 100,
            b'gendoc': 100,
            b'contrib-perf': 200,
            b'merge-combination': 100,
        }
        perf = {}

        def sortkey(f):
            # run largest tests first, as they tend to take the longest
            f = f['path']
            try:
                return perf[f]
            except KeyError:
                try:
                    val = -os.stat(f).st_size
                except OSError as e:
                    if e.errno != errno.ENOENT:
                        raise
                    perf[f] = -1e9  # file does not exist, tell early
                    return -1e9
                for kw, mul in slow.items():
                    if kw in f:
                        val *= mul
                if f.endswith(b'.py'):
                    val /= 10.0
                perf[f] = val / 1000.0
                return perf[f]

    testdescs.sort(key=sortkey)


class TestRunner(object):
    """Holds context for executing tests.

    Tests rely on a lot of state. This object holds it for them.
    """

    # Programs required to run tests.
    REQUIREDTOOLS = [
        b'diff',
        b'grep',
        b'unzip',
        b'gunzip',
        b'bunzip2',
        b'sed',
    ]

    # Maps file extensions to test class.
    TESTTYPES = [
        (b'.py', PythonTest),
        (b'.t', TTest),
    ]

    def __init__(self):
        self.options = None
        self._hgroot = None
        self._testdir = None
        self._outputdir = None
        self._hgtmp = None
        self._installdir = None
        self._bindir = None
        # a place for run-tests.py to generate the executables it needs
        self._custom_bin_dir = None
        self._pythondir = None
        # True if we had to infer the pythondir from --with-hg
        self._pythondir_inferred = False
        self._coveragefile = None
        self._createdfiles = []
        self._hgcommand = None
        self._hgpath = None
        self._portoffset = 0
        self._ports = {}

    def run(self, args, parser=None):
        """Run the test suite."""
        oldmask = os.umask(0o22)
        try:
            parser = parser or getparser()
            options = parseargs(args, parser)
            tests = [_sys2bytes(a) for a in options.tests]
            if options.test_list is not None:
                for listfile in options.test_list:
                    with open(listfile, 'rb') as f:
                        tests.extend(t for t in f.read().splitlines() if t)
            self.options = options

            self._checktools()
            testdescs = self.findtests(tests)
            if options.profile_runner:
                import statprof

                statprof.start()
            result = self._run(testdescs)
            if options.profile_runner:
                statprof.stop()
                statprof.display()
            return result

        finally:
            os.umask(oldmask)

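    # _run sets up the state every test shares: TESTDIR/HGTMP, a fixed
    # PYTHONHASHSEED, a capped Rayon thread count, and either an externally
    # supplied hg (--with-hg) or a fresh install into a temporary prefix.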
3105 def _run(self, testdescs):
3108 def _run(self, testdescs):
3106 testdir = getcwdb()
3109 testdir = getcwdb()
3107 # assume all tests in same folder for now
3110 # assume all tests in same folder for now
3108 if testdescs:
3111 if testdescs:
3109 pathname = os.path.dirname(testdescs[0]['path'])
3112 pathname = os.path.dirname(testdescs[0]['path'])
3110 if pathname:
3113 if pathname:
3111 testdir = os.path.join(testdir, pathname)
3114 testdir = os.path.join(testdir, pathname)
3112 self._testdir = osenvironb[b'TESTDIR'] = testdir
3115 self._testdir = osenvironb[b'TESTDIR'] = testdir
3113 if self.options.outputdir:
3116 if self.options.outputdir:
3114 self._outputdir = canonpath(_sys2bytes(self.options.outputdir))
3117 self._outputdir = canonpath(_sys2bytes(self.options.outputdir))
3115 else:
3118 else:
3116 self._outputdir = getcwdb()
3119 self._outputdir = getcwdb()
3117 if testdescs and pathname:
3120 if testdescs and pathname:
3118 self._outputdir = os.path.join(self._outputdir, pathname)
3121 self._outputdir = os.path.join(self._outputdir, pathname)
3119 previoustimes = {}
3122 previoustimes = {}
3120 if self.options.order_by_runtime:
3123 if self.options.order_by_runtime:
3121 previoustimes = dict(loadtimes(self._outputdir))
3124 previoustimes = dict(loadtimes(self._outputdir))
3122 sorttests(testdescs, previoustimes, shuffle=self.options.random)
3125 sorttests(testdescs, previoustimes, shuffle=self.options.random)
3123
3126
3124 if 'PYTHONHASHSEED' not in os.environ:
3127 if 'PYTHONHASHSEED' not in os.environ:
3125 # use a random python hash seed all the time
3128 # use a random python hash seed all the time
3126 # we do the randomness ourself to know what seed is used
3129 # we do the randomness ourself to know what seed is used
3127 os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32))
3130 os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32))
3128
3131
3129 # Rayon (Rust crate for multi-threading) will use all logical CPU cores
3132 # Rayon (Rust crate for multi-threading) will use all logical CPU cores
3130 # by default, causing thrashing on high-cpu-count systems.
3133 # by default, causing thrashing on high-cpu-count systems.
3131 # Setting its limit to 3 during tests should still let us uncover
3134 # Setting its limit to 3 during tests should still let us uncover
3132 # multi-threading bugs while keeping the thrashing reasonable.
3135 # multi-threading bugs while keeping the thrashing reasonable.
3133 os.environ.setdefault("RAYON_NUM_THREADS", "3")
3136 os.environ.setdefault("RAYON_NUM_THREADS", "3")

        if self.options.tmpdir:
            self.options.keep_tmpdir = True
            tmpdir = _sys2bytes(self.options.tmpdir)
            if os.path.exists(tmpdir):
                # Meaning of tmpdir has changed since 1.3: we used to create
                # HGTMP inside tmpdir; now HGTMP is tmpdir.  So fail if
                # tmpdir already exists.
                print("error: temp dir %r already exists" % tmpdir)
                return 1

            os.makedirs(tmpdir)
        else:
            d = None
            if WINDOWS:
                # without this, we get the default temp dir location, but
                # in all lowercase, which causes troubles with paths (issue3490)
                d = osenvironb.get(b'TMP', None)
            tmpdir = tempfile.mkdtemp(b'', b'hgtests.', d)

        self._hgtmp = osenvironb[b'HGTMP'] = os.path.realpath(tmpdir)

        self._custom_bin_dir = os.path.join(self._hgtmp, b'custom-bin')
        os.makedirs(self._custom_bin_dir)

        if self.options.with_hg:
            self._installdir = None
            whg = self.options.with_hg
            self._bindir = os.path.dirname(os.path.realpath(whg))
            assert isinstance(self._bindir, bytes)
            self._hgcommand = os.path.basename(whg)

            normbin = os.path.normpath(os.path.abspath(whg))
            normbin = normbin.replace(_sys2bytes(os.sep), b'/')

            # Other Python scripts in the test harness need to
            # `import mercurial`.  If `hg` is a Python script, we assume
            # the Mercurial modules are relative to its path and tell the tests
            # to load Python modules from its directory.
            with open(whg, 'rb') as fh:
                initial = fh.read(1024)

            if re.match(b'#!.*python', initial):
                self._pythondir = self._bindir
            # If it looks like our in-repo Rust binary, use the source root.
            # This is a bit hacky.  But rhg is still not supported outside the
            # source directory.  So until it is, do the simple thing.
            elif re.search(b'/rust/target/[^/]+/hg', normbin):
                self._pythondir = os.path.dirname(self._testdir)
            # Fall back to the legacy behavior.
            else:
                self._pythondir = self._bindir
            self._pythondir_inferred = True

        else:
            self._installdir = os.path.join(self._hgtmp, b"install")
            self._bindir = os.path.join(self._installdir, b"bin")
            self._hgcommand = b'hg'
            self._pythondir = os.path.join(self._installdir, b"lib", b"python")
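        # (illustration) the two branches above give two layouts:
        #   --with-hg=/usr/bin/hg -> _bindir is /usr/bin, no temporary install
        #   default               -> _installdir is $HGTMP/install, with hg
        #                            placed under its bin/ and lib/python dirs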

        # Force the use of hg.exe instead of relying on MSYS to recognize hg is
        # a python script and feed it to python.exe.  Legacy stdio is force
        # enabled by hg.exe, and this is a more realistic way to launch hg
        # anyway.
        if WINDOWS and not self._hgcommand.endswith(b'.exe'):
            self._hgcommand += b'.exe'

        real_hg = os.path.join(self._bindir, self._hgcommand)
        osenvironb[b'HGTEST_REAL_HG'] = real_hg
        # set CHGHG, then replace "hg" command by "chg"
        chgbindir = self._bindir
        if self.options.chg or self.options.with_chg:
            osenvironb[b'CHG_INSTALLED_AS_HG'] = b'1'
            osenvironb[b'CHGHG'] = real_hg
        else:
            # drop flag for hghave
            osenvironb.pop(b'CHG_INSTALLED_AS_HG', None)
        if self.options.chg:
            self._hgcommand = b'chg'
        elif self.options.with_chg:
            chgbindir = os.path.dirname(os.path.realpath(self.options.with_chg))
            self._hgcommand = os.path.basename(self.options.with_chg)
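        # (illustration) `--chg` builds and installs chg next to the temporary
        # hg and runs every test through it, while `--with-chg=/path/to/chg`
        # reuses an existing binary; both set CHGHG so chg spawns the right hg.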

        # configure fallback and replace "hg" command by "rhg"
        rhgbindir = self._bindir
        if self.options.rhg or self.options.with_rhg:
            # Affects hghave.py
            osenvironb[b'RHG_INSTALLED_AS_HG'] = b'1'
            # Affects configuration.  Alternatives would be setting the
            # configuration through `$HGRCPATH`, but some tests override that,
            # or changing `_hgcommand` to include `--config`, but that disrupts
            # tests that print command lines and check expected output.
            osenvironb[b'RHG_ON_UNSUPPORTED'] = b'fallback'
            osenvironb[b'RHG_FALLBACK_EXECUTABLE'] = real_hg
        else:
            # drop flag for hghave
            osenvironb.pop(b'RHG_INSTALLED_AS_HG', None)
        if self.options.rhg:
            self._hgcommand = b'rhg'
        elif self.options.with_rhg:
            rhgbindir = os.path.dirname(os.path.realpath(self.options.with_rhg))
            self._hgcommand = os.path.basename(self.options.with_rhg)
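        # (illustration) with the fallback configuration above, an rhg
        # invocation that hits an unsupported command re-executes the full
        # Python hg (RHG_FALLBACK_EXECUTABLE), so .t output stays comparable
        # between rhg runs and plain hg runs.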

        if self.options.pyoxidized:
            testdir = os.path.dirname(_sys2bytes(canonpath(sys.argv[0])))
            reporootdir = os.path.dirname(testdir)
            # XXX we should ideally install stuff instead of using the local build
            bin_path = (
                b'build/pyoxidizer/x86_64-pc-windows-msvc/release/app/hg.exe'
            )
            full_path = os.path.join(reporootdir, bin_path)
            self._hgcommand = full_path
            # Affects hghave.py
            osenvironb[b'PYOXIDIZED_INSTALLED_AS_HG'] = b'1'
        else:
            osenvironb.pop(b'PYOXIDIZED_INSTALLED_AS_HG', None)

        osenvironb[b"BINDIR"] = self._bindir
        osenvironb[b"PYTHON"] = PYTHON

        fileb = _sys2bytes(__file__)
        runtestdir = os.path.abspath(os.path.dirname(fileb))
        osenvironb[b'RUNTESTDIR'] = runtestdir
        if PYTHON3:
            sepb = _sys2bytes(os.pathsep)
        else:
            sepb = os.pathsep
        path = [self._bindir, runtestdir] + osenvironb[b"PATH"].split(sepb)
        if os.path.islink(__file__):
            # test helper will likely be at the end of the symlink
            realfile = os.path.realpath(fileb)
            realdir = os.path.abspath(os.path.dirname(realfile))
            path.insert(2, realdir)
        if chgbindir != self._bindir:
            path.insert(1, chgbindir)
        if rhgbindir != self._bindir:
            path.insert(1, rhgbindir)
        if self._testdir != runtestdir:
            path = [self._testdir] + path
        path = [self._custom_bin_dir] + path
        osenvironb[b"PATH"] = sepb.join(path)
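        # (illustration) for a default run the resulting lookup order is
        # roughly: the custom-bin dir, the test dir (when distinct), the hg
        # bindir, any rhg/chg dirs, this script's dir, then the original PATH.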

        # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
        # can run .../tests/run-tests.py test-foo where test-foo
        # adds an extension to HGRC.  Also include run-tests.py's directory to
        # import modules like heredoctest.
        pypath = [self._pythondir, self._testdir, runtestdir]
        # We have to augment PYTHONPATH, rather than simply replacing
        # it, in case external libraries are only available via current
        # PYTHONPATH.  (In particular, the Subversion bindings on OS X
        # are in /opt/subversion.)
        oldpypath = osenvironb.get(IMPL_PATH)
        if oldpypath:
            pypath.append(oldpypath)
        osenvironb[IMPL_PATH] = sepb.join(pypath)
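        # (illustration) with an installed hg this yields something like
        #   PYTHONPATH=$HGTMP/install/lib/python:$TESTDIR:$RUNTESTDIR:<old>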

        if self.options.pure:
            os.environ["HGTEST_RUN_TESTS_PURE"] = "--pure"
            os.environ["HGMODULEPOLICY"] = "py"
        if self.options.rust:
            os.environ["HGMODULEPOLICY"] = "rust+c"
        if self.options.no_rust:
            current_policy = os.environ.get("HGMODULEPOLICY", "")
            if current_policy.startswith("rust+"):
                os.environ["HGMODULEPOLICY"] = current_policy[len("rust+") :]
            os.environ.pop("HGWITHRUSTEXT", None)
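        # (illustration) HGMODULEPOLICY selects the module implementation; the
        # values used here are "py" (pure Python), "rust+c" (Rust plus C
        # extensions), and whatever remains once the "rust+" prefix is
        # stripped for --no-rust (typically "c").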

        if self.options.allow_slow_tests:
            os.environ["HGTEST_SLOW"] = "slow"
        elif 'HGTEST_SLOW' in os.environ:
            del os.environ['HGTEST_SLOW']

        self._coveragefile = os.path.join(self._testdir, b'.coverage')

        if self.options.exceptions:
            exceptionsdir = os.path.join(self._outputdir, b'exceptions')
            try:
                os.makedirs(exceptionsdir)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise

            # Remove all existing exception reports.
            for f in os.listdir(exceptionsdir):
                os.unlink(os.path.join(exceptionsdir, f))

            osenvironb[b'HGEXCEPTIONSDIR'] = exceptionsdir
            logexceptions = os.path.join(self._testdir, b'logexceptions.py')
            self.options.extra_config_opt.append(
                'extensions.logexceptions=%s' % logexceptions.decode('utf-8')
            )

        vlog("# Using TESTDIR", _bytes2sys(self._testdir))
        vlog("# Using RUNTESTDIR", _bytes2sys(osenvironb[b'RUNTESTDIR']))
        vlog("# Using HGTMP", _bytes2sys(self._hgtmp))
        vlog("# Using PATH", os.environ["PATH"])
        vlog(
            "# Using",
            _bytes2sys(IMPL_PATH),
            _bytes2sys(osenvironb[IMPL_PATH]),
        )
        vlog("# Writing to directory", _bytes2sys(self._outputdir))

        try:
            return self._runtests(testdescs) or 0
        finally:
            time.sleep(0.1)
            self._cleanup()

    def findtests(self, args):
        """Finds possible test files from arguments.

        If you wish to inject custom tests into the test harness, this would
        be a good function to monkeypatch or override in a derived class.
        """
        if not args:
            if self.options.changed:
                proc = Popen4(
                    b'hg st --rev "%s" -man0 .'
                    % _sys2bytes(self.options.changed),
                    None,
                    0,
                )
                stdout, stderr = proc.communicate()
                args = stdout.strip(b'\0').split(b'\0')
            else:
                args = os.listdir(b'.')

        expanded_args = []
        for arg in args:
            if os.path.isdir(arg):
                if not arg.endswith(b'/'):
                    arg += b'/'
                expanded_args.extend([arg + a for a in os.listdir(arg)])
            else:
                expanded_args.append(arg)
        args = expanded_args

        # Matches "test-foo.t" or "test-foo.py", optionally followed by one
        # or more "#case" qualifiers.
        testcasepattern = re.compile(
            br'([\w-]+\.(t|py))(?:#([a-zA-Z0-9_\-.#]+))'
        )
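        # (illustration) the pattern accepts case-qualified names such as
        #   test-foo.t#case1  or  test-bar.t#case1#case2
        # where each `#`-separated component selects one test-case dimension.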
        tests = []
        for t in args:
            case = []

            if not (
                os.path.basename(t).startswith(b'test-')
                and (t.endswith(b'.py') or t.endswith(b'.t'))
            ):
                m = testcasepattern.match(os.path.basename(t))
                if m is not None:
                    t_basename, _ext, casestr = m.groups()
                    t = os.path.join(os.path.dirname(t), t_basename)
                    if casestr:
                        case = casestr.split(b'#')
                else:
                    continue

            if t.endswith(b'.t'):
                # .t file may contain multiple test cases
                casedimensions = parsettestcases(t)
                if casedimensions:
                    cases = []

                    def addcases(case, casedimensions):
                        if not casedimensions:
                            cases.append(case)
                        else:
                            for c in casedimensions[0]:
                                addcases(case + [c], casedimensions[1:])

                    addcases([], casedimensions)
                    if case and case in cases:
                        cases = [case]
                    elif case:
                        # Ignore invalid cases
                        cases = []
                    else:
                        pass
                    tests += [{'path': t, 'case': c} for c in sorted(cases)]
                else:
                    tests.append({'path': t})
            else:
                tests.append({'path': t})

        if self.options.retest:
            retest_args = []
            for test in tests:
                errpath = self._geterrpath(test)
                if os.path.exists(errpath):
                    retest_args.append(test)
            tests = retest_args
        return tests

    def _runtests(self, testdescs):
        def _reloadtest(test, i):
            # convert a test back to its description dict
            desc = {'path': test.path}
            case = getattr(test, '_case', [])
            if case:
                desc['case'] = case
            return self._gettest(desc, i)

        try:
            if self.options.restart:
                orig = list(testdescs)
                while testdescs:
                    desc = testdescs[0]
                    errpath = self._geterrpath(desc)
                    if os.path.exists(errpath):
                        break
                    testdescs.pop(0)
                if not testdescs:
                    print("running all tests")
                    testdescs = orig

            tests = [self._gettest(d, i) for i, d in enumerate(testdescs)]
            num_tests = len(tests) * self.options.runs_per_test

            jobs = min(num_tests, self.options.jobs)

            failed = False
            kws = self.options.keywords
            if kws is not None and PYTHON3:
                kws = kws.encode('utf-8')

            suite = TestSuite(
                self._testdir,
                jobs=jobs,
                whitelist=self.options.whitelisted,
                blacklist=self.options.blacklist,
                keywords=kws,
                loop=self.options.loop,
                runs_per_test=self.options.runs_per_test,
                showchannels=self.options.showchannels,
                tests=tests,
                loadtest=_reloadtest,
            )
            verbosity = 1
            if self.options.list_tests:
                verbosity = 0
            elif self.options.verbose:
                verbosity = 2
            runner = TextTestRunner(self, verbosity=verbosity)

            if self.options.list_tests:
                result = runner.listtests(suite)
            else:
                self._usecorrectpython()
                if self._installdir:
                    self._installhg()
                    self._checkhglib("Testing")
                if self.options.chg:
                    assert self._installdir
                    self._installchg()
                if self.options.rhg:
                    assert self._installdir
                    self._installrhg()
                elif self.options.pyoxidized:
                    self._build_pyoxidized()
                self._use_correct_mercurial()

                log(
                    'running %d tests using %d parallel processes'
                    % (num_tests, jobs)
                )

                result = runner.run(suite)

                if result.failures or result.errors:
                    failed = True

                result.onEnd()

            if self.options.anycoverage:
                self._outputcoverage()
        except KeyboardInterrupt:
            failed = True
            print("\ninterrupted!")

        if failed:
            return 1
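    # NB: _runtests() returns 1 on failure and falls through (returning None)
    # on success; the caller normalizes this with `self._runtests(...) or 0`.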

    def _geterrpath(self, test):
        # test['path'] is a relative path
        if 'case' in test:
            # for multi-dimensional test cases
            casestr = b'#'.join(test['case'])
            errpath = b'%s#%s.err' % (test['path'], casestr)
        else:
            errpath = b'%s.err' % test['path']
        if self.options.outputdir:
            self._outputdir = canonpath(_sys2bytes(self.options.outputdir))
            errpath = os.path.join(self._outputdir, errpath)
        return errpath
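    # (illustration) e.g. {'path': b'test-foo.t', 'case': [b'a', b'b']}
    # maps to "test-foo.t#a#b.err" (relative to --outputdir when given).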

    def _getport(self, count):
        port = self._ports.get(count)  # do we have a cached entry?
        if port is None:
            portneeded = 3
            # after 100 tries we give up and let the test report the failure
            for tries in xrange(100):
                allfree = True
                port = self.options.port + self._portoffset
                for idx in xrange(portneeded):
                    if not checkportisavailable(port + idx):
                        allfree = False
                        break
                self._portoffset += portneeded
                if allfree:
                    break
            self._ports[count] = port
        return port
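    # (illustration) every test reserves a block of 3 consecutive ports, which
    # the test itself normally sees as $HGPORT, $HGPORT1 and $HGPORT2.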

    def _gettest(self, testdesc, count):
        """Obtain a Test by looking at its filename.

        Returns a Test instance.  The Test may not be runnable if it doesn't
        map to a known type.
        """
        path = testdesc['path']
        lctest = path.lower()
        testcls = Test

        for ext, cls in self.TESTTYPES:
            if lctest.endswith(ext):
                testcls = cls
                break

        refpath = os.path.join(getcwdb(), path)
        tmpdir = os.path.join(self._hgtmp, b'child%d' % count)

        # extra keyword parameters. 'case' is used by .t tests
        kwds = {k: testdesc[k] for k in ['case'] if k in testdesc}

        t = testcls(
            refpath,
            self._outputdir,
            tmpdir,
            keeptmpdir=self.options.keep_tmpdir,
            debug=self.options.debug,
            first=self.options.first,
            timeout=self.options.timeout,
            startport=self._getport(count),
            extraconfigopts=self.options.extra_config_opt,
            shell=self.options.shell,
            hgcommand=self._hgcommand,
            usechg=bool(self.options.with_chg or self.options.chg),
            chgdebug=self.options.chg_debug,
            useipv6=useipv6,
            **kwds
        )
        t.should_reload = True
        return t
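    # (illustration) TESTTYPES normally maps b'.py' to PythonTest and b'.t'
    # to TTest; anything else stays a bare (non-runnable) Test instance.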

    def _cleanup(self):
        """Clean up state from this test invocation."""
        if self.options.keep_tmpdir:
            return

        vlog("# Cleaning up HGTMP", _bytes2sys(self._hgtmp))
        shutil.rmtree(self._hgtmp, True)
        for f in self._createdfiles:
            try:
                os.remove(f)
            except OSError:
                pass

    def _usecorrectpython(self):
        """Configure the environment to use the appropriate Python in tests."""
        # Tests must use the same interpreter as us or bad things will happen.
        if WINDOWS and PYTHON3:
            pyexe_names = [b'python', b'python3', b'python.exe']
        elif WINDOWS:
            pyexe_names = [b'python', b'python.exe']
        elif PYTHON3:
            pyexe_names = [b'python', b'python3']
        else:
            pyexe_names = [b'python', b'python2']

        # os.symlink() is a thing with py3 on Windows, but it requires
        # Administrator rights.
        if not WINDOWS and getattr(os, 'symlink', None):
            msg = "# Making python executable in test path a symlink to '%s'"
            msg %= sysexecutable
            vlog(msg)
            for pyexename in pyexe_names:
                mypython = os.path.join(self._custom_bin_dir, pyexename)
                try:
                    if os.readlink(mypython) == sysexecutable:
                        continue
                    os.unlink(mypython)
                except OSError as err:
                    if err.errno != errno.ENOENT:
                        raise
                if self._findprogram(pyexename) != sysexecutable:
                    try:
                        os.symlink(sysexecutable, mypython)
                        self._createdfiles.append(mypython)
                    except OSError as err:
                        # child processes may race, which is harmless
                        if err.errno != errno.EEXIST:
                            raise
        elif WINDOWS and not os.getenv('MSYSTEM'):
            raise AssertionError('cannot run test on Windows without MSYSTEM')
        else:
            # Generate an explicit file instead of a symlink
            #
            # This is especially important as Windows doesn't have
            # `python3.exe`, and MSYS cannot understand the reparse point with
            # that name provided by Microsoft.  Create a simple script on PATH
            # with that name that delegates to the py3 launcher so the shebang
            # lines work.
            esc_executable = _sys2bytes(shellquote(sysexecutable))
            for pyexename in pyexe_names:
                stub_exec_path = os.path.join(self._custom_bin_dir, pyexename)
                with open(stub_exec_path, 'wb') as f:
                    f.write(b'#!/bin/sh\n')
                    f.write(b'%s "$@"\n' % esc_executable)

        if WINDOWS:
            if not PYTHON3:
                # let's try to build a valid python3 executable for the
                # scripts that require it.
                py3exe_name = os.path.join(self._custom_bin_dir, b'python3')
                with open(py3exe_name, 'wb') as f:
                    f.write(b'#!/bin/sh\n')
                    f.write(b'py -3 "$@"\n')

            # adjust the path to make sure the main python finds its own dll
            path = os.environ['PATH'].split(os.pathsep)
            main_exec_dir = os.path.dirname(sysexecutable)
            extra_paths = [_bytes2sys(self._custom_bin_dir), main_exec_dir]

            # Binaries installed by pip into the user area like pylint.exe may
            # not be in PATH by default.
            appdata = os.environ.get('APPDATA')
            vi = sys.version_info
            if appdata is not None:
                python_dir = 'Python%d%d' % (vi[0], vi[1])
                scripts_path = [appdata, 'Python', python_dir, 'Scripts']
                if not PYTHON3:
                    scripts_path = [appdata, 'Python', 'Scripts']
                scripts_dir = os.path.join(*scripts_path)
                extra_paths.append(scripts_dir)

            os.environ['PATH'] = os.pathsep.join(extra_paths + path)
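    # (illustration) the generated stub is just a shell wrapper, e.g.:
    #   #!/bin/sh
    #   '/usr/bin/python3' "$@"
    # assuming sysexecutable is /usr/bin/python3.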

    def _use_correct_mercurial(self):
        target_exec = os.path.join(self._custom_bin_dir, b'hg')
        if self._hgcommand != b'hg':
            # shutil.which only accepts bytes from Python 3.8 onward
            real_exec = which(self._hgcommand)
            if real_exec is None:
                raise ValueError(
                    'could not find exec path for "%s"'
                    % _bytes2sys(self._hgcommand)
                )
            if real_exec == target_exec:
                # do not overwrite something with itself
                return
            if WINDOWS:
                with open(target_exec, 'wb') as f:
                    f.write(b'#!/bin/sh\n')
                    escaped_exec = shellquote(_bytes2sys(real_exec))
                    f.write(b'%s "$@"\n' % _sys2bytes(escaped_exec))
            else:
                os.symlink(real_exec, target_exec)
            self._createdfiles.append(target_exec)

    def _installhg(self):
        """Install hg into the test environment.

        This will also configure hg with the appropriate testing settings.
        """
        vlog("# Performing temporary installation of HG")
        installerrs = os.path.join(self._hgtmp, b"install.err")
        compiler = ''
        if self.options.compiler:
            compiler = '--compiler ' + self.options.compiler
        setup_opts = b""
        if self.options.pure:
            setup_opts = b"--pure"
        elif self.options.rust:
            setup_opts = b"--rust"
        elif self.options.no_rust:
            setup_opts = b"--no-rust"

        # Run installer in hg root
        script = os.path.realpath(sys.argv[0])
        exe = sysexecutable
        if PYTHON3:
            compiler = _sys2bytes(compiler)
            script = _sys2bytes(script)
            exe = _sys2bytes(exe)
        hgroot = os.path.dirname(os.path.dirname(script))
        self._hgroot = hgroot
        os.chdir(hgroot)
        nohome = b'--home=""'
        if WINDOWS:
            # The --home="" trick works only on OSes where os.sep == '/'
            # because of a distutils convert_path() fast path.  Avoid it at
            # least on Windows for now, deal with .pydistutils.cfg bugs
            # when they happen.
            nohome = b''
        cmd = (
            b'"%(exe)s" setup.py %(setup_opts)s clean --all'
            b' build %(compiler)s --build-base="%(base)s"'
            b' install --force --prefix="%(prefix)s"'
            b' --install-lib="%(libdir)s"'
            b' --install-scripts="%(bindir)s" %(nohome)s >%(logfile)s 2>&1'
            % {
                b'exe': exe,
                b'setup_opts': setup_opts,
                b'compiler': compiler,
                b'base': os.path.join(self._hgtmp, b"build"),
                b'prefix': self._installdir,
                b'libdir': self._pythondir,
                b'bindir': self._bindir,
                b'nohome': nohome,
                b'logfile': installerrs,
            }
        )

        # setuptools requires install directories to exist.
        def makedirs(p):
            try:
                os.makedirs(p)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise

        makedirs(self._pythondir)
        makedirs(self._bindir)

        vlog("# Running", cmd.decode("utf-8"))
        if subprocess.call(_bytes2sys(cmd), shell=True) == 0:
            if not self.options.verbose:
                try:
                    os.remove(installerrs)
                except OSError as e:
                    if e.errno != errno.ENOENT:
                        raise
        else:
            with open(installerrs, 'rb') as f:
                for line in f:
                    if PYTHON3:
                        sys.stdout.buffer.write(line)
                    else:
                        sys.stdout.write(line)
            sys.exit(1)
        os.chdir(self._testdir)

        hgbat = os.path.join(self._bindir, b'hg.bat')
        if os.path.isfile(hgbat):
            # hg.bat expects to be put in bin/scripts while run-tests.py's
            # installation layout puts it in bin/ directly.  Fix it.
            with open(hgbat, 'rb') as f:
                data = f.read()
            if br'"%~dp0..\python" "%~dp0hg" %*' in data:
                data = data.replace(
                    br'"%~dp0..\python" "%~dp0hg" %*',
                    b'"%~dp0python" "%~dp0hg" %*',
                )
                with open(hgbat, 'wb') as f:
                    f.write(data)
            else:
                print('WARNING: cannot fix hg.bat reference to python.exe')

        if self.options.anycoverage:
            custom = os.path.join(
                osenvironb[b'RUNTESTDIR'], b'sitecustomize.py'
            )
            target = os.path.join(self._pythondir, b'sitecustomize.py')
            vlog('# Installing coverage trigger to %s' % target)
            shutil.copyfile(custom, target)
            rc = os.path.join(self._testdir, b'.coveragerc')
            vlog('# Installing coverage rc to %s' % rc)
            osenvironb[b'COVERAGE_PROCESS_START'] = rc
            covdir = os.path.join(self._installdir, b'..', b'coverage')
            try:
                os.mkdir(covdir)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise

            osenvironb[b'COVERAGE_DIR'] = covdir

    def _checkhglib(self, verb):
        """Ensure that the 'mercurial' package imported by python is
        the one we expect it to be.  If not, print a warning to stderr."""
        if self._pythondir_inferred:
            # The pythondir has been inferred from the --with-hg flag.
            # We cannot expect anything sensible here.
            return
        expecthg = os.path.join(self._pythondir, b'mercurial')
        actualhg = self._gethgpath()
        if os.path.abspath(actualhg) != os.path.abspath(expecthg):
            sys.stderr.write(
                'warning: %s with unexpected mercurial lib: %s\n'
                ' (expected %s)\n' % (verb, actualhg, expecthg)
            )

    def _gethgpath(self):
        """Return the path to the mercurial package that is actually found by
        the current Python interpreter."""
        if self._hgpath is not None:
            return self._hgpath

        cmd = b'"%s" -c "import mercurial; print (mercurial.__path__[0])"'
        cmd = cmd % PYTHON
        if PYTHON3:
            cmd = _bytes2sys(cmd)

        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
        out, err = p.communicate()

        self._hgpath = out.strip()

        return self._hgpath

    def _installchg(self):
        """Install chg into the test environment"""
        vlog('# Performing temporary installation of CHG')
        assert os.path.dirname(self._bindir) == self._installdir
        assert self._hgroot, 'must be called after _installhg()'
        cmd = b'"%(make)s" clean install PREFIX="%(prefix)s"' % {
            b'make': b'make',  # TODO: switch by option or environment?
            b'prefix': self._installdir,
        }
        cwd = os.path.join(self._hgroot, b'contrib', b'chg')
        vlog("# Running", cmd)
        proc = subprocess.Popen(
            cmd,
            shell=True,
            cwd=cwd,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
        )
        out, _err = proc.communicate()
        if proc.returncode != 0:
            if PYTHON3:
                sys.stdout.buffer.write(out)
            else:
                sys.stdout.write(out)
            sys.exit(1)

    def _installrhg(self):
        """Install rhg into the test environment"""
        vlog('# Performing temporary installation of rhg')
        assert os.path.dirname(self._bindir) == self._installdir
        assert self._hgroot, 'must be called after _installhg()'
        cmd = b'"%(make)s" install-rhg PREFIX="%(prefix)s"' % {
            b'make': b'make',  # TODO: switch by option or environment?
            b'prefix': self._installdir,
        }
        cwd = self._hgroot
        vlog("# Running", cmd)
        proc = subprocess.Popen(
            cmd,
            shell=True,
            cwd=cwd,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
        )
        out, _err = proc.communicate()
        if proc.returncode != 0:
            if PYTHON3:
                sys.stdout.buffer.write(out)
            else:
                sys.stdout.write(out)
            sys.exit(1)

    def _build_pyoxidized(self):
        """build a pyoxidized version of mercurial into the test environment

        Ideally this function would be `install_pyoxidizer` and would both
        build and install pyoxidizer.  However we are starting small to get
        a pyoxidizer-built binary into testing quickly.
        """
        vlog('# build a pyoxidized version of Mercurial')
        assert os.path.dirname(self._bindir) == self._installdir
        assert self._hgroot, 'must be called after _installhg()'
        cmd = b'"%(make)s" pyoxidizer-windows-tests' % {
            b'make': b'make',
        }
        cwd = self._hgroot
        vlog("# Running", cmd)
        proc = subprocess.Popen(
            _bytes2sys(cmd),
            shell=True,
            cwd=_bytes2sys(cwd),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
        )
        out, _err = proc.communicate()
        if proc.returncode != 0:
            if PYTHON3:
                sys.stdout.buffer.write(out)
            else:
                sys.stdout.write(out)
            sys.exit(1)

    def _outputcoverage(self):
        """Produce code coverage output."""
        import coverage

        coverage = coverage.coverage

        vlog('# Producing coverage report')
        # chdir is the easiest way to get short, relative paths in the
        # output.
        os.chdir(self._hgroot)
        covdir = os.path.join(_bytes2sys(self._installdir), '..', 'coverage')
        cov = coverage(data_file=os.path.join(covdir, 'cov'))

        # Map install directory paths back to source directory.
        cov.config.paths['srcdir'] = ['.', _bytes2sys(self._pythondir)]

        cov.combine()

        omit = [
            _bytes2sys(os.path.join(x, b'*'))
            for x in [self._bindir, self._testdir]
        ]
        cov.report(ignore_errors=True, omit=omit)

        if self.options.htmlcov:
            htmldir = os.path.join(_bytes2sys(self._outputdir), 'htmlcov')
            cov.html_report(directory=htmldir, omit=omit)
        if self.options.annotate:
            adir = os.path.join(_bytes2sys(self._outputdir), 'annotated')
            if not os.path.isdir(adir):
                os.mkdir(adir)
            cov.annotate(directory=adir, omit=omit)

    def _findprogram(self, program):
        """Search PATH for an executable program"""
        dpb = _sys2bytes(os.defpath)
        sepb = _sys2bytes(os.pathsep)
        for p in osenvironb.get(b'PATH', dpb).split(sepb):
            name = os.path.join(p, program)
            if WINDOWS or os.access(name, os.X_OK):
                return _bytes2sys(name)
        return None

    def _checktools(self):
        """Ensure tools required to run tests are present."""
        for p in self.REQUIREDTOOLS:
            if WINDOWS and not p.endswith(b'.exe'):
                p += b'.exe'
            found = self._findprogram(p)
            p = p.decode("utf-8")
            if found:
                vlog("# Found prerequisite", p, "at", found)
            else:
                print("WARNING: Did not find prerequisite tool: %s " % p)


def aggregateexceptions(path):
    exceptioncounts = collections.Counter()
    testsbyfailure = collections.defaultdict(set)
    failuresbytest = collections.defaultdict(set)

    for f in os.listdir(path):
        with open(os.path.join(path, f), 'rb') as fh:
            data = fh.read().split(b'\0')
            if len(data) != 5:
                continue

            exc, mainframe, hgframe, hgline, testname = data
            exc = exc.decode('utf-8')
            mainframe = mainframe.decode('utf-8')
            hgframe = hgframe.decode('utf-8')
            hgline = hgline.decode('utf-8')
            testname = testname.decode('utf-8')

            key = (hgframe, hgline, exc)
            exceptioncounts[key] += 1
            testsbyfailure[key].add(testname)
            failuresbytest[testname].add(key)

    # Find the test with the fewest failures for each failure.
    leastfailing = {}
    for key, tests in testsbyfailure.items():
        fewesttest = None
        fewestcount = 99999999
        for test in sorted(tests):
            if len(failuresbytest[test]) < fewestcount:
                fewesttest = test
                fewestcount = len(failuresbytest[test])

        leastfailing[key] = (fewestcount, fewesttest)

    # Create a combined counter so we can sort by total occurrences and
    # impacted tests.
    combined = {}
    for key in exceptioncounts:
        combined[key] = (
            exceptioncounts[key],
            len(testsbyfailure[key]),
            leastfailing[key][0],
            leastfailing[key][1],
        )

    return {
        'exceptioncounts': exceptioncounts,
        'total': sum(exceptioncounts.values()),
        'combined': combined,
        'leastfailing': leastfailing,
        'byfailure': testsbyfailure,
        'bytest': failuresbytest,
    }


4048 if __name__ == '__main__':
4051 if __name__ == '__main__':
4049 if WINDOWS and not os.getenv('MSYSTEM'):
4052 if WINDOWS and not os.getenv('MSYSTEM'):
4050 print('cannot run test on Windows without MSYSTEM', file=sys.stderr)
4053 print('cannot run test on Windows without MSYSTEM', file=sys.stderr)
4051 print(
4054 print(
4052 '(if you need to do so contact the mercurial devs: '
4055 '(if you need to do so contact the mercurial devs: '
4053 'mercurial@mercurial-scm.org)',
4056 'mercurial@mercurial-scm.org)',
4054 file=sys.stderr,
4057 file=sys.stderr,
4055 )
4058 )
4056 sys.exit(255)
4059 sys.exit(255)
4057
4060
4058 runner = TestRunner()
4061 runner = TestRunner()
4059
4062
4060 try:
4063 try:
4061 import msvcrt
4064 import msvcrt
4062
4065
4063 msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
4066 msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
4064 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
4067 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
4065 msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
4068 msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
4066 except ImportError:
4069 except ImportError:
4067 pass
4070 pass
4068
4071
4069 sys.exit(runner.run(sys.argv[1:]))
4072 sys.exit(runner.run(sys.argv[1:]))
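
For orientation, here is a minimal, hypothetical sketch of how the dictionary
returned by aggregateexceptions() above could be turned into a report; the
report() helper is illustrative only and is not part of run-tests.py:

def report(path):
    # Sort the 'combined' mapping so the most frequent failures, then the
    # most widely impacting ones, come first.
    agg = aggregateexceptions(path)
    rows = sorted(
        agg['combined'].items(),
        key=lambda kv: (kv[1][0], kv[1][1]),  # (occurrences, #tests)
        reverse=True,
    )
    print('%d total exceptions' % agg['total'])
    for (frame, line, exc), (count, ntests, fewest, test) in rows:
        # 'test' is the test with the fewest distinct failures that still
        # reproduces this one, i.e. the cheapest reproducer.
        print('%6d  %s (%s) seen in %d tests; cheapest repro: %s'
              % (count, exc, frame, ntests, test))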
@@ -1,539 +1,539 b''
#require chg

  $ mkdir log
  $ cp $HGRCPATH $HGRCPATH.unconfigured
  $ cat <<'EOF' >> $HGRCPATH
  > [cmdserver]
  > log = $TESTTMP/log/server.log
  > max-log-files = 1
  > max-log-size = 10 kB
  > EOF
  $ cp $HGRCPATH $HGRCPATH.orig

  $ filterlog () {
  >   sed -e 's!^[0-9/]* [0-9:]* ([0-9]*)>!YYYY/MM/DD HH:MM:SS (PID)>!' \
  >       -e 's!\(setprocname\|received fds\|setenv\): .*!\1: ...!' \
  >       -e 's!\(confighash\|mtimehash\) = [0-9a-f]*!\1 = ...!g' \
  >       -e 's!\(in \)[0-9.]*s\b!\1 ...s!g' \
  >       -e 's!\(pid\)=[0-9]*!\1=...!g' \
  >       -e 's!\(/server-\)[0-9a-f]*!\1...!g'
  > }

init repo

  $ chg init foo
  $ cd foo

ill-formed config

  $ chg status
  $ echo '=brokenconfig' >> $HGRCPATH
  $ chg status
  config error at * =brokenconfig (glob)
  [30]

  $ cp $HGRCPATH.orig $HGRCPATH

long socket path

  $ sockpath=$TESTTMP/this/path/should/be/longer/than/one-hundred-and-seven/characters/where/107/is/the/typical/size/limit/of/unix-domain-socket
  $ mkdir -p $sockpath
  $ bakchgsockname=$CHGSOCKNAME
  $ CHGSOCKNAME=$sockpath/server
  $ export CHGSOCKNAME
  $ chg root
  $TESTTMP/foo
  $ rm -rf $sockpath
  $ CHGSOCKNAME=$bakchgsockname
  $ export CHGSOCKNAME

  $ cd ..

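The 107-character figure above comes from the size of sun_path in struct
sockaddr_un (108 bytes including the trailing NUL on typical Linux systems).
A minimal sketch, not part of the test, that probes the limit from Python:

import os
import socket
import tempfile

# Build a socket path longer than the typical sun_path limit and observe
# that bind() refuses it; CPython reports "AF_UNIX path too long" before
# even reaching the kernel. The path layout here is arbitrary.
base = tempfile.mkdtemp()
longpath = os.path.join(base, 'x' * 120, 'server')
os.makedirs(os.path.dirname(longpath), exist_ok=True)

s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
try:
    s.bind(longpath)
except OSError as e:
    print('bind failed:', e)
finally:
    s.close()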
editor
------

  $ cat >> pushbuffer.py <<EOF
  > def reposetup(ui, repo):
  >     repo.ui.pushbuffer(subproc=True)
  > EOF

  $ chg init editor
  $ cd editor

by default, system() should be redirected to the client:

  $ touch foo
  $ CHGDEBUG= HGEDITOR=cat chg ci -Am channeled --edit 2>&1 \
  > | egrep "HG:|run 'cat"
  chg: debug: * run 'cat "*"' at '$TESTTMP/editor' (glob)
  HG: Enter commit message. Lines beginning with 'HG:' are removed.
  HG: Leave message empty to abort commit.
  HG: --
  HG: user: test
  HG: branch 'default'
  HG: added foo

but no redirection should be made if output is captured:

  $ touch bar
  $ CHGDEBUG= HGEDITOR=cat chg ci -Am bufferred --edit \
  > --config extensions.pushbuffer="$TESTTMP/pushbuffer.py" 2>&1 \
  > | egrep "HG:|run 'cat"
  [1]

check that commit commands succeeded:

  $ hg log -T '{rev}:{desc}\n'
  1:bufferred
  0:channeled

  $ cd ..

pager
-----

  $ cat >> fakepager.py <<EOF
  > import sys
  > for line in sys.stdin:
  >     sys.stdout.write('paged! %r\n' % line)
  > EOF

enable the pager extension globally, but spawn the master server with no tty:

  $ chg init pager
  $ cd pager
  $ cat >> $HGRCPATH <<EOF
  > [extensions]
  > pager =
  > [pager]
  > pager = "$PYTHON" $TESTTMP/fakepager.py
  > EOF
  $ chg version > /dev/null
  $ touch foo
  $ chg ci -qAm foo

pager should be enabled if the attached client has a tty:

  $ chg log -l1 -q --config ui.formatted=True
  paged! '0:1f7b0de80e11\n'
  $ chg log -l1 -q --config ui.formatted=False
  0:1f7b0de80e11

chg waits for pager if runcommand raises

  $ cat > $TESTTMP/crash.py <<EOF
  > from mercurial import registrar
  > cmdtable = {}
  > command = registrar.command(cmdtable)
  > @command(b'crash')
  > def pagercrash(ui, repo, *pats, **opts):
  >     ui.write(b'going to crash\n')
  >     raise Exception('.')
  > EOF

  $ cat > $TESTTMP/fakepager.py <<EOF
  > from __future__ import absolute_import
  > import sys
  > import time
  > for line in iter(sys.stdin.readline, ''):
  >     if 'crash' in line: # only interested in lines containing 'crash'
  >         # if chg exits when pager is sleeping (incorrectly), the output
  >         # will be captured by the next test case
  >         time.sleep(1)
  >     sys.stdout.write('crash-pager: %s' % line)
  > EOF

  $ cat >> .hg/hgrc <<EOF
  > [extensions]
  > crash = $TESTTMP/crash.py
  > EOF

  $ chg crash --pager=on --config ui.formatted=True 2>/dev/null
  crash-pager: going to crash
  [255]

no stdout data should be printed after pager quits, and the buffered data
should never persist (issue6207)

"killed!" may be printed if terminated by SIGPIPE, which isn't important
in this test.

  $ cat > $TESTTMP/bulkwrite.py <<'EOF'
  > import time
  > from mercurial import error, registrar
  > cmdtable = {}
  > command = registrar.command(cmdtable)
  > @command(b'bulkwrite')
  > def bulkwrite(ui, repo, *pats, **opts):
  >     ui.write(b'going to write massive data\n')
  >     ui.flush()
  >     t = time.time()
  >     while time.time() - t < 2:
  >         ui.write(b'x' * 1023 + b'\n') # will be interrupted by SIGPIPE
  >     raise error.Abort(b"write() doesn't block")
  > EOF

  $ cat > $TESTTMP/fakepager.py <<'EOF'
  > import sys
  > import time
  > sys.stdout.write('paged! %r\n' % sys.stdin.readline())
  > time.sleep(1) # new data will be written
  > EOF

  $ cat >> .hg/hgrc <<EOF
  > [extensions]
  > bulkwrite = $TESTTMP/bulkwrite.py
  > EOF

  $ chg bulkwrite --pager=on --color no --config ui.formatted=True
  paged! 'going to write massive data\n'
  killed! (?)
  [255]

  $ chg bulkwrite --pager=on --color no --config ui.formatted=True
  paged! 'going to write massive data\n'
  killed! (?)
  [255]

  $ cd ..

missing stdio
-------------

  $ CHGDEBUG=1 chg version -q 0<&-
  chg: debug: * stdio fds are missing (glob)
  chg: debug: * execute original hg (glob)
  Mercurial Distributed SCM * (glob)

server lifecycle
----------------

chg server should be restarted on code change, and the old server will shut
down automatically. In this test, we use the following time parameters:

- "sleep 1" to make mtime different
- "sleep 2" to notice mtime change (polling interval is 1 sec)

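The restart detection is driven by hashes of the config and of the mtimes of
loaded source files (the confighash and mtimehash visible in the server log
below); when the recomputed hash differs, the server instructs the client to
reconnect to a fresh server. A rough, illustrative sketch of the mtime-hash
idea (the names and details are hypothetical, not chg's actual code):

import hashlib
import os

def mtimehash(paths):
    # Hash (path, st_mtime) pairs; touching any file changes the digest.
    h = hashlib.sha1()
    for p in sorted(paths):
        try:
            st = os.stat(p)
            h.update(b'%s %d\0' % (p.encode(), int(st.st_mtime)))
        except OSError:
            h.update(b'%s missing\0' % p.encode())
    return h.hexdigest()

# A server would remember the hash at startup and re-check periodically
# (the test above assumes a 1-second polling interval):
baseline = mtimehash(['dummyext.py'])
# ... later ...
if mtimehash(['dummyext.py']) != baseline:
    print('code changed; instruct client to reconnect to a new server')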
set up repository with an extension:

  $ chg init extreload
  $ cd extreload
  $ touch dummyext.py
  $ cat <<EOF >> .hg/hgrc
  > [extensions]
  > dummyext = dummyext.py
  > EOF

isolate socket directory for stable result:

  $ OLDCHGSOCKNAME=$CHGSOCKNAME
  $ mkdir chgsock
  $ CHGSOCKNAME=`pwd`/chgsock/server

warm up server:

  $ CHGDEBUG= chg log 2>&1 | egrep 'instruction|start'
  chg: debug: * start cmdserver at $TESTTMP/extreload/chgsock/server.* (glob)

new server should be started if extension modified:

  $ sleep 1
  $ touch dummyext.py
  $ CHGDEBUG= chg log 2>&1 | egrep 'instruction|start'
  chg: debug: * instruction: unlink $TESTTMP/extreload/chgsock/server-* (glob)
  chg: debug: * instruction: reconnect (glob)
  chg: debug: * start cmdserver at $TESTTMP/extreload/chgsock/server.* (glob)

old server will shut down, while new server should still be reachable:

  $ sleep 2
  $ CHGDEBUG= chg log 2>&1 | (egrep 'instruction|start' || true)

socket file should never be unlinked by old server:
(simulates unowned socket by updating mtime, which makes sure server exits
at polling cycle)

  $ ls chgsock/server-*
  chgsock/server-* (glob)
  $ touch chgsock/server-*
  $ sleep 2
  $ ls chgsock/server-*
  chgsock/server-* (glob)

since no server is reachable from socket file, new server should be started:
(this test makes sure that old server shut down automatically)

  $ CHGDEBUG= chg log 2>&1 | egrep 'instruction|start'
  chg: debug: * start cmdserver at $TESTTMP/extreload/chgsock/server.* (glob)

shut down servers and restore environment:

  $ rm -R chgsock
  $ sleep 2
  $ CHGSOCKNAME=$OLDCHGSOCKNAME
  $ cd ..

check that server events are recorded:

  $ ls log
  server.log
  server.log.1

print only the last 10 lines, since we aren't sure how many records are
preserved (since setprocname isn't available on py3 and the pure version,
the 10th-most-recent line is different when using py3):

  $ cat log/server.log.1 log/server.log | tail -10 | filterlog
  YYYY/MM/DD HH:MM:SS (PID)> confighash = ... mtimehash = ... (no-setprocname !)
  YYYY/MM/DD HH:MM:SS (PID)> forked worker process (pid=...)
  YYYY/MM/DD HH:MM:SS (PID)> setprocname: ... (setprocname !)
  YYYY/MM/DD HH:MM:SS (PID)> received fds: ...
  YYYY/MM/DD HH:MM:SS (PID)> chdir to '$TESTTMP/extreload'
  YYYY/MM/DD HH:MM:SS (PID)> setumask 18
  YYYY/MM/DD HH:MM:SS (PID)> setenv: ...
  YYYY/MM/DD HH:MM:SS (PID)> confighash = ... mtimehash = ...
  YYYY/MM/DD HH:MM:SS (PID)> validate: []
  YYYY/MM/DD HH:MM:SS (PID)> worker process exited (pid=...)
  YYYY/MM/DD HH:MM:SS (PID)> $TESTTMP/extreload/chgsock/server-... is not owned, exiting.

global data mutated by schemes
------------------------------

  $ hg init schemes
  $ cd schemes

initial state

  $ cat > .hg/hgrc <<'EOF'
  > [extensions]
  > schemes =
  > [schemes]
  > foo = https://foo.example.org/
  > EOF
  $ hg debugexpandscheme foo://expanded
  https://foo.example.org/expanded
  $ hg debugexpandscheme bar://unexpanded
  bar://unexpanded

add bar

  $ cat > .hg/hgrc <<'EOF'
  > [extensions]
  > schemes =
  > [schemes]
  > foo = https://foo.example.org/
  > bar = https://bar.example.org/
  > EOF
  $ hg debugexpandscheme foo://expanded
  https://foo.example.org/expanded
  $ hg debugexpandscheme bar://expanded
  https://bar.example.org/expanded

remove foo

  $ cat > .hg/hgrc <<'EOF'
  > [extensions]
  > schemes =
  > [schemes]
  > bar = https://bar.example.org/
  > EOF
  $ hg debugexpandscheme foo://unexpanded
  foo://unexpanded
  $ hg debugexpandscheme bar://expanded
  https://bar.example.org/expanded

  $ cd ..

repository cache
----------------

  $ rm log/server.log*
  $ cp $HGRCPATH.unconfigured $HGRCPATH
  $ cat <<'EOF' >> $HGRCPATH
  > [cmdserver]
  > log = $TESTTMP/log/server.log
  > max-repo-cache = 1
  > track-log = command, repocache
  > EOF

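max-repo-cache bounds how many repository objects the command server keeps
alive between requests, evicting the least-recently-used entry first; with a
size of 1, touching a second repository evicts the first, which the server
log at the end of this section confirms. An illustrative LRU sketch (the
RepoCache name and shape are hypothetical, not the server's actual code):

import collections

class RepoCache:
    """Tiny LRU cache keyed by repository root; illustrative only."""

    def __init__(self, maxsize=1):
        self._cache = collections.OrderedDict()
        self._maxsize = maxsize

    def get(self, root):
        repo = self._cache.pop(root, None)
        if repo is not None:
            self._cache[root] = repo  # re-insert: mark most recently used
        return repo

    def put(self, root, repo):
        self._cache.pop(root, None)
        self._cache[root] = repo
        while len(self._cache) > self._maxsize:
            self._cache.popitem(last=False)  # evict least recently used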
isolate socket directory for stable result:

  $ OLDCHGSOCKNAME=$CHGSOCKNAME
  $ mkdir chgsock
  $ CHGSOCKNAME=`pwd`/chgsock/server

create empty repo and cache it:

  $ hg init cached
  $ hg id -R cached
  000000000000 tip
  $ sleep 1

modify repo (and cache will be invalidated):

  $ touch cached/a
  $ hg ci -R cached -Am 'add a'
  adding a
  $ sleep 1

read cached repo:

  $ hg log -R cached
  changeset:   0:ac82d8b1f7c4
  tag:         tip
  user:        test
  date:        Thu Jan 01 00:00:00 1970 +0000
  summary:     add a

  $ sleep 1

discard cached from LRU cache:

  $ hg clone cached cached2
  updating to branch default
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ hg id -R cached2
  ac82d8b1f7c4 tip
  $ sleep 1

read uncached repo:

  $ hg log -R cached
  changeset:   0:ac82d8b1f7c4
  tag:         tip
  user:        test
  date:        Thu Jan 01 00:00:00 1970 +0000
  summary:     add a

  $ sleep 1

shut down servers and restore environment:

  $ rm -R chgsock
  $ sleep 2
  $ CHGSOCKNAME=$OLDCHGSOCKNAME

check server log:

  $ cat log/server.log | filterlog
  YYYY/MM/DD HH:MM:SS (PID)> worker process exited (pid=...)
  YYYY/MM/DD HH:MM:SS (PID)> worker process exited (pid=...) (?)
  YYYY/MM/DD HH:MM:SS (PID)> init cached
  YYYY/MM/DD HH:MM:SS (PID)> id -R cached
  YYYY/MM/DD HH:MM:SS (PID)> loaded repo into cache: $TESTTMP/cached (in  ...s)
  YYYY/MM/DD HH:MM:SS (PID)> repo from cache: $TESTTMP/cached
  YYYY/MM/DD HH:MM:SS (PID)> ci -R cached -Am 'add a'
  YYYY/MM/DD HH:MM:SS (PID)> loaded repo into cache: $TESTTMP/cached (in  ...s)
  YYYY/MM/DD HH:MM:SS (PID)> repo from cache: $TESTTMP/cached
  YYYY/MM/DD HH:MM:SS (PID)> log -R cached
  YYYY/MM/DD HH:MM:SS (PID)> loaded repo into cache: $TESTTMP/cached (in  ...s)
  YYYY/MM/DD HH:MM:SS (PID)> clone cached cached2
  YYYY/MM/DD HH:MM:SS (PID)> id -R cached2
  YYYY/MM/DD HH:MM:SS (PID)> loaded repo into cache: $TESTTMP/cached2 (in  ...s)
  YYYY/MM/DD HH:MM:SS (PID)> log -R cached
  YYYY/MM/DD HH:MM:SS (PID)> loaded repo into cache: $TESTTMP/cached (in  ...s)

Test that chg works (sets to the user's actual LC_CTYPE) even when python
"coerces" the locale (py3.7+)

  $ cat > $TESTTMP/debugenv.py <<EOF
  > from mercurial import encoding
  > from mercurial import registrar
  > cmdtable = {}
  > command = registrar.command(cmdtable)
  > @command(b'debugenv', [], b'', norepo=True)
  > def debugenv(ui):
  >     for k in [b'LC_ALL', b'LC_CTYPE', b'LANG']:
  >         v = encoding.environ.get(k)
  >         if v is not None:
  >             ui.write(b'%s=%s\n' % (k, encoding.environ[k]))
  > EOF
(hg keeps python's modified LC_CTYPE, chg doesn't)
  $ (unset LC_ALL; unset LANG; LC_CTYPE= "$CHGHG" \
  >    --config extensions.debugenv=$TESTTMP/debugenv.py debugenv)
  LC_CTYPE=C.UTF-8 (py37 !)
  LC_CTYPE= (no-py37 !)
  $ (unset LC_ALL; unset LANG; LC_CTYPE= chg \
  >    --config extensions.debugenv=$TESTTMP/debugenv.py debugenv)
  LC_CTYPE=
  $ (unset LC_ALL; unset LANG; LC_CTYPE=unsupported_value chg \
  >    --config extensions.debugenv=$TESTTMP/debugenv.py debugenv)
  *cannot change locale* (glob) (?)
  LC_CTYPE=unsupported_value
  $ (unset LC_ALL; unset LANG; LC_CTYPE= chg \
  >    --config extensions.debugenv=$TESTTMP/debugenv.py debugenv)
  LC_CTYPE=
  $ LANG= LC_ALL= LC_CTYPE= chg \
  >    --config extensions.debugenv=$TESTTMP/debugenv.py debugenv
  LC_ALL=
  LC_CTYPE=
  LANG=

Profiling isn't permanently enabled or carried over between chg invocations that
share the same server
  $ cp $HGRCPATH.orig $HGRCPATH
  $ hg init $TESTTMP/profiling
  $ cd $TESTTMP/profiling
  $ filteredchg() {
  >   CHGDEBUG=1 chg "$@" 2>&1 | sed -rn 's_^No samples recorded.*$_Sample count: 0_; /Sample count/p; /start cmdserver/p'
  > }
  $ newchg() {
  >   chg --kill-chg-daemon
  >   filteredchg "$@" | egrep -v 'start cmdserver' || true
  > }
(--profile isn't permanently on just because it was specified when chg was
started)
  $ newchg log -r . --profile
  Sample count: * (glob)
  $ filteredchg log -r .
(enabling profiling via config works, even on the first chg command that starts
a cmdserver)
  $ cat >> $HGRCPATH <<EOF
  > [profiling]
  > type=stat
  > enabled=1
  > EOF
  $ newchg log -r .
  Sample count: * (glob)
  $ filteredchg log -r .
  Sample count: * (glob)
(test that we aren't accumulating more and more samples each run)
  $ cat > $TESTTMP/debugsleep.py <<EOF
  > import time
  > from mercurial import registrar
  > cmdtable = {}
  > command = registrar.command(cmdtable)
  > @command(b'debugsleep', [], b'', norepo=True)
  > def debugsleep(ui):
  >     start = time.time()
  >     x = 0
  >     while time.time() < start + 0.5:
  >         time.sleep(.1)
  >         x += 1
  >     ui.status(b'%d debugsleep iterations in %.03fs\n' % (x, time.time() - start))
  > EOF
  $ cat >> $HGRCPATH <<EOF
  > [extensions]
  > debugsleep = $TESTTMP/debugsleep.py
  > EOF
  $ newchg debugsleep > run_1
  $ filteredchg debugsleep > run_2
  $ filteredchg debugsleep > run_3
  $ filteredchg debugsleep > run_4
FIXME: Run 4 should not be >3x Run 1's number of samples.
  $ "$PYTHON" <<EOF
  > r1 = int(open("run_1", "r").read().split()[-1])
  > r4 = int(open("run_4", "r").read().split()[-1])
  > print("Run 1: %d samples\nRun 4: %d samples\nRun 4 > 3 * Run 1: %s" %
  >       (r1, r4, r4 > (r1 * 3)))
  > EOF
  Run 1: * samples (glob)
  Run 4: * samples (glob)
  Run 4 > 3 * Run 1: False
(Disabling with --no-profile on the commandline still works, but isn't permanent)
  $ newchg log -r . --no-profile
  $ filteredchg log -r .
  Sample count: * (glob)
  $ filteredchg log -r . --no-profile
  $ filteredchg log -r .
  Sample count: * (glob)
@@ -1,164 +1,220 b''
  $ cat >> $HGRCPATH <<EOF
  > [extensions]
  > rebase=
  > [phases]
  > publish=False
  > [merge]
  > EOF

  $ hg init repo
  $ cd repo
  $ echo a > a
  $ echo b > b
  $ hg commit -qAm ab
  $ echo c >> a
  $ echo c >> b
  $ hg commit -qAm c
  $ hg up -q ".^"
  $ echo d >> a
  $ echo d >> b
  $ hg commit -qAm d

Testing on-failure=continue
  $ echo on-failure=continue >> $HGRCPATH
  $ hg rebase -s 1 -d 2 --tool false
  rebasing 1:1f28a51c3c9b "c"
  merging a
  merging b
  merging a failed!
  merging b failed!
  unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
  [240]

  $ hg resolve --list
  U a
  U b

  $ hg rebase --abort
  rebase aborted

Testing on-failure=halt
  $ echo on-failure=halt >> $HGRCPATH
  $ hg rebase -s 1 -d 2 --tool false
  rebasing 1:1f28a51c3c9b "c"
  merging a
  merging b
  merging a failed!
  unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
  [240]

  $ hg resolve --list
  U a
  U b

  $ hg rebase --abort
  rebase aborted

Testing on-failure=prompt
  $ cat <<EOS >> $HGRCPATH
  > [merge]
  > on-failure=prompt
  > [ui]
  > interactive=1
  > EOS
  $ cat <<EOS | hg rebase -s 1 -d 2 --tool false
  > y
  > n
  > EOS
  rebasing 1:1f28a51c3c9b "c"
  merging a
  merging b
  merging a failed!
  continue merge operation (yn)? y
  merging b failed!
  continue merge operation (yn)? n
  unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
  [240]

  $ hg resolve --list
  U a
  U b

  $ hg rebase --abort
  rebase aborted

Check that successful tool with failed post-check halts the merge
  $ cat <<EOS >> $HGRCPATH
  > [merge-tools]
  > true.check=changed
  > EOS
  $ cat <<EOS | hg rebase -s 1 -d 2 --tool true
  > y
  > n
  > n
  > EOS
  rebasing 1:1f28a51c3c9b "c"
  merging a
  merging b
  output file a appears unchanged
  was merge successful (yn)? y
  output file b appears unchanged
  was merge successful (yn)? n
  merging b failed!
  continue merge operation (yn)? n
  unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
  [240]

  $ hg resolve --list
  R a
  U b

  $ hg rebase --abort
  rebase aborted

Check that conflicts with conflict check also halts the merge
  $ cat <<EOS >> $HGRCPATH
  > [merge-tools]
  > true.check=conflicts
  > true.premerge=keep
  > [merge]
  > on-failure=halt
  > EOS
  $ hg rebase -s 1 -d 2 --tool true
  rebasing 1:1f28a51c3c9b "c"
  merging a
  merging b
  merging a failed!
  unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
  [240]

  $ hg resolve --list
  U a
  U b

  $ hg rebase --abort
  rebase aborted

Check that always-prompt also can halt the merge
  $ cat <<EOS | hg rebase -s 1 -d 2 --tool true --config merge-tools.true.check=prompt
  > y
  > n
  > EOS
  rebasing 1:1f28a51c3c9b "c"
  merging a
  merging b
  was merge of 'a' successful (yn)? y
  was merge of 'b' successful (yn)? n
  merging b failed!
  unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
  [240]

  $ hg resolve --list
  R a
  U b

  $ hg rebase --abort
  rebase aborted

Check that successful tool otherwise allows the merge to continue
  $ hg rebase -s 1 -d 2 --tool echo --keep --config merge-tools.echo.premerge=keep
  rebasing 1:1f28a51c3c9b "c"
  merging a
  merging b
  $TESTTMP/repo/a *a~base* *a~other* (glob)
  $TESTTMP/repo/b *b~base* *b~other* (glob)

Check that unshelve isn't broken by halting the merge
  $ cat <<EOS >> $HGRCPATH
  > [extensions]
  > shelve =
  > [merge-tools]
  > false.check=conflicts
  > false.premerge=false
  > EOS
  $ echo foo > shelve_file1
  $ echo foo > shelve_file2
  $ hg ci -qAm foo
  $ echo bar >> shelve_file1
  $ echo bar >> shelve_file2
  $ hg shelve --list
  $ hg shelve
  shelved as default
  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ echo baz >> shelve_file1
  $ echo baz >> shelve_file2
  $ hg ci -m baz
  $ hg unshelve --tool false --config merge-tools.false.premerge=keep
  unshelving change 'default'
  rebasing shelved changes
  merging shelve_file1
  merging shelve_file2
  merging shelve_file1 failed!
  unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
  [240]
  $ hg status --config commands.status.verbose=True
  M shelve_file1
  M shelve_file2
  ? shelve_file1.orig
  ? shelve_file2.orig
  # The repository is in an unfinished *unshelve* state.

  # Unresolved merge conflicts:
  #
  #     shelve_file1
  #     shelve_file2
  #
  # To mark files as resolved: hg resolve --mark FILE

  # To continue: hg unshelve --continue
  # To abort: hg unshelve --abort

  $ hg resolve --tool false --all --re-merge
  merging shelve_file1
  merging shelve_file2
  merging shelve_file1 failed!
  merge halted after failed merge (see hg resolve)
  [240]
  $ hg shelve --list
  default (* ago) changes to: foo (glob)
  $ hg unshelve --abort
  unshelve of 'default' aborted
@@ -1,1295 +1,1255 b''
===================================
Test the persistent on-disk nodemap
===================================


  $ cat << EOF >> $HGRCPATH
  > [format]
  > use-share-safe=yes
  > [extensions]
  > share=
  > EOF

#if no-rust

  $ cat << EOF >> $HGRCPATH
  > [format]
  > use-persistent-nodemap=yes
  > [devel]
  > persistent-nodemap=yes
  > EOF

#endif

  $ hg init test-repo --config storage.revlog.persistent-nodemap.slow-path=allow
  $ cd test-repo

Check handling of the default slow-path value

#if no-pure no-rust

  $ hg id
  abort: accessing `persistent-nodemap` repository without associated fast implementation.
  (check `hg help config.format.use-persistent-nodemap` for details)
  [255]

Unlock further checks (we are here to test the feature)

  $ cat << EOF >> $HGRCPATH
  > [storage]
  > # to avoid spamming the test
  > revlog.persistent-nodemap.slow-path=allow
  > EOF

#endif

#if rust

Regression test for a previous bug in Rust/C FFI for the `Revlog_CAPI` capsule:
in places where `mercurial/cext/revlog.c` function signatures use `Py_ssize_t`
(64 bits on Linux x86_64), corresponding declarations in `rust/hg-cpython/src/cindex.rs`
incorrectly used `libc::c_int` (32 bits).
As a result, -1 passed from Rust for the null revision became 4294967295 in C.

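A minimal sketch of the width mismatch described above, using ctypes to mimic
what happens when a 32-bit -1 is read back through a 64-bit slot (illustrative
only; the real bug lived in the FFI declarations, not in Python):

import ctypes

# The C side expects a 64-bit Py_ssize_t, but the buggy declaration passed
# a 32-bit int. The 32-bit two's-complement -1 reads back as 4294967295
# once the value is widened without sign extension.
rev = ctypes.c_int(-1)                       # what the Rust side sent
as_unsigned = ctypes.c_uint32(rev.value)     # same 32 bits, no sign
widened = ctypes.c_int64(as_unsigned.value)  # what the C side observed
print(widened.value)  # 4294967295, not -1
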
54 $ hg log -r 00000000
54 $ hg log -r 00000000
55 changeset: -1:000000000000
55 changeset: -1:000000000000
56 tag: tip
56 tag: tip
57 user:
57 user:
58 date: Thu Jan 01 00:00:00 1970 +0000
58 date: Thu Jan 01 00:00:00 1970 +0000
59
59
60
60
61 #endif
61 #endif
62
62
63
63
64 $ hg debugformat
64 $ hg debugformat
65 format-variant repo
65 format-variant repo
66 fncache: yes
66 fncache: yes
67 dirstate-v2: no
67 dirstate-v2: no
68 dotencode: yes
68 dotencode: yes
69 generaldelta: yes
69 generaldelta: yes
70 share-safe: yes
70 share-safe: yes
71 sparserevlog: yes
71 sparserevlog: yes
72 persistent-nodemap: yes
72 persistent-nodemap: yes
73 copies-sdc: no
73 copies-sdc: no
74 revlog-v2: no
74 revlog-v2: no
75 changelog-v2: no
75 changelog-v2: no
76 plain-cl-delta: yes
76 plain-cl-delta: yes
77 compression: zlib (no-zstd !)
77 compression: zlib (no-zstd !)
78 compression: zstd (zstd !)
78 compression: zstd (zstd !)
79 compression-level: default
79 compression-level: default
80 $ hg debugbuilddag .+5000 --new-file
80 $ hg debugbuilddag .+5000 --new-file
81
81
82 $ hg debugnodemap --metadata
82 $ hg debugnodemap --metadata
83 uid: ???????? (glob)
83 uid: ???????? (glob)
84 tip-rev: 5000
84 tip-rev: 5000
85 tip-node: 6b02b8c7b96654c25e86ba69eda198d7e6ad8b3c
85 tip-node: 6b02b8c7b96654c25e86ba69eda198d7e6ad8b3c
86 data-length: 121088
86 data-length: 121088
87 data-unused: 0
87 data-unused: 0
88 data-unused: 0.000%
88 data-unused: 0.000%
89 $ f --size .hg/store/00changelog.n
89 $ f --size .hg/store/00changelog.n
90 .hg/store/00changelog.n: size=62
90 .hg/store/00changelog.n: size=62
91
91
92 Simple lookup works
92 Simple lookup works
93
93
94 $ ANYNODE=`hg log --template '{node|short}\n' --rev tip`
94 $ ANYNODE=`hg log --template '{node|short}\n' --rev tip`
95 $ hg log -r "$ANYNODE" --template '{rev}\n'
95 $ hg log -r "$ANYNODE" --template '{rev}\n'
96 5000
96 5000
97
97
98
98
99 #if rust
99 #if rust
100
100
101 $ f --sha256 .hg/store/00changelog-*.nd
101 $ f --sha256 .hg/store/00changelog-*.nd
102 .hg/store/00changelog-????????.nd: sha256=2e029d3200bd1a986b32784fc2ef1a3bd60dc331f025718bcf5ff44d93f026fd (glob)
102 .hg/store/00changelog-????????.nd: sha256=2e029d3200bd1a986b32784fc2ef1a3bd60dc331f025718bcf5ff44d93f026fd (glob)
103
103
104 $ f --sha256 .hg/store/00manifest-*.nd
104 $ f --sha256 .hg/store/00manifest-*.nd
105 .hg/store/00manifest-????????.nd: sha256=97117b1c064ea2f86664a124589e47db0e254e8d34739b5c5cc5bf31c9da2b51 (glob)
105 .hg/store/00manifest-????????.nd: sha256=97117b1c064ea2f86664a124589e47db0e254e8d34739b5c5cc5bf31c9da2b51 (glob)
106 $ hg debugnodemap --dump-new | f --sha256 --size
106 $ hg debugnodemap --dump-new | f --sha256 --size
107 size=121088, sha256=2e029d3200bd1a986b32784fc2ef1a3bd60dc331f025718bcf5ff44d93f026fd
107 size=121088, sha256=2e029d3200bd1a986b32784fc2ef1a3bd60dc331f025718bcf5ff44d93f026fd
108 $ hg debugnodemap --dump-disk | f --sha256 --bytes=256 --hexdump --size
108 $ hg debugnodemap --dump-disk | f --sha256 --bytes=256 --hexdump --size
109 size=121088, sha256=2e029d3200bd1a986b32784fc2ef1a3bd60dc331f025718bcf5ff44d93f026fd
109 size=121088, sha256=2e029d3200bd1a986b32784fc2ef1a3bd60dc331f025718bcf5ff44d93f026fd
110 0000: 00 00 00 91 00 00 00 20 00 00 00 bb 00 00 00 e7 |....... ........|
110 0000: 00 00 00 91 00 00 00 20 00 00 00 bb 00 00 00 e7 |....... ........|
111 0010: 00 00 00 66 00 00 00 a1 00 00 01 13 00 00 01 22 |...f..........."|
111 0010: 00 00 00 66 00 00 00 a1 00 00 01 13 00 00 01 22 |...f..........."|
112 0020: 00 00 00 23 00 00 00 fc 00 00 00 ba 00 00 00 5e |...#...........^|
112 0020: 00 00 00 23 00 00 00 fc 00 00 00 ba 00 00 00 5e |...#...........^|
113 0030: 00 00 00 df 00 00 01 4e 00 00 01 65 00 00 00 ab |.......N...e....|
113 0030: 00 00 00 df 00 00 01 4e 00 00 01 65 00 00 00 ab |.......N...e....|
114 0040: 00 00 00 a9 00 00 00 95 00 00 00 73 00 00 00 38 |...........s...8|
114 0040: 00 00 00 a9 00 00 00 95 00 00 00 73 00 00 00 38 |...........s...8|
115 0050: 00 00 00 cc 00 00 00 92 00 00 00 90 00 00 00 69 |...............i|
115 0050: 00 00 00 cc 00 00 00 92 00 00 00 90 00 00 00 69 |...............i|
116 0060: 00 00 00 ec 00 00 00 8d 00 00 01 4f 00 00 00 12 |...........O....|
116 0060: 00 00 00 ec 00 00 00 8d 00 00 01 4f 00 00 00 12 |...........O....|
117 0070: 00 00 02 0c 00 00 00 77 00 00 00 9c 00 00 00 8f |.......w........|
117 0070: 00 00 02 0c 00 00 00 77 00 00 00 9c 00 00 00 8f |.......w........|
118 0080: 00 00 00 d5 00 00 00 6b 00 00 00 48 00 00 00 b3 |.......k...H....|
118 0080: 00 00 00 d5 00 00 00 6b 00 00 00 48 00 00 00 b3 |.......k...H....|
119 0090: 00 00 00 e5 00 00 00 b5 00 00 00 8e 00 00 00 ad |................|
119 0090: 00 00 00 e5 00 00 00 b5 00 00 00 8e 00 00 00 ad |................|
120 00a0: 00 00 00 7b 00 00 00 7c 00 00 00 0b 00 00 00 2b |...{...|.......+|
120 00a0: 00 00 00 7b 00 00 00 7c 00 00 00 0b 00 00 00 2b |...{...|.......+|
121 00b0: 00 00 00 c6 00 00 00 1e 00 00 01 08 00 00 00 11 |................|
121 00b0: 00 00 00 c6 00 00 00 1e 00 00 01 08 00 00 00 11 |................|
122 00c0: 00 00 01 30 00 00 00 26 00 00 01 9c 00 00 00 35 |...0...&.......5|
122 00c0: 00 00 01 30 00 00 00 26 00 00 01 9c 00 00 00 35 |...0...&.......5|
123 00d0: 00 00 00 b8 00 00 01 31 00 00 00 2c 00 00 00 55 |.......1...,...U|
123 00d0: 00 00 00 b8 00 00 01 31 00 00 00 2c 00 00 00 55 |.......1...,...U|
124 00e0: 00 00 00 8a 00 00 00 9a 00 00 00 0c 00 00 01 1e |................|
124 00e0: 00 00 00 8a 00 00 00 9a 00 00 00 0c 00 00 01 1e |................|
125 00f0: 00 00 00 a4 00 00 00 83 00 00 00 c9 00 00 00 8c |................|
125 00f0: 00 00 00 a4 00 00 00 83 00 00 00 c9 00 00 00 8c |................|
126
126
127
127
128 #else
128 #else
129
129
130 $ f --sha256 .hg/store/00changelog-*.nd
130 $ f --sha256 .hg/store/00changelog-*.nd
131 .hg/store/00changelog-????????.nd: sha256=f544f5462ff46097432caf6d764091f6d8c46d6121be315ead8576d548c9dd79 (glob)
131 .hg/store/00changelog-????????.nd: sha256=f544f5462ff46097432caf6d764091f6d8c46d6121be315ead8576d548c9dd79 (glob)
132 $ hg debugnodemap --dump-new | f --sha256 --size
132 $ hg debugnodemap --dump-new | f --sha256 --size
133 size=121088, sha256=f544f5462ff46097432caf6d764091f6d8c46d6121be315ead8576d548c9dd79
133 size=121088, sha256=f544f5462ff46097432caf6d764091f6d8c46d6121be315ead8576d548c9dd79
134 $ hg debugnodemap --dump-disk | f --sha256 --bytes=256 --hexdump --size
134 $ hg debugnodemap --dump-disk | f --sha256 --bytes=256 --hexdump --size
135 size=121088, sha256=f544f5462ff46097432caf6d764091f6d8c46d6121be315ead8576d548c9dd79
135 size=121088, sha256=f544f5462ff46097432caf6d764091f6d8c46d6121be315ead8576d548c9dd79
136 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
136 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
137 0010: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
137 0010: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
138 0020: ff ff ff ff ff ff f5 06 ff ff ff ff ff ff f3 e7 |................|
138 0020: ff ff ff ff ff ff f5 06 ff ff ff ff ff ff f3 e7 |................|
139 0030: ff ff ef ca ff ff ff ff ff ff ff ff ff ff ff ff |................|
139 0030: ff ff ef ca ff ff ff ff ff ff ff ff ff ff ff ff |................|
140 0040: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
140 0040: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
141 0050: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ed 08 |................|
141 0050: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ed 08 |................|
142 0060: ff ff ed 66 ff ff ff ff ff ff ff ff ff ff ff ff |...f............|
142 0060: ff ff ed 66 ff ff ff ff ff ff ff ff ff ff ff ff |...f............|
143 0070: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
143 0070: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
144 0080: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
144 0080: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
145 0090: ff ff ff ff ff ff ff ff ff ff ff ff ff ff f6 ed |................|
145 0090: ff ff ff ff ff ff ff ff ff ff ff ff ff ff f6 ed |................|
146 00a0: ff ff ff ff ff ff fe 61 ff ff ff ff ff ff ff ff |.......a........|
146 00a0: ff ff ff ff ff ff fe 61 ff ff ff ff ff ff ff ff |.......a........|
147 00b0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
147 00b0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
148 00c0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
148 00c0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
149 00d0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
149 00d0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
150 00e0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff f1 02 |................|
150 00e0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff f1 02 |................|
151 00f0: ff ff ff ff ff ff ed 1b ff ff ff ff ff ff ff ff |................|
151 00f0: ff ff ff ff ff ff ed 1b ff ff ff ff ff ff ff ff |................|

#endif

  $ hg debugnodemap --check
  revision in index: 5001
  revision in nodemap: 5001

add a new commit

  $ hg up
  5001 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ echo foo > foo
  $ hg add foo


Check slow-path config value handling
-------------------------------------

#if no-pure no-rust

  $ hg id --config "storage.revlog.persistent-nodemap.slow-path=invalid-value"
  unknown value for config "storage.revlog.persistent-nodemap.slow-path": "invalid-value"
  falling back to default value: abort
  abort: accessing `persistent-nodemap` repository without associated fast implementation.
  (check `hg help config.format.use-persistent-nodemap` for details)
  [255]

  $ hg log -r . --config "storage.revlog.persistent-nodemap.slow-path=warn"
  warning: accessing `persistent-nodemap` repository without associated fast implementation.
  (check `hg help config.format.use-persistent-nodemap` for details)
  changeset:   5000:6b02b8c7b966
  tag:         tip
  user:        debugbuilddag
  date:        Thu Jan 01 01:23:20 1970 +0000
  summary:     r5000

  $ hg ci -m 'foo' --config "storage.revlog.persistent-nodemap.slow-path=abort"
  abort: accessing `persistent-nodemap` repository without associated fast implementation.
  (check `hg help config.format.use-persistent-nodemap` for details)
  [255]

#else

  $ hg id --config "storage.revlog.persistent-nodemap.slow-path=invalid-value"
  unknown value for config "storage.revlog.persistent-nodemap.slow-path": "invalid-value"
  falling back to default value: abort
  6b02b8c7b966+ tip

#endif

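The slow-path setting above behaves as a tri-state (`allow`, `warn`, `abort`) that falls back to `abort` when it sees an unrecognized value. A minimal sketch of that validation logic, with hypothetical names rather than Mercurial's actual internals:

    # Hedged sketch: validate a tri-state config value, falling back to a
    # default on unknown input. Names are illustrative, not Mercurial's API.
    VALID_SLOW_PATH = ("allow", "warn", "abort")
    DEFAULT_SLOW_PATH = "abort"

    def slow_path_policy(value, warn):
        if value not in VALID_SLOW_PATH:
            warn('unknown value for config '
                 '"storage.revlog.persistent-nodemap.slow-path": "%s"\n' % value)
            warn("falling back to default value: %s\n" % DEFAULT_SLOW_PATH)
            return DEFAULT_SLOW_PATH
        return value
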
  $ hg ci -m 'foo'

#if no-pure no-rust
  $ hg debugnodemap --metadata
  uid: ???????? (glob)
  tip-rev: 5001
  tip-node: 16395c3cf7e231394735e6b1717823ada303fb0c
  data-length: 121088
  data-unused: 0
  data-unused: 0.000%
#else
  $ hg debugnodemap --metadata
  uid: ???????? (glob)
  tip-rev: 5001
  tip-node: 16395c3cf7e231394735e6b1717823ada303fb0c
  data-length: 121344
  data-unused: 256
  data-unused: 0.211%
#endif

  $ f --size .hg/store/00changelog.n
  .hg/store/00changelog.n: size=62
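The 62-byte docket plausibly decomposes into a small fixed header plus the uid and the binary tip node (34 + 8 + 20 = 62). A hedged sketch of parsing such a docket; the exact field layout here is an assumption, not a documented format:

    import struct

    # Assumed layout: version, uid length, then tip-rev, data-length,
    # data-unused and tip-node length as big-endian 64-bit integers,
    # followed by the uid bytes and the binary tip node.
    HEADER = struct.Struct(">BBQQQQ")  # 34 bytes

    def read_docket(path):
        with open(path, "rb") as fobj:
            data = fobj.read()
        (version, uid_size, tip_rev,
         data_length, data_unused, node_size) = HEADER.unpack_from(data)
        uid = data[HEADER.size:HEADER.size + uid_size]
        node = data[HEADER.size + uid_size:HEADER.size + uid_size + node_size]
        return uid.decode("ascii"), tip_rev, node.hex(), data_length, data_unused

A uid of 8 ASCII characters and a 20-byte binary node would give exactly the size=62 observed above.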

(The pure code uses the debug code that performs an incremental update; the C code re-encodes from scratch.)
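That difference shows up directly in the data-unused figures above: an incremental writer appends the updated blocks and abandons the superseded ones in place, while a full re-encode produces a compact file. A schematic sketch, not the real on-disk trie format:

    # Schematic only: incremental persistence appends changed blocks and
    # leaves the old copies behind as dead bytes, so data-unused grows;
    # a full re-encode rewrites everything compactly (data-unused == 0).
    BLOCK_SIZE = 64  # illustrative block size, not the real value

    def persist_incremental(old_data, changed_blocks):
        unused = len(changed_blocks) * BLOCK_SIZE  # superseded slots
        return old_data + b"".join(changed_blocks), unused

    def persist_full(all_blocks):
        return b"".join(all_blocks), 0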

#if pure
  $ f --sha256 .hg/store/00changelog-*.nd --size
  .hg/store/00changelog-????????.nd: size=121344, sha256=cce54c5da5bde3ad72a4938673ed4064c86231b9c64376b082b163fdb20f8f66 (glob)
#endif

#if rust
  $ f --sha256 .hg/store/00changelog-*.nd --size
  .hg/store/00changelog-????????.nd: size=121344, sha256=952b042fcf614ceb37b542b1b723e04f18f83efe99bee4e0f5ccd232ef470e58 (glob)
#endif

#if no-pure no-rust
  $ f --sha256 .hg/store/00changelog-*.nd --size
  .hg/store/00changelog-????????.nd: size=121088, sha256=df7c06a035b96cb28c7287d349d603baef43240be7736fe34eea419a49702e17 (glob)
#endif

  $ hg debugnodemap --check
  revision in index: 5002
  revision in nodemap: 5002

Test code path without mmap
---------------------------

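With mmap disabled, the data file is presumably read into memory in one go instead of being mapped lazily. A sketch of the two access paths being exercised below:

    import mmap

    def load_nodemap_data(path, use_mmap=True):
        fobj = open(path, "rb")
        if use_mmap:
            # Pages are faulted in lazily as the trie is walked.
            return mmap.mmap(fobj.fileno(), 0, access=mmap.ACCESS_READ)
        # mmap=no path: a single upfront copy of the whole file.
        try:
            return fobj.read()
        finally:
            fobj.close()
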
  $ echo bar > bar
  $ hg add bar
  $ hg ci -m 'bar' --config storage.revlog.persistent-nodemap.mmap=no

  $ hg debugnodemap --check --config storage.revlog.persistent-nodemap.mmap=yes
  revision in index: 5003
  revision in nodemap: 5003
  $ hg debugnodemap --check --config storage.revlog.persistent-nodemap.mmap=no
  revision in index: 5003
  revision in nodemap: 5003


#if pure
  $ hg debugnodemap --metadata
  uid: ???????? (glob)
  tip-rev: 5002
  tip-node: 880b18d239dfa9f632413a2071bfdbcc4806a4fd
  data-length: 121600
  data-unused: 512
  data-unused: 0.421%
  $ f --sha256 .hg/store/00changelog-*.nd --size
  .hg/store/00changelog-????????.nd: size=121600, sha256=def52503d049ccb823974af313a98a935319ba61f40f3aa06a8be4d35c215054 (glob)
#endif
#if rust
  $ hg debugnodemap --metadata
  uid: ???????? (glob)
  tip-rev: 5002
  tip-node: 880b18d239dfa9f632413a2071bfdbcc4806a4fd
  data-length: 121600
  data-unused: 512
  data-unused: 0.421%
  $ f --sha256 .hg/store/00changelog-*.nd --size
  .hg/store/00changelog-????????.nd: size=121600, sha256=dacf5b5f1d4585fee7527d0e67cad5b1ba0930e6a0928f650f779aefb04ce3fb (glob)
#endif
#if no-pure no-rust
  $ hg debugnodemap --metadata
  uid: ???????? (glob)
  tip-rev: 5002
  tip-node: 880b18d239dfa9f632413a2071bfdbcc4806a4fd
  data-length: 121088
  data-unused: 0
  data-unused: 0.000%
  $ f --sha256 .hg/store/00changelog-*.nd --size
  .hg/store/00changelog-????????.nd: size=121088, sha256=59fcede3e3cc587755916ceed29e3c33748cd1aa7d2f91828ac83e7979d935e8 (glob)
#endif

Test force warming the cache

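Conceptually, warming the cache just re-derives the node-to-rev mapping from the changelog index and persists it again; a sketch of the rebuild, with a hypothetical index interface:

    # Hedged sketch: rebuild the mapping by walking the whole index.
    # `index.node(rev)` is a hypothetical accessor, not Mercurial's API.
    def rebuild_nodemap(index):
        nodemap = {}
        for rev in range(len(index)):
            nodemap[index.node(rev)] = rev
        return nodemap
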
  $ rm .hg/store/00changelog.n
  $ hg debugnodemap --metadata
  $ hg debugupdatecache
#if pure
  $ hg debugnodemap --metadata
  uid: ???????? (glob)
  tip-rev: 5002
  tip-node: 880b18d239dfa9f632413a2071bfdbcc4806a4fd
  data-length: 121088
  data-unused: 0
  data-unused: 0.000%
#else
  $ hg debugnodemap --metadata
  uid: ???????? (glob)
  tip-rev: 5002
  tip-node: 880b18d239dfa9f632413a2071bfdbcc4806a4fd
  data-length: 121088
  data-unused: 0
  data-unused: 0.000%
#endif

Check out of sync nodemap
=========================

First copy old data on the side.

  $ mkdir ../tmp-copies
  $ cp .hg/store/00changelog-????????.nd .hg/store/00changelog.n ../tmp-copies

Nodemap lagging behind
----------------------

make a new commit

  $ echo bar2 > bar
  $ hg ci -m 'bar2'
  $ NODE=`hg log -r tip -T '{node}\n'`
  $ hg log -r "$NODE" -T '{rev}\n'
  5003

If the nodemap is lagging behind, it can catch up fine

  $ hg debugnodemap --metadata
  uid: ???????? (glob)
  tip-rev: 5003
  tip-node: c9329770f979ade2d16912267c38ba5f82fd37b3
  data-length: 121344 (pure !)
  data-length: 121344 (rust !)
  data-length: 121152 (no-rust no-pure !)
  data-unused: 192 (pure !)
  data-unused: 192 (rust !)
  data-unused: 0 (no-rust no-pure !)
  data-unused: 0.158% (pure !)
  data-unused: 0.158% (rust !)
  data-unused: 0.000% (no-rust no-pure !)
  $ cp -f ../tmp-copies/* .hg/store/
  $ hg debugnodemap --metadata
  uid: ???????? (glob)
  tip-rev: 5002
  tip-node: 880b18d239dfa9f632413a2071bfdbcc4806a4fd
  data-length: 121088
  data-unused: 0
  data-unused: 0.000%
  $ hg log -r "$NODE" -T '{rev}\n'
  5003

changelog altered
-----------------

If the nodemap is not gated behind a requirement, an unaware client can alter
the repository so that the revlog used to generate the nodemap is no longer
compatible with the persistent nodemap. We need to detect that.

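Detection can be as cheap as cross-checking the docket against the current index before trusting the nodemap; a sketch, reusing the hypothetical docket fields and index accessor from earlier:

    # If the node recorded for tip-rev no longer matches the index, the
    # changelog was rewritten (e.g. stripped) and the nodemap is stale.
    def nodemap_is_current(index, docket):
        if docket.tip_rev >= len(index):
            return False  # the changelog got shorter
        return index.node(docket.tip_rev) == docket.tip_node
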
  $ hg up "$NODE~5"
  0 files updated, 0 files merged, 4 files removed, 0 files unresolved
  $ echo bar > babar
  $ hg add babar
  $ hg ci -m 'babar'
  created new head
  $ OTHERNODE=`hg log -r tip -T '{node}\n'`
  $ hg log -r "$OTHERNODE" -T '{rev}\n'
  5004

  $ hg --config extensions.strip= strip --rev "$NODE~1" --no-backup

The nodemap should detect that the changelog has been tampered with and recover.

  $ hg debugnodemap --metadata
  uid: ???????? (glob)
  tip-rev: 5002
  tip-node: b355ef8adce0949b8bdf6afc72ca853740d65944
  data-length: 121536 (pure !)
  data-length: 121088 (rust !)
  data-length: 121088 (no-pure no-rust !)
  data-unused: 448 (pure !)
  data-unused: 0 (rust !)
  data-unused: 0 (no-pure no-rust !)
  data-unused: 0.000% (rust !)
  data-unused: 0.369% (pure !)
  data-unused: 0.000% (no-pure no-rust !)

  $ cp -f ../tmp-copies/* .hg/store/
  $ hg debugnodemap --metadata
  uid: ???????? (glob)
  tip-rev: 5002
  tip-node: 880b18d239dfa9f632413a2071bfdbcc4806a4fd
  data-length: 121088
  data-unused: 0
  data-unused: 0.000%
  $ hg log -r "$OTHERNODE" -T '{rev}\n'
  5002

missing data file
-----------------

  $ UUID=`hg debugnodemap --metadata| grep 'uid:' | \
  > sed 's/uid: //'`
  $ FILE=.hg/store/00changelog-"${UUID}".nd
  $ mv $FILE ../tmp-data-file
  $ cp .hg/store/00changelog.n ../tmp-docket

Mercurial doesn't crash

  $ hg log -r .
  changeset:   5002:b355ef8adce0
  tag:         tip
  parent:      4998:d918ad6d18d3
  user:        test
  date:        Thu Jan 01 00:00:00 1970 +0000
  summary:     babar

  $ hg debugnodemap --metadata

  $ hg debugupdatecache
  $ hg debugnodemap --metadata
  uid: * (glob)
  tip-rev: 5002
  tip-node: b355ef8adce0949b8bdf6afc72ca853740d65944
  data-length: 121088
  data-unused: 0
  data-unused: 0.000%

Sub-case: fallback for corrupted data file
------------------------------------------

Sabotaging the data file so that nodemap resolutions fail, triggering a
fallback to the (non-persistent) C implementation.

  $ UUID=`hg debugnodemap --metadata| grep 'uid:' | \
  > sed 's/uid: //'`
  $ FILE=.hg/store/00changelog-"${UUID}".nd
  $ python -c "fobj = open('$FILE', 'r+b'); fobj.write(b'\xff' * 121088); fobj.close()"

The nodemap data file is still considered in sync with the docket. This
would fail without the fallback to the (non-persistent) C implementation:

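A sketch of such a fallback: try the persisted structure first, verify the hit against the index, and degrade to a plain index scan when anything looks wrong (hypothetical interfaces again):

    def lookup(index, persistent_nodemap, node):
        try:
            rev = persistent_nodemap.find(node)  # assumed fast path
            if rev is not None and index.node(rev) == node:
                return rev
        except Exception:
            pass  # corrupted data: ignore the persistent nodemap
        # Slow path: the non-persistent behavior, a linear index scan.
        for rev in range(len(index)):
            if index.node(rev) == node:
                return rev
        return None
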
  $ hg log -r b355ef8adce0949b8bdf6afc72ca853740d65944 -T '{rev}\n' --traceback
  5002

The nodemap data file hasn't been fixed, so more tests can be inserted:

  $ hg debugnodemap --dump-disk | f --bytes=256 --hexdump --size
  size=121088
  0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
  0010: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
  0020: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
  0030: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
  0040: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
  0050: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
  0060: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
  0070: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
  0080: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
  0090: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
  00a0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
  00b0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
  00c0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
  00d0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
  00e0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
  00f0: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|

  $ mv ../tmp-data-file $FILE
  $ mv ../tmp-docket .hg/store/00changelog.n

Check transaction related property
==================================

An up-to-date nodemap should be available to shell hooks.

  $ echo dsljfl > a
  $ hg add a
  $ hg ci -m a
  $ hg debugnodemap --metadata
  uid: ???????? (glob)
  tip-rev: 5003
  tip-node: a52c5079765b5865d97b993b303a18740113bbb2
  data-length: 121088
  data-unused: 0
  data-unused: 0.000%
  $ echo babar2 > babar
  $ hg ci -m 'babar2' --config "hooks.pretxnclose.nodemap-test=hg debugnodemap --metadata"
  uid: ???????? (glob)
  tip-rev: 5004
  tip-node: 2f5fb1c06a16834c5679d672e90da7c5f3b1a984
  data-length: 121280 (pure !)
  data-length: 121280 (rust !)
  data-length: 121088 (no-pure no-rust !)
  data-unused: 192 (pure !)
  data-unused: 192 (rust !)
  data-unused: 0 (no-pure no-rust !)
  data-unused: 0.158% (pure !)
  data-unused: 0.158% (rust !)
  data-unused: 0.000% (no-pure no-rust !)
  $ hg debugnodemap --metadata
  uid: ???????? (glob)
  tip-rev: 5004
  tip-node: 2f5fb1c06a16834c5679d672e90da7c5f3b1a984
  data-length: 121280 (pure !)
  data-length: 121280 (rust !)
  data-length: 121088 (no-pure no-rust !)
  data-unused: 192 (pure !)
  data-unused: 192 (rust !)
  data-unused: 0 (no-pure no-rust !)
  data-unused: 0.158% (pure !)
  data-unused: 0.158% (rust !)
  data-unused: 0.000% (no-pure no-rust !)

Another process does not see the pending nodemap content during the run.

  $ echo qpoasp > a
  $ hg ci -m a2 \
  > --config "hooks.pretxnclose=sh \"$RUNTESTDIR/testlib/wait-on-file\" 20 sync-repo-read sync-txn-pending" \
  > --config "hooks.txnclose=touch sync-txn-close" > output.txt 2>&1 &

(read the repository while the commit transaction is pending)

  $ sh "$RUNTESTDIR/testlib/wait-on-file" 20 sync-txn-pending && \
  > hg debugnodemap --metadata && \
  > sh "$RUNTESTDIR/testlib/wait-on-file" 20 sync-txn-close sync-repo-read
  uid: ???????? (glob)
  tip-rev: 5004
  tip-node: 2f5fb1c06a16834c5679d672e90da7c5f3b1a984
  data-length: 121280 (pure !)
  data-length: 121280 (rust !)
  data-length: 121088 (no-pure no-rust !)
  data-unused: 192 (pure !)
  data-unused: 192 (rust !)
  data-unused: 0 (no-pure no-rust !)
  data-unused: 0.158% (pure !)
  data-unused: 0.158% (rust !)
  data-unused: 0.000% (no-pure no-rust !)
  $ hg debugnodemap --metadata
  uid: ???????? (glob)
  tip-rev: 5005
  tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe
  data-length: 121536 (pure !)
  data-length: 121536 (rust !)
  data-length: 121088 (no-pure no-rust !)
  data-unused: 448 (pure !)
  data-unused: 448 (rust !)
  data-unused: 0 (no-pure no-rust !)
  data-unused: 0.369% (pure !)
  data-unused: 0.369% (rust !)
  data-unused: 0.000% (no-pure no-rust !)

  $ cat output.txt

Check that a failing transaction will properly revert the data

  $ echo plakfe > a
  $ f --size --sha256 .hg/store/00changelog-*.nd
  .hg/store/00changelog-????????.nd: size=121536, sha256=bb414468d225cf52d69132e1237afba34d4346ee2eb81b505027e6197b107f03 (glob) (pure !)
  .hg/store/00changelog-????????.nd: size=121536, sha256=909ac727bc4d1c0fda5f7bff3c620c98bd4a2967c143405a1503439e33b377da (glob) (rust !)
  .hg/store/00changelog-????????.nd: size=121088, sha256=342d36d30d86dde67d3cb6c002606c4a75bcad665595d941493845066d9c8ee0 (glob) (no-pure no-rust !)
  $ hg ci -m a3 --config "extensions.abort=$RUNTESTDIR/testlib/crash_transaction_late.py"
  transaction abort!
  rollback completed
  abort: This is a late abort
  [255]
  $ hg debugnodemap --metadata
  uid: ???????? (glob)
  tip-rev: 5005
  tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe
  data-length: 121536 (pure !)
  data-length: 121536 (rust !)
  data-length: 121088 (no-pure no-rust !)
  data-unused: 448 (pure !)
  data-unused: 448 (rust !)
  data-unused: 0 (no-pure no-rust !)
  data-unused: 0.369% (pure !)
  data-unused: 0.369% (rust !)
  data-unused: 0.000% (no-pure no-rust !)
  $ f --size --sha256 .hg/store/00changelog-*.nd
  .hg/store/00changelog-????????.nd: size=121536, sha256=bb414468d225cf52d69132e1237afba34d4346ee2eb81b505027e6197b107f03 (glob) (pure !)
  .hg/store/00changelog-????????.nd: size=121536, sha256=909ac727bc4d1c0fda5f7bff3c620c98bd4a2967c143405a1503439e33b377da (glob) (rust !)
  .hg/store/00changelog-????????.nd: size=121088, sha256=342d36d30d86dde67d3cb6c002606c4a75bcad665595d941493845066d9c8ee0 (glob) (no-pure no-rust !)

Check that removing content does not confuse the nodemap
--------------------------------------------------------

removing data with rollback

  $ echo aso > a
  $ hg ci -m a4
  $ hg rollback
  repository tip rolled back to revision 5005 (undo commit)
  working directory now based on revision 5005
  $ hg id -r .
  90d5d3ba2fc4 tip

removing data with strip

  $ echo aso > a
  $ hg ci -m a4
  $ hg --config extensions.strip= strip -r . --no-backup
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ hg id -r . --traceback
  90d5d3ba2fc4 tip

(be a good citizen and regenerate the nodemap)
  $ hg debugupdatecaches
  $ hg debugnodemap --metadata
  uid: * (glob)
  tip-rev: 5005
  tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe
  data-length: 121088
  data-unused: 0
  data-unused: 0.000%

Check race condition when multiple processes write new data to the repository
------------------------------------------------------------------------------

In this test, we check that two writers touching the repository will not
overwrite each other's data. This test is prompted by the existence of
issue6554, where a writer ended up using an outdated docket to update the
repository. See the dedicated extension for details on the race windows and
the read/write schedule necessary to end up in this situation:
testlib/persistent-nodemap-race-ext.py

The issue was initially observed on a server with high push traffic, but it
can be reproduced using a share and two committing processes, which is
simpler.

The test is Rust-only, as the other implementations do not use the same
read/write patterns.

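The invariant being tested, sketched: a reader must trust the docket's recorded data-length rather than the file size, because a concurrent writer may already have appended new blocks. The reader output below shows exactly that situation (file-actual-length 121536 vs record-data-length 121280). A hedged sketch with the hypothetical docket from earlier:

    def read_nodemap_data(docket, data_path):
        with open(data_path, "rb") as fobj:
            data = fobj.read()
        # The file may legitimately be longer than the docket says if a
        # writer is mid-transaction; only the recorded prefix is valid.
        assert len(data) >= docket.data_length
        return data[:docket.data_length]
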
  $ cd ..

#if rust

  $ cp -R test-repo race-repo
  $ hg share race-repo ./other-wc --config format.use-share-safe=yes
  updating working directory
  5001 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ hg debugformat -R ./race-repo | egrep 'share-safe|persistent-nodemap'
  share-safe:         yes
  persistent-nodemap: yes
  $ hg debugformat -R ./other-wc/ | egrep 'share-safe|persistent-nodemap'
  share-safe:         yes
  persistent-nodemap: yes
  $ hg -R ./other-wc update 'min(head())'
  3 files updated, 0 files merged, 2 files removed, 0 files unresolved
  $ hg -R ./race-repo debugnodemap --metadata
  uid: 43c37dde
  tip-rev: 5005
  tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe
  data-length: 121088
  data-unused: 0
  data-unused: 0.000%
  $ hg -R ./race-repo log -G -r 'head()'
  @  changeset:   5005:90d5d3ba2fc4
  |  tag:         tip
  ~  user:        test
     date:        Thu Jan 01 00:00:00 1970 +0000
     summary:     a2

  o  changeset:   5001:16395c3cf7e2
  |  user:        test
  ~  date:        Thu Jan 01 00:00:00 1970 +0000
     summary:     foo

  $ hg -R ./other-wc log -G -r 'head()'
  o  changeset:   5005:90d5d3ba2fc4
  |  tag:         tip
  ~  user:        test
     date:        Thu Jan 01 00:00:00 1970 +0000
     summary:     a2

  @  changeset:   5001:16395c3cf7e2
  |  user:        test
  ~  date:        Thu Jan 01 00:00:00 1970 +0000
     summary:     foo

  $ echo left-side-race > race-repo/left-side-race
  $ hg -R ./race-repo/ add race-repo/left-side-race

  $ echo right-side-race > ./other-wc/right-side-race
  $ hg -R ./other-wc/ add ./other-wc/right-side-race

  $ mkdir sync-files
  $ mkdir outputs
  $ (
  > hg -R ./race-repo/ commit -m left-side-commit \
  > --config "extensions.race=${RUNTESTDIR}/testlib/persistent-nodemap-race-ext.py" \
  > --config 'devel.nodemap-race.role=left';
  > touch sync-files/left-done
  > ) > outputs/left.txt 2>&1 &
  $ (
  > hg -R ./other-wc/ commit -m right-side-commit \
  > --config "extensions.race=${RUNTESTDIR}/testlib/persistent-nodemap-race-ext.py" \
  > --config 'devel.nodemap-race.role=right';
  > touch sync-files/right-done
  > ) > outputs/right.txt 2>&1 &
  $ (
  > hg -R ./race-repo/ check-nodemap-race \
  > --config "extensions.race=${RUNTESTDIR}/testlib/persistent-nodemap-race-ext.py" \
  > --config 'devel.nodemap-race.role=reader';
  > touch sync-files/reader-done
  > ) > outputs/reader.txt 2>&1 &
  $ sh "$RUNTESTDIR"/testlib/wait-on-file 10 sync-files/left-done
  $ cat outputs/left.txt
  docket-details:
  uid: 43c37dde
  actual-tip: 5005
  tip-rev: 5005
  data-length: 121088
  nodemap-race: left side locked and ready to commit
  docket-details:
  uid: 43c37dde
  actual-tip: 5005
  tip-rev: 5005
  data-length: 121088
  finalized changelog write
  persisting changelog nodemap
  new data start at 121088
  persisted changelog nodemap
  docket-details:
  uid: 43c37dde
  actual-tip: 5006
  tip-rev: 5006
  data-length: 121280
  $ sh "$RUNTESTDIR"/testlib/wait-on-file 10 sync-files/right-done
  $ cat outputs/right.txt
  nodemap-race: right side start of the locking sequence
  nodemap-race: right side reading changelog
  nodemap-race: right side reading of changelog is done
  docket-details:
  uid: 43c37dde
  actual-tip: 5006
  tip-rev: 5005
  data-length: 121088
  nodemap-race: right side ready to wait for the lock
  nodemap-race: right side locked and ready to commit
  docket-details:
  uid: 43c37dde
  actual-tip: 5006
  tip-rev: 5006
  data-length: 121280
  right ready to write, waiting for reader
  right proceeding with writing its changelog index and nodemap
  finalized changelog write
  persisting changelog nodemap
  new data start at 121280
  persisted changelog nodemap
  docket-details:
  uid: 43c37dde
  actual-tip: 5007
  tip-rev: 5007
  data-length: 121536
  $ sh "$RUNTESTDIR"/testlib/wait-on-file 10 sync-files/reader-done
  $ cat outputs/reader.txt
  reader: reading changelog
  reader ready to read the changelog, waiting for right
  reader: nodemap docket read
  record-data-length: 121280
  actual-data-length: 121280
  file-actual-length: 121536
  reader: changelog read
  docket-details:
  uid: 43c37dde
  actual-tip: 5006
  tip-rev: 5006
  data-length: 121280
  tip-rev: 5006
  tip-node: 492901161367
  node-rev: 5006

  $ hg -R ./race-repo log -G -r 'head()'
  o  changeset:   5007:ac4a2abde241
  |  tag:         tip
  ~  parent:      5001:16395c3cf7e2
     user:        test
     date:        Thu Jan 01 00:00:00 1970 +0000
     summary:     right-side-commit

  @  changeset:   5006:492901161367
  |  user:        test
  ~  date:        Thu Jan 01 00:00:00 1970 +0000
     summary:     left-side-commit

  $ hg -R ./other-wc log -G -r 'head()'
  @  changeset:   5007:ac4a2abde241
  |  tag:         tip
  ~  parent:      5001:16395c3cf7e2
     user:        test
     date:        Thu Jan 01 00:00:00 1970 +0000
     summary:     right-side-commit

  o  changeset:   5006:492901161367
  |  user:        test
  ~  date:        Thu Jan 01 00:00:00 1970 +0000
     summary:     left-side-commit

#endif

Test upgrade / downgrade
========================

  $ cd ./test-repo/

downgrading

  $ cat << EOF >> .hg/hgrc
  > [format]
  > use-persistent-nodemap=no
  > EOF
  $ hg debugformat -v
  format-variant      repo config default
  fncache:             yes    yes     yes
  dirstate-v2:          no     no      no
  dotencode:           yes    yes     yes
  generaldelta:        yes    yes     yes
  share-safe:          yes    yes      no
  sparserevlog:        yes    yes     yes
  persistent-nodemap:  yes     no      no
  copies-sdc:           no     no      no
  revlog-v2:            no     no      no
  changelog-v2:         no     no      no
  plain-cl-delta:      yes    yes     yes
  compression:         zlib   zlib    zlib (no-zstd !)
  compression:         zstd   zstd    zstd (zstd !)
  compression-level:   default default default
  $ hg debugupgraderepo --run --no-backup --quiet
  upgrade will perform the following actions:

  requirements
     preserved: dotencode, fncache, generaldelta, revlogv1, share-safe, sparserevlog, store (no-zstd no-dirstate-v2 !)
     preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, revlogv1, share-safe, sparserevlog, store (zstd no-dirstate-v2 !)
     preserved: dotencode, exp-dirstate-v2, fncache, generaldelta, revlog-compression-zstd, revlogv1, share-safe, sparserevlog, store (zstd dirstate-v2 !)
     removed: persistent-nodemap

  processed revlogs:
    - all-filelogs
    - changelog
    - manifest

  $ ls -1 .hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)'
  00changelog-*.nd (glob)
  00manifest-*.nd (glob)
  undo.backup.00changelog.n
  undo.backup.00manifest.n
  $ hg debugnodemap --metadata


upgrading

  $ cat << EOF >> .hg/hgrc
  > [format]
  > use-persistent-nodemap=yes
  > EOF
  $ hg debugformat -v
  format-variant      repo config default
  fncache:             yes    yes     yes
  dirstate-v2:          no     no      no
  dotencode:           yes    yes     yes
  generaldelta:        yes    yes     yes
  share-safe:          yes    yes      no
  sparserevlog:        yes    yes     yes
  persistent-nodemap:   no    yes      no
  copies-sdc:           no     no      no
  revlog-v2:            no     no      no
  changelog-v2:         no     no      no
  plain-cl-delta:      yes    yes     yes
  compression:         zlib   zlib    zlib (no-zstd !)
  compression:         zstd   zstd    zstd (zstd !)
  compression-level:   default default default
  $ hg debugupgraderepo --run --no-backup --quiet
  upgrade will perform the following actions:

  requirements
     preserved: dotencode, fncache, generaldelta, revlogv1, share-safe, sparserevlog, store (no-zstd no-dirstate-v2 !)
     preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, revlogv1, share-safe, sparserevlog, store (zstd no-dirstate-v2 !)
     preserved: dotencode, exp-dirstate-v2, fncache, generaldelta, revlog-compression-zstd, revlogv1, share-safe, sparserevlog, store (zstd dirstate-v2 !)
     added: persistent-nodemap

  processed revlogs:
    - all-filelogs
    - changelog
    - manifest

  $ ls -1 .hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)'
  00changelog-*.nd (glob)
  00changelog.n
  00manifest-*.nd (glob)
  00manifest.n
  undo.backup.00changelog.n
  undo.backup.00manifest.n

  $ hg debugnodemap --metadata
  uid: * (glob)
  tip-rev: 5005
  tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe
  data-length: 121088
  data-unused: 0
  data-unused: 0.000%

Running unrelated upgrade

  $ hg debugupgraderepo --run --no-backup --quiet --optimize re-delta-all
  upgrade will perform the following actions:

  requirements
     preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlogv1, share-safe, sparserevlog, store (no-zstd no-dirstate-v2 !)
     preserved: dotencode, fncache, generaldelta, persistent-nodemap, revlog-compression-zstd, revlogv1, share-safe, sparserevlog, store (zstd no-dirstate-v2 !)
     preserved: dotencode, exp-dirstate-v2, fncache, generaldelta, persistent-nodemap, revlog-compression-zstd, revlogv1, share-safe, sparserevlog, store (zstd dirstate-v2 !)

  optimisations: re-delta-all

  processed revlogs:
    - all-filelogs
    - changelog
    - manifest

  $ ls -1 .hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)'
  00changelog-*.nd (glob)
  00changelog.n
  00manifest-*.nd (glob)
  00manifest.n

  $ hg debugnodemap --metadata
  uid: * (glob)
  tip-rev: 5005
  tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe
  data-length: 121088
  data-unused: 0
  data-unused: 0.000%

Persistent nodemap and local/streaming clone
============================================

  $ cd ..

standard clone
--------------

The persistent nodemap should exist after a standard clone

  $ hg clone --pull --quiet -U test-repo standard-clone
  $ ls -1 standard-clone/.hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)'
  00changelog-*.nd (glob)
  00changelog.n
  00manifest-*.nd (glob)
  00manifest.n
  $ hg -R standard-clone debugnodemap --metadata
  uid: * (glob)
  tip-rev: 5005
  tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe
  data-length: 121088
  data-unused: 0
  data-unused: 0.000%


local clone
------------

The persistent nodemap should exist after a local clone

  $ hg clone -U test-repo local-clone
  $ ls -1 local-clone/.hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)'
  00changelog-*.nd (glob)
  00changelog.n
  00manifest-*.nd (glob)
  00manifest.n
  $ hg -R local-clone debugnodemap --metadata
  uid: * (glob)
  tip-rev: 5005
  tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe
  data-length: 121088
  data-unused: 0
  data-unused: 0.000%

Test various corruption cases
=============================

Missing datafile
----------------

Test behavior with a missing datafile

  $ hg clone --quiet --pull test-repo corruption-test-repo
  $ ls -1 corruption-test-repo/.hg/store/00changelog*
  corruption-test-repo/.hg/store/00changelog-*.nd (glob)
  corruption-test-repo/.hg/store/00changelog.d
  corruption-test-repo/.hg/store/00changelog.i
  corruption-test-repo/.hg/store/00changelog.n
  $ rm corruption-test-repo/.hg/store/00changelog*.nd
  $ hg log -R corruption-test-repo -r .
  changeset:   5005:90d5d3ba2fc4
  tag:         tip
  user:        test
  date:        Thu Jan 01 00:00:00 1970 +0000
  summary:     a2

  $ ls -1 corruption-test-repo/.hg/store/00changelog*
  corruption-test-repo/.hg/store/00changelog.d
  corruption-test-repo/.hg/store/00changelog.i
  corruption-test-repo/.hg/store/00changelog.n

Truncated data file
-------------------

Test behavior with a too-short datafile

rebuild the missing data
  $ hg -R corruption-test-repo debugupdatecache
  $ ls -1 corruption-test-repo/.hg/store/00changelog*
  corruption-test-repo/.hg/store/00changelog-*.nd (glob)
  corruption-test-repo/.hg/store/00changelog.d
  corruption-test-repo/.hg/store/00changelog.i
  corruption-test-repo/.hg/store/00changelog.n

truncate the file

  $ datafilepath=`ls corruption-test-repo/.hg/store/00changelog*.nd`
  $ f -s $datafilepath
  corruption-test-repo/.hg/store/00changelog-*.nd: size=121088 (glob)
  $ dd if=$datafilepath bs=1000 count=10 of=$datafilepath-tmp status=noxfer
  10+0 records in
  10+0 records out
  $ mv $datafilepath-tmp $datafilepath
  $ f -s $datafilepath
  corruption-test-repo/.hg/store/00changelog-*.nd: size=10000 (glob)

Check Mercurial's reaction to this event

  $ hg -R corruption-test-repo log -r . --traceback
  changeset:   5005:90d5d3ba2fc4
  tag:         tip
  user:        test
  date:        Thu Jan 01 00:00:00 1970 +0000
  summary:     a2


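The graceful recovery above is consistent with a simple length check: if the data file is shorter than the docket's recorded data-length, the persistent nodemap cannot be trusted and is ignored until rebuilt. A sketch under that assumption:

    import os

    def nodemap_data_usable(docket, data_path):
        try:
            actual = os.path.getsize(data_path)
        except OSError:
            return False  # missing data file: fall back to the index
        # A truncated file cannot contain the recorded prefix.
        return actual >= docket.data_length
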
1049 stream clone
1009 stream clone
1050 ============
1010 ============
1051
1011
1052 The persistent nodemap should exist after a streaming clone
1012 The persistent nodemap should exist after a streaming clone
1053
1013
1054 Simple case
1014 Simple case
1055 -----------
1015 -----------
1056
1016
1057 No race condition
1017 No race condition
1058
1018
  $ hg clone -U --stream ssh://user@dummy/test-repo stream-clone --debug | egrep '00(changelog|manifest)'
  adding [s] 00manifest.n (62 bytes)
  adding [s] 00manifest-*.nd (118 KB) (glob)
  adding [s] 00changelog.n (62 bytes)
  adding [s] 00changelog-*.nd (118 KB) (glob)
  adding [s] 00manifest.d (452 KB) (no-zstd !)
  adding [s] 00manifest.d (491 KB) (zstd !)
  adding [s] 00changelog.d (360 KB) (no-zstd !)
  adding [s] 00changelog.d (368 KB) (zstd !)
  adding [s] 00manifest.i (313 KB)
  adding [s] 00changelog.i (313 KB)
  $ ls -1 stream-clone/.hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)'
  00changelog-*.nd (glob)
  00changelog.n
  00manifest-*.nd (glob)
  00manifest.n
  $ hg -R stream-clone debugnodemap --metadata
  uid: * (glob)
  tip-rev: 5005
  tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe
  data-length: 121088
  data-unused: 0
  data-unused: 0.000%

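As a scale check on that metadata (assuming the on-disk layout of 64-byte
blocks, 16 four-byte slots each, one slot per hex nibble): 121088 bytes is
exactly 1892 such blocks, and `data-unused: 0` means none of those blocks
has been orphaned by an incremental rewrite.
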
new data appended
-----------------

Other commit happening on the server during the stream clone

setup the step-by-step stream cloning

  $ HG_TEST_STREAM_WALKED_FILE_1="$TESTTMP/sync_file_walked_1"
  $ export HG_TEST_STREAM_WALKED_FILE_1
  $ HG_TEST_STREAM_WALKED_FILE_2="$TESTTMP/sync_file_walked_2"
  $ export HG_TEST_STREAM_WALKED_FILE_2
  $ HG_TEST_STREAM_WALKED_FILE_3="$TESTTMP/sync_file_walked_3"
  $ export HG_TEST_STREAM_WALKED_FILE_3
  $ cat << EOF >> test-repo/.hg/hgrc
  > [extensions]
  > steps=$RUNTESTDIR/testlib/ext-stream-clone-steps.py
  > EOF
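
For context, this is roughly how such a step extension can synchronize the
two processes through those files. The sketch below is hypothetical (the
real helper is tests/testlib/ext-stream-clone-steps.py, and the wrapped
hook names are assumptions); it only illustrates the signal/wait pattern.

  import os
  import time

  from mercurial import encoding, extensions, streamclone

  WALKED_1 = encoding.environ[b'HG_TEST_STREAM_WALKED_FILE_1']
  WALKED_2 = encoding.environ[b'HG_TEST_STREAM_WALKED_FILE_2']

  def _wait_file(path, timeout=10.0):
      # Poll until the coordinating shell creates the sync file.
      deadline = time.time() + timeout
      while not os.path.exists(path):
          if time.time() > deadline:
              raise RuntimeError('sync file never appeared: %r' % path)
          time.sleep(0.01)

  def _sync_walk_1(orig, repo):
      # Server walked the file list: signal the test, which may now commit.
      orig(repo)
      with open(WALKED_1, 'wb'):
          pass

  def _sync_walk_2(orig, repo):
      # Block until the test signals that the concurrent change landed.
      orig(repo)
      _wait_file(WALKED_2)

  def uisetup(ui):
      # The hook names are assumptions for this sketch.
      extensions.wrapfunction(
          streamclone, '_test_sync_point_walk_1', _sync_walk_1
      )
      extensions.wrapfunction(
          streamclone, '_test_sync_point_walk_2', _sync_walk_2
      )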

Check and record file state beforehand

  $ f --size test-repo/.hg/store/00changelog*
  test-repo/.hg/store/00changelog-*.nd: size=121088 (glob)
  test-repo/.hg/store/00changelog.d: size=376891 (zstd !)
  test-repo/.hg/store/00changelog.d: size=368890 (no-zstd !)
  test-repo/.hg/store/00changelog.i: size=320384
  test-repo/.hg/store/00changelog.n: size=62
  $ hg -R test-repo debugnodemap --metadata | tee server-metadata.txt
  uid: * (glob)
  tip-rev: 5005
  tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe
  data-length: 121088
  data-unused: 0
  data-unused: 0.000%

Prepare a commit

  $ echo foo >> test-repo/foo
  $ hg -R test-repo/ add test-repo/foo

Do a mix of clone and commit at the same time, so that the files listed on disk at listing time differ from those present at actual transfer time.

  $ (hg clone -U --stream ssh://user@dummy/test-repo stream-clone-race-1 --debug 2>> clone-output | egrep '00(changelog|manifest)' >> clone-output; touch $HG_TEST_STREAM_WALKED_FILE_3) &
  $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_1
  $ hg -R test-repo/ commit -m foo
  $ touch $HG_TEST_STREAM_WALKED_FILE_2
  $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_3
  $ cat clone-output
  adding [s] 00manifest.n (62 bytes)
  adding [s] 00manifest-*.nd (118 KB) (glob)
  adding [s] 00changelog.n (62 bytes)
  adding [s] 00changelog-*.nd (118 KB) (glob)
  adding [s] 00manifest.d (452 KB) (no-zstd !)
  adding [s] 00manifest.d (491 KB) (zstd !)
  adding [s] 00changelog.d (360 KB) (no-zstd !)
  adding [s] 00changelog.d (368 KB) (zstd !)
  adding [s] 00manifest.i (313 KB)
  adding [s] 00changelog.i (313 KB)
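
Note that the transferred sizes match the state recorded before the
concurrent commit (for instance the ~368 KB zstd changelog data file): the
clone streams the snapshot captured when the file list was walked, so the
commit made mid-stream is not part of stream-clone-race-1.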

Check the result state

  $ f --size stream-clone-race-1/.hg/store/00changelog*
  stream-clone-race-1/.hg/store/00changelog-*.nd: size=121088 (glob)
  stream-clone-race-1/.hg/store/00changelog.d: size=368890 (no-zstd !)
  stream-clone-race-1/.hg/store/00changelog.d: size=376891 (zstd !)
  stream-clone-race-1/.hg/store/00changelog.i: size=320384
  stream-clone-race-1/.hg/store/00changelog.n: size=62

  $ hg -R stream-clone-race-1 debugnodemap --metadata | tee client-metadata.txt
  uid: * (glob)
  tip-rev: 5005
  tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe
  data-length: 121088
  data-unused: 0
  data-unused: 0.000%

We get a usable nodemap, so no rewrite would be needed and the metadata should be identical
(i.e. the following diff should be empty)

This isn't the case for the `no-rust` `no-pure` implementation, as it uses a very minimal nodemap implementation that unconditionally rewrites the nodemap every time.

#if no-rust no-pure
  $ diff -u server-metadata.txt client-metadata.txt
  --- server-metadata.txt * (glob)
  +++ client-metadata.txt * (glob)
  @@ -1,4 +1,4 @@
  -uid: * (glob)
  +uid: * (glob)
   tip-rev: 5005
   tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe
   data-length: 121088
  [1]
#else
  $ diff -u server-metadata.txt client-metadata.txt
#endif
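
In that fallback case only the `uid` lines differ: tip-rev, tip-node and
data-length are reproduced identically, and the `[1]` exit code merely
records that `diff` found a difference. The fresh uid is a consequence of
the unconditional rewrite creating a brand new docket.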


Clean up after the test.

  $ rm -f "$HG_TEST_STREAM_WALKED_FILE_1"
  $ rm -f "$HG_TEST_STREAM_WALKED_FILE_2"
  $ rm -f "$HG_TEST_STREAM_WALKED_FILE_3"

full regeneration
-----------------

A full nodemap is generated

(ideally this test would append enough data to make sure the nodemap data file
gets changed; however, to keep things simple, we force a full regeneration for
this test.)

Check the initial state

  $ f --size test-repo/.hg/store/00changelog*
  test-repo/.hg/store/00changelog-*.nd: size=121344 (glob) (rust !)
  test-repo/.hg/store/00changelog-*.nd: size=121344 (glob) (pure !)
  test-repo/.hg/store/00changelog-*.nd: size=121152 (glob) (no-rust no-pure !)
  test-repo/.hg/store/00changelog.d: size=376950 (zstd !)
  test-repo/.hg/store/00changelog.d: size=368949 (no-zstd !)
  test-repo/.hg/store/00changelog.i: size=320448
  test-repo/.hg/store/00changelog.n: size=62
  $ hg -R test-repo debugnodemap --metadata | tee server-metadata-2.txt
  uid: * (glob)
  tip-rev: 5006
  tip-node: ed2ec1eef9aa2a0ec5057c51483bc148d03e810b
  data-length: 121344 (rust !)
  data-length: 121344 (pure !)
  data-length: 121152 (no-rust no-pure !)
  data-unused: 192 (rust !)
  data-unused: 192 (pure !)
  data-unused: 0 (no-rust no-pure !)
  data-unused: 0.158% (rust !)
  data-unused: 0.158% (pure !)
  data-unused: 0.000% (no-rust no-pure !)
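
These numbers are self-consistent: 192 unused bytes out of 121344 is
192 / 121344 ≈ 0.158%, as reported, and (assuming the 64-byte block layout)
amounts to three blocks orphaned by the incremental append. The no-rust
no-pure variant rewrote the file from scratch, hence its smaller size and
zero unused bytes.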

Perform the mix of clone and full refresh of the nodemap, so that the files
(and filenames) are different between listing time and actual transfer time.

  $ (hg clone -U --stream ssh://user@dummy/test-repo stream-clone-race-2 --debug 2>> clone-output-2 | egrep '00(changelog|manifest)' >> clone-output-2; touch $HG_TEST_STREAM_WALKED_FILE_3) &
  $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_1
  $ rm test-repo/.hg/store/00changelog.n
  $ rm test-repo/.hg/store/00changelog-*.nd
  $ hg -R test-repo/ debugupdatecache
  $ touch $HG_TEST_STREAM_WALKED_FILE_2
  $ $RUNTESTDIR/testlib/wait-on-file 10 $HG_TEST_STREAM_WALKED_FILE_3

(note: the stream clone code wrongly picks up the `undo.` files)

  $ cat clone-output-2
  adding [s] undo.backup.00manifest.n (62 bytes) (known-bad-output !)
  adding [s] undo.backup.00changelog.n (62 bytes) (known-bad-output !)
  adding [s] 00manifest.n (62 bytes)
  adding [s] 00manifest-*.nd (118 KB) (glob)
  adding [s] 00changelog.n (62 bytes)
  adding [s] 00changelog-*.nd (118 KB) (glob)
  adding [s] 00manifest.d (492 KB) (zstd !)
  adding [s] 00manifest.d (452 KB) (no-zstd !)
  adding [s] 00changelog.d (360 KB) (no-zstd !)
  adding [s] 00changelog.d (368 KB) (zstd !)
  adding [s] 00manifest.i (313 KB)
  adding [s] 00changelog.i (313 KB)
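
The `(known-bad-output !)` annotations acknowledge the problem mentioned in
the note above: the refresh transaction leaves `undo.backup.*` copies of
the old dockets in the store, and the stream clone wrongly transfers them
along with the regenerated files.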

Check the result.

  $ f --size stream-clone-race-2/.hg/store/00changelog*
  stream-clone-race-2/.hg/store/00changelog-*.nd: size=121344 (glob) (rust !)
  stream-clone-race-2/.hg/store/00changelog-*.nd: size=121344 (glob) (pure !)
  stream-clone-race-2/.hg/store/00changelog-*.nd: size=121152 (glob) (no-rust no-pure !)
  stream-clone-race-2/.hg/store/00changelog.d: size=376950 (zstd !)
  stream-clone-race-2/.hg/store/00changelog.d: size=368949 (no-zstd !)
  stream-clone-race-2/.hg/store/00changelog.i: size=320448
  stream-clone-race-2/.hg/store/00changelog.n: size=62

  $ hg -R stream-clone-race-2 debugnodemap --metadata | tee client-metadata-2.txt
  uid: * (glob)
  tip-rev: 5006
  tip-node: ed2ec1eef9aa2a0ec5057c51483bc148d03e810b
  data-length: 121344 (rust !)
  data-unused: 192 (rust !)
  data-unused: 0.158% (rust !)
  data-length: 121152 (no-rust no-pure !)
  data-unused: 0 (no-rust no-pure !)
  data-unused: 0.000% (no-rust no-pure !)
  data-length: 121344 (pure !)
  data-unused: 192 (pure !)
  data-unused: 0.158% (pure !)

We get a usable nodemap, so no rewrite would be needed and the metadata should be identical
(i.e. the following diff should be empty)

This isn't the case for the `no-rust` `no-pure` implementation, as it uses a very minimal nodemap implementation that unconditionally rewrites the nodemap every time.

#if no-rust no-pure
  $ diff -u server-metadata-2.txt client-metadata-2.txt
  --- server-metadata-2.txt * (glob)
  +++ client-metadata-2.txt * (glob)
  @@ -1,4 +1,4 @@
  -uid: * (glob)
  +uid: * (glob)
   tip-rev: 5006
   tip-node: ed2ec1eef9aa2a0ec5057c51483bc148d03e810b
   data-length: 121152
  [1]
#else
  $ diff -u server-metadata-2.txt client-metadata-2.txt
#endif

Clean up after the test

  $ rm -f $HG_TEST_STREAM_WALKED_FILE_1
  $ rm -f $HG_TEST_STREAM_WALKED_FILE_2
  $ rm -f $HG_TEST_STREAM_WALKED_FILE_3