branching: merge stable into default
Raphaël Gomès
r50174:127d33e6 merge default
@@ -1,158 +1,139 b''
1 1 stages:
2 2 - tests
3 - phabricator
4 3
5 4 image: registry.heptapod.net/mercurial/ci-images/mercurial-core:$HG_CI_IMAGE_TAG
6 5
7 6 variables:
8 7 PYTHON: python
9 8 TEST_HGMODULEPOLICY: "allow"
10 9 HG_CI_IMAGE_TAG: "v1.0"
11 10 TEST_HGTESTS_ALLOW_NETIO: "0"
12 11
13 12 .all_template: &all
14 13 when: on_success
15 14
16 15 .runtests_template: &runtests
17 16 <<: *all
18 17 stage: tests
19 18 # The runner made a clone as root.
20 19 # We make a new clone owned by the user used to run the step.
21 20 before_script:
22 21 - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no
23 22 - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
24 23 - cd /tmp/mercurial-ci/
25 24 - ls -1 tests/test-check-*.* > /tmp/check-tests.txt
26 25 - black --version
27 26 - clang-format --version
28 27 script:
29 28 - echo "python used, $PYTHON"
30 29 - echo "$RUNTEST_ARGS"
31 30 - HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS
32 31
33 32 checks:
34 33 <<: *runtests
35 34 variables:
36 35 RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt"
37 36 PYTHON: python3
38 37
39 38 rust-cargo-test:
40 39 <<: *all
41 40 stage: tests
42 41 script:
43 42 - echo "python used, $PYTHON"
44 43 - make rust-tests
45 44 variables:
46 45 PYTHON: python3
47 46
48 phabricator-refresh:
49 stage: phabricator
50 except:
51 refs:
52 - merge_requests
53 variables:
54 - $PHABRICATOR_TOKEN == "NO-PHAB"
55 variables:
56 DEFAULT_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)"
57 STABLE_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)\n⚠ This patch is intended for stable ⚠\n{image https://media.giphy.com/media/nYI8SmmChYXK0/source.gif}"
58 script:
59 - |
60 if [ `hg branch` == "stable" ]; then
61 ./contrib/phab-refresh-stack.sh --comment "$STABLE_COMMENT";
62 else
63 ./contrib/phab-refresh-stack.sh --comment "$DEFAULT_COMMENT";
64 fi
65
66 47 test-c:
67 48 <<: *runtests
68 49 variables:
69 50 RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt"
70 51 PYTHON: python3
71 52 TEST_HGMODULEPOLICY: "c"
72 53 TEST_HGTESTS_ALLOW_NETIO: "1"
73 54
74 55 test-pure:
75 56 <<: *runtests
76 57 variables:
77 58 RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt"
78 59 PYTHON: python3
79 60 TEST_HGMODULEPOLICY: "py"
80 61
81 62 test-rust:
82 63 <<: *runtests
83 64 variables:
84 65 HGWITHRUSTEXT: cpython
85 66 RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt"
86 67 PYTHON: python3
87 68 TEST_HGMODULEPOLICY: "rust+c"
88 69
89 70 test-rhg:
90 71 <<: *runtests
91 72 variables:
92 73 HGWITHRUSTEXT: cpython
93 74 RUNTEST_ARGS: "--rust --rhg --blacklist /tmp/check-tests.txt"
94 75 PYTHON: python3
95 76 TEST_HGMODULEPOLICY: "rust+c"
96 77
97 78 test-chg:
98 79 <<: *runtests
99 80 variables:
100 81 PYTHON: python3
101 82 RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg"
102 83 TEST_HGMODULEPOLICY: "c"
103 84
104 85 check-pytype:
105 86 extends: .runtests_template
106 87 before_script:
107 88 - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no
108 89 - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
109 90 - cd /tmp/mercurial-ci/
110 91 - make local PYTHON=$PYTHON
111 92 - $PYTHON -m pip install --user -U pytype==2021.04.15
112 93 variables:
113 94 RUNTEST_ARGS: " --allow-slow-tests tests/test-check-pytype.t"
114 95 HGTEST_SLOWTIMEOUT: "3600"
115 96 PYTHON: python3
116 97 TEST_HGMODULEPOLICY: "c"
117 98
118 99 # `sh.exe --login` sets a couple of extra environment variables that are defined
119 100 # in the MinGW shell, but switches CWD to /home/$username. The previous value
120 101 # is stored in OLDPWD. Of the added variables, MSYSTEM is crucial to running
121 102 # run-tests.py - it is needed to make run-tests.py generate a `python3` script
122 103 # that satisfies the various shebang lines and delegates to `py -3`.
123 104 .window_runtests_template: &windows_runtests
124 105 <<: *all
125 106 when: manual # we don't have any Windows runners at the moment
126 107 stage: tests
127 108 before_script:
128 109 - C:/MinGW/msys/1.0/bin/sh.exe --login -c 'cd "$OLDPWD" && ls -1 tests/test-check-*.* > C:/Temp/check-tests.txt'
129 110 # TODO: find/install cvs, bzr, perforce, gpg, sqlite3
130 111
131 112 script:
132 113 - echo "Entering script section"
133 114 - echo "python used, $Env:PYTHON"
134 115 - Invoke-Expression "$Env:PYTHON -V"
135 116 - Invoke-Expression "$Env:PYTHON -m black --version"
136 117 - echo "$Env:RUNTEST_ARGS"
137 118 - echo "$Env:TMP"
138 119 - echo "$Env:TEMP"
139 120
140 121 - C:/MinGW/msys/1.0/bin/sh.exe --login -c 'cd "$OLDPWD" && HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" $PYTHON tests/run-tests.py --color=always $RUNTEST_ARGS'
141 122
142 123 windows:
143 124 <<: *windows_runtests
144 125 tags:
145 126 - windows
146 127 variables:
147 128 TEST_HGMODULEPOLICY: "c"
148 129 RUNTEST_ARGS: "--blacklist C:/Temp/check-tests.txt"
149 130 PYTHON: py -3
150 131
151 132 windows-pyox:
152 133 <<: *windows_runtests
153 134 tags:
154 135 - windows
155 136 variables:
156 137 TEST_HGMODULEPOLICY: "c"
157 138 RUNTEST_ARGS: "--blacklist C:/Temp/check-tests.txt --pyoxidized"
158 139 PYTHON: py -3
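
For readers who want to mirror what the `.runtests_template` job does outside GitLab, here is a minimal sketch. It only restates the `script:` line above with the default variable values from this file (TEST_HGTESTS_ALLOW_NETIO "0", TEST_HGMODULEPOLICY "allow"); the clone path and blacklist file are illustrative assumptions, not a supported interface.

# Rough local equivalent of the runtests step above; paths are placeholders.
import os
import subprocess

env = dict(
    os.environ,
    HGTESTS_ALLOW_NETIO="0",    # TEST_HGTESTS_ALLOW_NETIO
    HGMODULEPOLICY="allow",     # TEST_HGMODULEPOLICY
)
subprocess.run(
    [
        "python3",
        "tests/run-tests.py",
        "--color=always",
        "--blacklist", "/tmp/check-tests.txt",  # stands in for $RUNTEST_ARGS
    ],
    cwd="/tmp/mercurial-ci",
    env=env,
    check=True,
)
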
@@ -1,18 +1,21 b''
1 1 FROM rockylinux/rockylinux:8
2 2
3 3 RUN groupadd -g %GID% build && \
4 4 useradd -u %UID% -g %GID% -s /bin/bash -d /build -m build
5 5
6 6 RUN yum install -y \
7 7 gcc \
8 8 gettext \
9 9 make \
10 10 python3-devel \
11 11 python3-docutils \
12 12 rpm-build
13 13
14 14 # For creating repo meta data
15 15 RUN yum install -y createrepo
16 16
17 17 # For rust extensions
18 18 RUN yum install -y cargo
19
20 # avoid incorrect Docker image permissions on /tmp that prevent writes by non-root users
21 RUN chmod 1777 /tmp
@@ -1,80 +1,83 b''
1 1 # hgdemandimport - global demand-loading of modules for Mercurial
2 2 #
3 3 # Copyright 2017 Facebook Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''demandimport - automatic demand-loading of modules'''
9 9
10 10 # This is in a separate package from mercurial because in Python 3,
11 11 # demand loading is per-package. Keeping demandimport in the mercurial package
12 12 # would disable demand loading for any modules in mercurial.
13 13
14 14
15 15 import os
16 16 import sys
17 17
18 18 from . import demandimportpy3 as demandimport
19 19
20 20 # Full module names which can't be lazy imported.
21 21 # Extensions can add to this set.
22 22 IGNORES = {
23 23 '__future__',
24 24 '_hashlib',
25 25 # ImportError during pkg_resources/__init__.py:fixup_namespace_package
26 26 '_imp',
27 27 '_xmlplus',
28 28 'fcntl',
29 29 'nt', # pathlib2 tests the existence of built-in 'nt' module
30 30 'win32com.gen_py',
31 31 'win32com.shell', # 'appdirs' tries to import win32com.shell
32 32 '_winreg', # 2.7 mimetypes needs immediate ImportError
33 33 'pythoncom',
34 34 # imported by tarfile, not available under Windows
35 35 'pwd',
36 36 'grp',
37 37 # imported by profile, itself imported by hotshot.stats,
38 38 # not available under Windows
39 39 'resource',
40 40 # this trips up many extension authors
41 41 'gtk',
42 42 # setuptools' pkg_resources.py expects "from __main__ import x" to
43 43 # raise ImportError if x not defined
44 44 '__main__',
45 45 '_ast', # https://bugs.python.org/issue41631
46 46 '_ssl', # conditional imports in the stdlib, issue1964
47 47 '_sre', # issue4920
48 48 'rfc822',
49 49 'mimetools',
50 50 'sqlalchemy.events', # has import-time side effects (issue5085)
51 51 'sqlalchemy.dialects', # similar problems as above
52 52 # setuptools 8 expects this module to explode early when not on windows
53 53 'distutils.msvc9compiler',
54 54 '__builtin__',
55 55 'builtins',
56 56 'urwid.command_map', # for pudb
57 57 'lzma',
58 58 }
59 59
60 60 _pypy = '__pypy__' in sys.builtin_module_names
61 61
62 62 if _pypy:
63 63 # _ctypes.pointer is shadowed by "from ... import pointer" (PyPy 5)
64 64 IGNORES.add('_ctypes.pointer')
65 # pure Python module on PyPy, must be loaded to raise ModuleNotFoundError
66 # on non-Windows platforms
67 IGNORES.add('msvcrt')
65 68
66 69 demandimport.init(IGNORES)
67 70
68 71 # Re-export.
69 72 isenabled = demandimport.isenabled
70 73 disable = demandimport.disable
71 74 deactivated = demandimport.deactivated
72 75
73 76
74 77 def enable():
75 78 # chg pre-imports modules so do not enable demandimport for it
76 79 if (
77 80 'CHGINTERNALMARK' not in os.environ
78 81 and os.environ.get('HGDEMANDIMPORT') != 'disable'
79 82 ):
80 83 demandimport.enable()
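
The IGNORES set above is the hook that the comment "Extensions can add to this set" refers to. A small, hypothetical sketch of how third-party code typically opts modules out of demand loading, using only the names defined or re-exported in this file (`IGNORES`, `deactivated`); the extension module name is made up:

# Hypothetical extension snippet, not part of this change: keep a module with
# import-time side effects from being lazily imported.
from hgdemandimport import IGNORES, deactivated

IGNORES.add('myextmod.sideeffects')  # illustrative module name

# Or perform a one-off eager import while demandimport stays enabled elsewhere:
with deactivated():
    import myextmod.sideeffects  # noqa: F401  (illustrative)
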
@@ -1,2817 +1,2819 b''
1 1 # revset.py - revision set queries for mercurial
2 2 #
3 3 # Copyright 2010 Olivia Mackall <olivia@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8
9 9 import binascii
10 10 import re
11 11
12 12 from .i18n import _
13 13 from .pycompat import getattr
14 14 from .node import (
15 15 bin,
16 16 nullrev,
17 17 wdirrev,
18 18 )
19 19 from . import (
20 20 dagop,
21 21 destutil,
22 22 diffutil,
23 23 encoding,
24 24 error,
25 25 grep as grepmod,
26 26 hbisect,
27 27 match as matchmod,
28 28 obsolete as obsmod,
29 29 obsutil,
30 30 pathutil,
31 31 phases,
32 32 pycompat,
33 33 registrar,
34 34 repoview,
35 35 revsetlang,
36 36 scmutil,
37 37 smartset,
38 38 stack as stackmod,
39 39 util,
40 40 )
41 41 from .utils import (
42 42 dateutil,
43 43 stringutil,
44 44 urlutil,
45 45 )
46 46
47 47 # helpers for processing parsed tree
48 48 getsymbol = revsetlang.getsymbol
49 49 getstring = revsetlang.getstring
50 50 getinteger = revsetlang.getinteger
51 51 getboolean = revsetlang.getboolean
52 52 getlist = revsetlang.getlist
53 53 getintrange = revsetlang.getintrange
54 54 getargs = revsetlang.getargs
55 55 getargsdict = revsetlang.getargsdict
56 56
57 57 baseset = smartset.baseset
58 58 generatorset = smartset.generatorset
59 59 spanset = smartset.spanset
60 60 fullreposet = smartset.fullreposet
61 61
62 62 # revisions not included in all(), but populated if specified
63 63 _virtualrevs = (nullrev, wdirrev)
64 64
65 65 # Constants for ordering requirement, used in getset():
66 66 #
67 67 # If 'define', any nested functions and operations MAY change the ordering of
68 68 # the entries in the set (but if it changes the ordering, it MUST ALWAYS change
69 69 # it). If 'follow', any nested functions and operations MUST take the ordering
70 70 # specified by the first operand to the '&' operator.
71 71 #
72 72 # For instance,
73 73 #
74 74 # X & (Y | Z)
75 75 # ^ ^^^^^^^
76 76 # | follow
77 77 # define
78 78 #
79 79 # will be evaluated as 'or(y(x()), z(x()))', where 'x()' can change the order
80 80 # of the entries in the set, but 'y()', 'z()' and 'or()' shouldn't.
81 81 #
82 82 # 'any' means the order doesn't matter. For instance,
83 83 #
84 84 # (X & !Y) | ancestors(Z)
85 85 # ^ ^
86 86 # any any
87 87 #
88 88 # For 'X & !Y', 'X' decides the order and 'Y' is subtracted from 'X', so the
89 89 # order of 'Y' does not matter. For 'ancestors(Z)', Z's order does not matter
90 90 # since 'ancestors' does not care about the order of its argument.
91 91 #
92 92 # Currently, most revsets do not care about the order, so 'define' is
93 93 # equivalent to 'follow' for them, and the resulting order is based on the
94 94 # 'subset' parameter passed down to them:
95 95 #
96 96 # m = revset.match(...)
97 97 # m(repo, subset, order=defineorder)
98 98 # ^^^^^^
99 99 # For most revsets, 'define' means using the order this subset provides
100 100 #
101 101 # There are a few revsets that always redefine the order if 'define' is
102 102 # specified: 'sort(X)', 'reverse(X)', 'x:y'.
103 103 anyorder = b'any' # doesn't care about the order; could even be random-shuffled
104 104 defineorder = b'define' # ALWAYS redefine, or ALWAYS follow the current order
105 105 followorder = b'follow' # MUST follow the current order
106 106
107 107 # helpers
108 108
109 109
110 110 def getset(repo, subset, x, order=defineorder):
111 111 if not x:
112 112 raise error.ParseError(_(b"missing argument"))
113 113 return methods[x[0]](repo, subset, *x[1:], order=order)
114 114
115 115
116 116 def _getrevsource(repo, r):
117 117 extra = repo[r].extra()
118 118 for label in (b'source', b'transplant_source', b'rebase_source'):
119 119 if label in extra:
120 120 try:
121 121 return repo[extra[label]].rev()
122 122 except error.RepoLookupError:
123 123 pass
124 124 return None
125 125
126 126
127 127 def _sortedb(xs):
128 128 return sorted(pycompat.rapply(pycompat.maybebytestr, xs))
129 129
130 130
131 131 # operator methods
132 132
133 133
134 134 def stringset(repo, subset, x, order):
135 135 if not x:
136 136 raise error.ParseError(_(b"empty string is not a valid revision"))
137 137 x = scmutil.intrev(scmutil.revsymbol(repo, x))
138 138 if x in subset or x in _virtualrevs and isinstance(subset, fullreposet):
139 139 return baseset([x])
140 140 return baseset()
141 141
142 142
143 143 def rawsmartset(repo, subset, x, order):
144 144 """argument is already a smartset, use that directly"""
145 145 if order == followorder:
146 146 return subset & x
147 147 else:
148 148 return x & subset
149 149
150 150
151 151 def rangeset(repo, subset, x, y, order):
152 152 m = getset(repo, fullreposet(repo), x)
153 153 n = getset(repo, fullreposet(repo), y)
154 154
155 155 if not m or not n:
156 156 return baseset()
157 157 return _makerangeset(repo, subset, m.first(), n.last(), order)
158 158
159 159
160 160 def rangeall(repo, subset, x, order):
161 161 assert x is None
162 162 return _makerangeset(repo, subset, 0, repo.changelog.tiprev(), order)
163 163
164 164
165 165 def rangepre(repo, subset, y, order):
166 166 # ':y' can't be rewritten to '0:y' since '0' may be hidden
167 167 n = getset(repo, fullreposet(repo), y)
168 168 if not n:
169 169 return baseset()
170 170 return _makerangeset(repo, subset, 0, n.last(), order)
171 171
172 172
173 173 def rangepost(repo, subset, x, order):
174 174 m = getset(repo, fullreposet(repo), x)
175 175 if not m:
176 176 return baseset()
177 177 return _makerangeset(
178 178 repo, subset, m.first(), repo.changelog.tiprev(), order
179 179 )
180 180
181 181
182 182 def _makerangeset(repo, subset, m, n, order):
183 183 if m == n:
184 184 r = baseset([m])
185 185 elif n == wdirrev:
186 186 r = spanset(repo, m, len(repo)) + baseset([n])
187 187 elif m == wdirrev:
188 188 r = baseset([m]) + spanset(repo, repo.changelog.tiprev(), n - 1)
189 189 elif m < n:
190 190 r = spanset(repo, m, n + 1)
191 191 else:
192 192 r = spanset(repo, m, n - 1)
193 193
194 194 if order == defineorder:
195 195 return r & subset
196 196 else:
197 197 # carrying the sorting over when possible would be more efficient
198 198 return subset & r
199 199
200 200
201 201 def dagrange(repo, subset, x, y, order):
202 202 r = fullreposet(repo)
203 203 xs = dagop.reachableroots(
204 204 repo, getset(repo, r, x), getset(repo, r, y), includepath=True
205 205 )
206 206 return subset & xs
207 207
208 208
209 209 def andset(repo, subset, x, y, order):
210 210 if order == anyorder:
211 211 yorder = anyorder
212 212 else:
213 213 yorder = followorder
214 214 return getset(repo, getset(repo, subset, x, order), y, yorder)
215 215
216 216
217 217 def andsmallyset(repo, subset, x, y, order):
218 218 # 'andsmally(x, y)' is equivalent to 'and(x, y)', but faster when y is small
219 219 if order == anyorder:
220 220 yorder = anyorder
221 221 else:
222 222 yorder = followorder
223 223 return getset(repo, getset(repo, subset, y, yorder), x, order)
224 224
225 225
226 226 def differenceset(repo, subset, x, y, order):
227 227 return getset(repo, subset, x, order) - getset(repo, subset, y, anyorder)
228 228
229 229
230 230 def _orsetlist(repo, subset, xs, order):
231 231 assert xs
232 232 if len(xs) == 1:
233 233 return getset(repo, subset, xs[0], order)
234 234 p = len(xs) // 2
235 235 a = _orsetlist(repo, subset, xs[:p], order)
236 236 b = _orsetlist(repo, subset, xs[p:], order)
237 237 return a + b
238 238
239 239
240 240 def orset(repo, subset, x, order):
241 241 xs = getlist(x)
242 242 if not xs:
243 243 return baseset()
244 244 if order == followorder:
245 245 # slow path to take the subset order
246 246 return subset & _orsetlist(repo, fullreposet(repo), xs, anyorder)
247 247 else:
248 248 return _orsetlist(repo, subset, xs, order)
249 249
250 250
251 251 def notset(repo, subset, x, order):
252 252 return subset - getset(repo, subset, x, anyorder)
253 253
254 254
255 255 def relationset(repo, subset, x, y, order):
256 256 # this is a pretty basic implementation of the 'x#y' operator, still
257 257 # experimental and so undocumented. See the wiki for further ideas.
258 258 # https://www.mercurial-scm.org/wiki/RevsetOperatorPlan
259 259 rel = getsymbol(y)
260 260 if rel in relations:
261 261 return relations[rel](repo, subset, x, rel, order)
262 262
263 263 relnames = [r for r in relations.keys() if len(r) > 1]
264 264 raise error.UnknownIdentifier(rel, relnames)
265 265
266 266
267 267 def _splitrange(a, b):
268 268 """Split range with bounds a and b into two ranges at 0 and return two
269 269 tuples of numbers for use as startdepth and stopdepth arguments of
270 270 revancestors and revdescendants.
271 271
272 272 >>> _splitrange(-10, -5) # [-10:-5]
273 273 ((5, 11), (None, None))
274 274 >>> _splitrange(5, 10) # [5:10]
275 275 ((None, None), (5, 11))
276 276 >>> _splitrange(-10, 10) # [-10:10]
277 277 ((0, 11), (0, 11))
278 278 >>> _splitrange(-10, 0) # [-10:0]
279 279 ((0, 11), (None, None))
280 280 >>> _splitrange(0, 10) # [0:10]
281 281 ((None, None), (0, 11))
282 282 >>> _splitrange(0, 0) # [0:0]
283 283 ((0, 1), (None, None))
284 284 >>> _splitrange(1, -1) # [1:-1]
285 285 ((None, None), (None, None))
286 286 """
287 287 ancdepths = (None, None)
288 288 descdepths = (None, None)
289 289 if a == b == 0:
290 290 ancdepths = (0, 1)
291 291 if a < 0:
292 292 ancdepths = (-min(b, 0), -a + 1)
293 293 if b > 0:
294 294 descdepths = (max(a, 0), b + 1)
295 295 return ancdepths, descdepths
296 296
297 297
298 298 def generationsrel(repo, subset, x, rel, order):
299 299 z = (b'rangeall', None)
300 300 return generationssubrel(repo, subset, x, rel, z, order)
301 301
302 302
303 303 def generationssubrel(repo, subset, x, rel, z, order):
304 304 # TODO: rewrite tests, and drop startdepth argument from ancestors() and
305 305 # descendants() predicates
306 306 a, b = getintrange(
307 307 z,
308 308 _(b'relation subscript must be an integer or a range'),
309 309 _(b'relation subscript bounds must be integers'),
310 310 deffirst=-(dagop.maxlogdepth - 1),
311 311 deflast=+(dagop.maxlogdepth - 1),
312 312 )
313 313 (ancstart, ancstop), (descstart, descstop) = _splitrange(a, b)
314 314
315 315 if ancstart is None and descstart is None:
316 316 return baseset()
317 317
318 318 revs = getset(repo, fullreposet(repo), x)
319 319 if not revs:
320 320 return baseset()
321 321
322 322 if ancstart is not None and descstart is not None:
323 323 s = dagop.revancestors(repo, revs, False, ancstart, ancstop)
324 324 s += dagop.revdescendants(repo, revs, False, descstart, descstop)
325 325 elif ancstart is not None:
326 326 s = dagop.revancestors(repo, revs, False, ancstart, ancstop)
327 327 elif descstart is not None:
328 328 s = dagop.revdescendants(repo, revs, False, descstart, descstop)
329 329
330 330 return subset & s
331 331
332 332
333 333 def relsubscriptset(repo, subset, x, y, z, order):
334 334 # this is a pretty basic implementation of the 'x#y[z]' operator, still
335 335 # experimental and so undocumented. See the wiki for further ideas.
336 336 # https://www.mercurial-scm.org/wiki/RevsetOperatorPlan
337 337 rel = getsymbol(y)
338 338 if rel in subscriptrelations:
339 339 return subscriptrelations[rel](repo, subset, x, rel, z, order)
340 340
341 341 relnames = [r for r in subscriptrelations.keys() if len(r) > 1]
342 342 raise error.UnknownIdentifier(rel, relnames)
343 343
344 344
345 345 def subscriptset(repo, subset, x, y, order):
346 346 raise error.ParseError(_(b"can't use a subscript in this context"))
347 347
348 348
349 349 def listset(repo, subset, *xs, **opts):
350 350 raise error.ParseError(
351 351 _(b"can't use a list in this context"),
352 352 hint=_(b'see \'hg help "revsets.x or y"\''),
353 353 )
354 354
355 355
356 356 def keyvaluepair(repo, subset, k, v, order):
357 357 raise error.ParseError(_(b"can't use a key-value pair in this context"))
358 358
359 359
360 360 def func(repo, subset, a, b, order):
361 361 f = getsymbol(a)
362 362 if f in symbols:
363 363 func = symbols[f]
364 364 if getattr(func, '_takeorder', False):
365 365 return func(repo, subset, b, order)
366 366 return func(repo, subset, b)
367 367
368 368 keep = lambda fn: getattr(fn, '__doc__', None) is not None
369 369
370 370 syms = [s for (s, fn) in symbols.items() if keep(fn)]
371 371 raise error.UnknownIdentifier(f, syms)
372 372
373 373
374 374 # functions
375 375
376 376 # symbols are callables like:
377 377 # fn(repo, subset, x)
378 378 # with:
379 379 # repo - current repository instance
380 380 # subset - of revisions to be examined
381 381 # x - argument in tree form
382 382 symbols = revsetlang.symbols
383 383
384 384 # symbols which can't be used for a DoS attack for any given input
385 385 # (e.g. those which accept regexes as plain strings shouldn't be included)
386 386 # functions that just return a lot of changesets (like all) don't count here
387 387 safesymbols = set()
388 388
389 389 predicate = registrar.revsetpredicate()
390 390
391 391
392 392 @predicate(b'_destupdate')
393 393 def _destupdate(repo, subset, x):
394 394 # experimental revset for update destination
395 395 args = getargsdict(x, b'limit', b'clean')
396 396 return subset & baseset(
397 397 [destutil.destupdate(repo, **pycompat.strkwargs(args))[0]]
398 398 )
399 399
400 400
401 401 @predicate(b'_destmerge')
402 402 def _destmerge(repo, subset, x):
403 403 # experimental revset for merge destination
404 404 sourceset = None
405 405 if x is not None:
406 406 sourceset = getset(repo, fullreposet(repo), x)
407 407 return subset & baseset([destutil.destmerge(repo, sourceset=sourceset)])
408 408
409 409
410 410 @predicate(b'adds(pattern)', safe=True, weight=30)
411 411 def adds(repo, subset, x):
412 412 """Changesets that add a file matching pattern.
413 413
414 414 The pattern without explicit kind like ``glob:`` is expected to be
415 415 relative to the current directory and match against a file or a
416 416 directory.
417 417 """
418 418 # i18n: "adds" is a keyword
419 419 pat = getstring(x, _(b"adds requires a pattern"))
420 420 return checkstatus(repo, subset, pat, 'added')
421 421
422 422
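A hedged usage sketch for the status predicates such as ``adds()``: the assumed entry point is ``repo.revs()`` on an open repository, and the repository path and glob pattern are placeholders.

# Sketch: revisions that add a file under tests/ matching the glob pattern.
from mercurial import hg, ui as uimod

repo = hg.repository(uimod.ui.load(), b'/path/to/repo')  # placeholder path
for rev in repo.revs(b'adds("glob:tests/test-check-*")'):
    print(rev, repo[rev].hex().decode('ascii'))
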
423 423 @predicate(b'ancestor(*changeset)', safe=True, weight=0.5)
424 424 def ancestor(repo, subset, x):
425 425 """A greatest common ancestor of the changesets.
426 426
427 427 Accepts 0 or more changesets.
428 428 Will return an empty list when passed no args.
429 429 The greatest common ancestor of a single changeset is that changeset.
430 430 """
431 431 reviter = iter(orset(repo, fullreposet(repo), x, order=anyorder))
432 432 try:
433 433 anc = repo[next(reviter)]
434 434 except StopIteration:
435 435 return baseset()
436 436 for r in reviter:
437 437 anc = anc.ancestor(repo[r])
438 438
439 439 r = scmutil.intrev(anc)
440 440 if r in subset:
441 441 return baseset([r])
442 442 return baseset()
443 443
444 444
445 445 def _ancestors(
446 446 repo, subset, x, followfirst=False, startdepth=None, stopdepth=None
447 447 ):
448 448 heads = getset(repo, fullreposet(repo), x)
449 449 if not heads:
450 450 return baseset()
451 451 s = dagop.revancestors(repo, heads, followfirst, startdepth, stopdepth)
452 452 return subset & s
453 453
454 454
455 455 @predicate(b'ancestors(set[, depth])', safe=True)
456 456 def ancestors(repo, subset, x):
457 457 """Changesets that are ancestors of changesets in set, including the
458 458 given changesets themselves.
459 459
460 460 If depth is specified, the result only includes changesets up to
461 461 the specified generation.
462 462 """
463 463 # startdepth is for internal use only until we can decide the UI
464 464 args = getargsdict(x, b'ancestors', b'set depth startdepth')
465 465 if b'set' not in args:
466 466 # i18n: "ancestors" is a keyword
467 467 raise error.ParseError(_(b'ancestors takes at least 1 argument'))
468 468 startdepth = stopdepth = None
469 469 if b'startdepth' in args:
470 470 n = getinteger(
471 471 args[b'startdepth'], b"ancestors expects an integer startdepth"
472 472 )
473 473 if n < 0:
474 474 raise error.ParseError(b"negative startdepth")
475 475 startdepth = n
476 476 if b'depth' in args:
477 477 # i18n: "ancestors" is a keyword
478 478 n = getinteger(args[b'depth'], _(b"ancestors expects an integer depth"))
479 479 if n < 0:
480 480 raise error.ParseError(_(b"negative depth"))
481 481 stopdepth = n + 1
482 482 return _ancestors(
483 483 repo, subset, args[b'set'], startdepth=startdepth, stopdepth=stopdepth
484 484 )
485 485
486 486
487 487 @predicate(b'_firstancestors', safe=True)
488 488 def _firstancestors(repo, subset, x):
489 489 # ``_firstancestors(set)``
490 490 # Like ``ancestors(set)`` but follows only the first parents.
491 491 return _ancestors(repo, subset, x, followfirst=True)
492 492
493 493
494 494 def _childrenspec(repo, subset, x, n, order):
495 495 """Changesets that are the Nth child of a changeset
496 496 in set.
497 497 """
498 498 cs = set()
499 499 for r in getset(repo, fullreposet(repo), x):
500 500 for i in range(n):
501 501 c = repo[r].children()
502 502 if len(c) == 0:
503 503 break
504 504 if len(c) > 1:
505 505 raise error.RepoLookupError(
506 506 _(b"revision in set has more than one child")
507 507 )
508 508 r = c[0].rev()
509 509 else:
510 510 cs.add(r)
511 511 return subset & cs
512 512
513 513
514 514 def ancestorspec(repo, subset, x, n, order):
515 515 """``set~n``
516 516 Changesets that are the Nth ancestor (first parents only) of a changeset
517 517 in set.
518 518 """
519 519 n = getinteger(n, _(b"~ expects a number"))
520 520 if n < 0:
521 521 # children lookup
522 522 return _childrenspec(repo, subset, x, -n, order)
523 523 ps = set()
524 524 cl = repo.changelog
525 525 for r in getset(repo, fullreposet(repo), x):
526 526 for i in range(n):
527 527 try:
528 528 r = cl.parentrevs(r)[0]
529 529 except error.WdirUnsupported:
530 530 r = repo[r].p1().rev()
531 531 ps.add(r)
532 532 return subset & ps
533 533
534 534
535 535 @predicate(b'author(string)', safe=True, weight=10)
536 536 def author(repo, subset, x):
537 537 """Alias for ``user(string)``."""
538 538 # i18n: "author" is a keyword
539 539 n = getstring(x, _(b"author requires a string"))
540 540 kind, pattern, matcher = _substringmatcher(n, casesensitive=False)
541 541 return subset.filter(
542 542 lambda x: matcher(repo[x].user()), condrepr=(b'<user %r>', n)
543 543 )
544 544
545 545
546 546 @predicate(b'bisect(string)', safe=True)
547 547 def bisect(repo, subset, x):
548 548 """Changesets marked in the specified bisect status:
549 549
550 550 - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip
551 551 - ``goods``, ``bads`` : csets topologically good/bad
552 552 - ``range`` : csets taking part in the bisection
553 553 - ``pruned`` : csets that are goods, bads or skipped
554 554 - ``untested`` : csets whose fate is yet unknown
555 555 - ``ignored`` : csets ignored due to DAG topology
556 556 - ``current`` : the cset currently being bisected
557 557 """
558 558 # i18n: "bisect" is a keyword
559 559 status = getstring(x, _(b"bisect requires a string")).lower()
560 560 state = set(hbisect.get(repo, status))
561 561 return subset & state
562 562
563 563
564 564 # Backward-compatibility
565 565 # - no help entry so that we do not advertise it any more
566 566 @predicate(b'bisected', safe=True)
567 567 def bisected(repo, subset, x):
568 568 return bisect(repo, subset, x)
569 569
570 570
571 571 @predicate(b'bookmark([name])', safe=True)
572 572 def bookmark(repo, subset, x):
573 573 """The named bookmark or all bookmarks.
574 574
575 575 Pattern matching is supported for `name`. See :hg:`help revisions.patterns`.
576 576 """
577 577 # i18n: "bookmark" is a keyword
578 578 args = getargs(x, 0, 1, _(b'bookmark takes one or no arguments'))
579 579 if args:
580 580 bm = getstring(
581 581 args[0],
582 582 # i18n: "bookmark" is a keyword
583 583 _(b'the argument to bookmark must be a string'),
584 584 )
585 585 kind, pattern, matcher = stringutil.stringmatcher(bm)
586 586 bms = set()
587 587 if kind == b'literal':
588 588 if bm == pattern:
589 589 pattern = repo._bookmarks.expandname(pattern)
590 590 bmrev = repo._bookmarks.get(pattern, None)
591 591 if not bmrev:
592 592 raise error.RepoLookupError(
593 593 _(b"bookmark '%s' does not exist") % pattern
594 594 )
595 595 bms.add(repo[bmrev].rev())
596 596 else:
597 597 matchrevs = set()
598 598 for name, bmrev in repo._bookmarks.items():
599 599 if matcher(name):
600 600 matchrevs.add(bmrev)
601 601 for bmrev in matchrevs:
602 602 bms.add(repo[bmrev].rev())
603 603 else:
604 604 bms = {repo[r].rev() for r in repo._bookmarks.values()}
605 605 bms -= {nullrev}
606 606 return subset & bms
607 607
608 608
609 609 @predicate(b'branch(string or set)', safe=True, weight=10)
610 610 def branch(repo, subset, x):
611 611 """
612 612 All changesets belonging to the given branch or the branches of the given
613 613 changesets.
614 614
615 615 Pattern matching is supported for `string`. See
616 616 :hg:`help revisions.patterns`.
617 617 """
618 618 getbi = repo.revbranchcache().branchinfo
619 619
620 620 def getbranch(r):
621 621 try:
622 622 return getbi(r)[0]
623 623 except error.WdirUnsupported:
624 624 return repo[r].branch()
625 625
626 626 try:
627 627 b = getstring(x, b'')
628 628 except error.ParseError:
629 629 # not a string, but another revspec, e.g. tip()
630 630 pass
631 631 else:
632 632 kind, pattern, matcher = stringutil.stringmatcher(b)
633 633 if kind == b'literal':
634 634 # note: falls through to the revspec case if no branch with
635 635 # this name exists and pattern kind is not specified explicitly
636 636 if repo.branchmap().hasbranch(pattern):
637 637 return subset.filter(
638 638 lambda r: matcher(getbranch(r)),
639 639 condrepr=(b'<branch %r>', b),
640 640 )
641 641 if b.startswith(b'literal:'):
642 642 raise error.RepoLookupError(
643 643 _(b"branch '%s' does not exist") % pattern
644 644 )
645 645 else:
646 646 return subset.filter(
647 647 lambda r: matcher(getbranch(r)), condrepr=(b'<branch %r>', b)
648 648 )
649 649
650 650 s = getset(repo, fullreposet(repo), x)
651 651 b = set()
652 652 for r in s:
653 653 b.add(getbranch(r))
654 654 c = s.__contains__
655 655 return subset.filter(
656 656 lambda r: c(r) or getbranch(r) in b,
657 657 condrepr=lambda: b'<branch %r>' % _sortedb(b),
658 658 )
659 659
660 660
661 661 @predicate(b'phasedivergent()', safe=True)
662 662 def phasedivergent(repo, subset, x):
663 663 """Mutable changesets marked as successors of public changesets.
664 664
665 665 Only non-public and non-obsolete changesets can be `phasedivergent`.
666 666 (EXPERIMENTAL)
667 667 """
668 668 # i18n: "phasedivergent" is a keyword
669 669 getargs(x, 0, 0, _(b"phasedivergent takes no arguments"))
670 670 phasedivergent = obsmod.getrevs(repo, b'phasedivergent')
671 671 return subset & phasedivergent
672 672
673 673
674 674 @predicate(b'bundle()', safe=True)
675 675 def bundle(repo, subset, x):
676 676 """Changesets in the bundle.
677 677
678 678 Bundle must be specified by the -R option."""
679 679
680 680 try:
681 681 bundlerevs = repo.changelog.bundlerevs
682 682 except AttributeError:
683 683 raise error.Abort(_(b"no bundle provided - specify with -R"))
684 684 return subset & bundlerevs
685 685
686 686
687 687 def checkstatus(repo, subset, pat, field):
688 688 """Helper for status-related revsets (adds, removes, modifies).
689 689 The field parameter says which kind is desired.
690 690 """
691 691 hasset = matchmod.patkind(pat) == b'set'
692 692
693 693 mcache = [None]
694 694
695 695 def matches(x):
696 696 c = repo[x]
697 697 if not mcache[0] or hasset:
698 698 mcache[0] = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
699 699 m = mcache[0]
700 700 fname = None
701 701
702 702 assert m is not None # help pytype
703 703 if not m.anypats() and len(m.files()) == 1:
704 704 fname = m.files()[0]
705 705 if fname is not None:
706 706 if fname not in c.files():
707 707 return False
708 708 else:
709 709 if not any(m(f) for f in c.files()):
710 710 return False
711 711 files = getattr(repo.status(c.p1().node(), c.node()), field)
712 712 if fname is not None:
713 713 if fname in files:
714 714 return True
715 715 else:
716 716 if any(m(f) for f in files):
717 717 return True
718 718
719 719 return subset.filter(
720 720 matches, condrepr=(b'<status.%s %r>', pycompat.sysbytes(field), pat)
721 721 )
722 722
723 723
724 724 def _children(repo, subset, parentset):
725 725 if not parentset:
726 726 return baseset()
727 727 cs = set()
728 728 pr = repo.changelog.parentrevs
729 729 minrev = parentset.min()
730 730 for r in subset:
731 731 if r <= minrev:
732 732 continue
733 733 p1, p2 = pr(r)
734 734 if p1 in parentset:
735 735 cs.add(r)
736 736 if p2 != nullrev and p2 in parentset:
737 737 cs.add(r)
738 738 return baseset(cs)
739 739
740 740
741 741 @predicate(b'children(set)', safe=True)
742 742 def children(repo, subset, x):
743 743 """Child changesets of changesets in set."""
744 744 s = getset(repo, fullreposet(repo), x)
745 745 cs = _children(repo, subset, s)
746 746 return subset & cs
747 747
748 748
749 749 @predicate(b'closed()', safe=True, weight=10)
750 750 def closed(repo, subset, x):
751 751 """Changeset is closed."""
752 752 # i18n: "closed" is a keyword
753 753 getargs(x, 0, 0, _(b"closed takes no arguments"))
754 754 return subset.filter(
755 755 lambda r: repo[r].closesbranch(), condrepr=b'<branch closed>'
756 756 )
757 757
758 758
759 759 # for internal use
760 760 @predicate(b'_commonancestorheads(set)', safe=True)
761 761 def _commonancestorheads(repo, subset, x):
762 762 # This is an internal method for quickly calculating "heads(::x and
763 763 # ::y)"
764 764
765 765 # These greatest common ancestors are the same ones that the consensus bid
766 766 # merge will find.
767 767 startrevs = getset(repo, fullreposet(repo), x, order=anyorder)
768 768
769 769 ancs = repo.changelog._commonancestorsheads(*list(startrevs))
770 770 return subset & baseset(ancs)
771 771
772 772
773 773 @predicate(b'commonancestors(set)', safe=True)
774 774 def commonancestors(repo, subset, x):
775 775 """Changesets that are ancestors of every changeset in set."""
776 776 startrevs = getset(repo, fullreposet(repo), x, order=anyorder)
777 777 if not startrevs:
778 778 return baseset()
779 779 for r in startrevs:
780 780 subset &= dagop.revancestors(repo, baseset([r]))
781 781 return subset
782 782
783 783
784 784 @predicate(b'conflictlocal()', safe=True)
785 785 def conflictlocal(repo, subset, x):
786 786 """The local side of the merge, if currently in an unresolved merge.
787 787
788 788 "merge" here includes merge conflicts from e.g. 'hg rebase' or 'hg graft'.
789 789 """
790 790 getargs(x, 0, 0, _(b"conflictlocal takes no arguments"))
791 791 from . import mergestate as mergestatemod
792 792
793 793 mergestate = mergestatemod.mergestate.read(repo)
794 794 if mergestate.active() and repo.changelog.hasnode(mergestate.local):
795 795 return subset & {repo.changelog.rev(mergestate.local)}
796 796
797 797 return baseset()
798 798
799 799
800 800 @predicate(b'conflictother()', safe=True)
801 801 def conflictother(repo, subset, x):
802 802 """The other side of the merge, if currently in an unresolved merge.
803 803
804 804 "merge" here includes merge conflicts from e.g. 'hg rebase' or 'hg graft'.
805 805 """
806 806 getargs(x, 0, 0, _(b"conflictother takes no arguments"))
807 807 from . import mergestate as mergestatemod
808 808
809 809 mergestate = mergestatemod.mergestate.read(repo)
810 810 if mergestate.active() and repo.changelog.hasnode(mergestate.other):
811 811 return subset & {repo.changelog.rev(mergestate.other)}
812 812
813 813 return baseset()
814 814
815 815
816 816 @predicate(b'contains(pattern)', weight=100)
817 817 def contains(repo, subset, x):
818 818 """The revision's manifest contains a file matching pattern (but might not
819 819 modify it). See :hg:`help patterns` for information about file patterns.
820 820
821 821 The pattern without explicit kind like ``glob:`` is expected to be
822 822 relative to the current directory and match against a file exactly
823 823 for efficiency.
824 824 """
825 825 # i18n: "contains" is a keyword
826 826 pat = getstring(x, _(b"contains requires a pattern"))
827 827
828 828 def matches(x):
829 829 if not matchmod.patkind(pat):
830 830 pats = pathutil.canonpath(repo.root, repo.getcwd(), pat)
831 831 if pats in repo[x]:
832 832 return True
833 833 else:
834 834 c = repo[x]
835 835 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
836 836 for f in c.manifest():
837 837 if m(f):
838 838 return True
839 839 return False
840 840
841 841 return subset.filter(matches, condrepr=(b'<contains %r>', pat))
842 842
843 843
844 844 @predicate(b'converted([id])', safe=True)
845 845 def converted(repo, subset, x):
846 846 """Changesets converted from the given identifier in the old repository if
847 847 present, or all converted changesets if no identifier is specified.
848 848 """
849 849
850 850 # There is exactly no chance of resolving the revision, so do a simple
851 851 # string compare and hope for the best
852 852
853 853 rev = None
854 854 # i18n: "converted" is a keyword
855 855 l = getargs(x, 0, 1, _(b'converted takes one or no arguments'))
856 856 if l:
857 857 # i18n: "converted" is a keyword
858 858 rev = getstring(l[0], _(b'converted requires a revision'))
859 859
860 860 def _matchvalue(r):
861 861 source = repo[r].extra().get(b'convert_revision', None)
862 862 return source is not None and (rev is None or source.startswith(rev))
863 863
864 864 return subset.filter(
865 865 lambda r: _matchvalue(r), condrepr=(b'<converted %r>', rev)
866 866 )
867 867
868 868
869 869 @predicate(b'date(interval)', safe=True, weight=10)
870 870 def date(repo, subset, x):
871 871 """Changesets within the interval, see :hg:`help dates`."""
872 872 # i18n: "date" is a keyword
873 873 ds = getstring(x, _(b"date requires a string"))
874 874 dm = dateutil.matchdate(ds)
875 875 return subset.filter(
876 876 lambda x: dm(repo[x].date()[0]), condrepr=(b'<date %r>', ds)
877 877 )
878 878
879 879
880 880 @predicate(b'desc(string)', safe=True, weight=10)
881 881 def desc(repo, subset, x):
882 882 """Search commit message for string. The match is case-insensitive.
883 883
884 884 Pattern matching is supported for `string`. See
885 885 :hg:`help revisions.patterns`.
886 886 """
887 887 # i18n: "desc" is a keyword
888 888 ds = getstring(x, _(b"desc requires a string"))
889 889
890 890 kind, pattern, matcher = _substringmatcher(ds, casesensitive=False)
891 891
892 892 return subset.filter(
893 893 lambda r: matcher(repo[r].description()), condrepr=(b'<desc %r>', ds)
894 894 )
895 895
896 896
897 897 def _descendants(
898 898 repo, subset, x, followfirst=False, startdepth=None, stopdepth=None
899 899 ):
900 900 roots = getset(repo, fullreposet(repo), x)
901 901 if not roots:
902 902 return baseset()
903 903 s = dagop.revdescendants(repo, roots, followfirst, startdepth, stopdepth)
904 904 return subset & s
905 905
906 906
907 907 @predicate(b'descendants(set[, depth])', safe=True)
908 908 def descendants(repo, subset, x):
909 909 """Changesets which are descendants of changesets in set, including the
910 910 given changesets themselves.
911 911
912 912 If depth is specified, the result only includes changesets up to
913 913 the specified generation.
914 914 """
915 915 # startdepth is for internal use only until we can decide the UI
916 916 args = getargsdict(x, b'descendants', b'set depth startdepth')
917 917 if b'set' not in args:
918 918 # i18n: "descendants" is a keyword
919 919 raise error.ParseError(_(b'descendants takes at least 1 argument'))
920 920 startdepth = stopdepth = None
921 921 if b'startdepth' in args:
922 922 n = getinteger(
923 923 args[b'startdepth'], b"descendants expects an integer startdepth"
924 924 )
925 925 if n < 0:
926 926 raise error.ParseError(b"negative startdepth")
927 927 startdepth = n
928 928 if b'depth' in args:
929 929 # i18n: "descendants" is a keyword
930 930 n = getinteger(
931 931 args[b'depth'], _(b"descendants expects an integer depth")
932 932 )
933 933 if n < 0:
934 934 raise error.ParseError(_(b"negative depth"))
935 935 stopdepth = n + 1
936 936 return _descendants(
937 937 repo, subset, args[b'set'], startdepth=startdepth, stopdepth=stopdepth
938 938 )
939 939
940 940
941 941 @predicate(b'_firstdescendants', safe=True)
942 942 def _firstdescendants(repo, subset, x):
943 943 # ``_firstdescendants(set)``
944 944 # Like ``descendants(set)`` but follows only the first parents.
945 945 return _descendants(repo, subset, x, followfirst=True)
946 946
947 947
948 948 @predicate(b'destination([set])', safe=True, weight=10)
949 949 def destination(repo, subset, x):
950 950 """Changesets that were created by a graft, transplant or rebase operation,
951 951 with the given revisions specified as the source. Omitting the optional set
952 952 is the same as passing all().
953 953 """
954 954 if x is not None:
955 955 sources = getset(repo, fullreposet(repo), x)
956 956 else:
957 957 sources = fullreposet(repo)
958 958
959 959 dests = set()
960 960
961 961 # subset contains all of the possible destinations that can be returned, so
962 962 # iterate over them and see if their source(s) were provided in the arg set.
963 963 # Even if the immediate src of r is not in the arg set, src's source (or
964 964 # further back) may be. Scanning back further than the immediate src allows
965 965 # transitive transplants and rebases to yield the same results as transitive
966 966 # grafts.
967 967 for r in subset:
968 968 src = _getrevsource(repo, r)
969 969 lineage = None
970 970
971 971 while src is not None:
972 972 if lineage is None:
973 973 lineage = list()
974 974
975 975 lineage.append(r)
976 976
977 977 # The visited lineage is a match if the current source is in the arg
978 978 # set. Since every candidate dest is visited by way of iterating
979 979 # subset, any dests further back in the lineage will be tested by a
980 980 # different iteration over subset. Likewise, if the src was already
981 981 # selected, the current lineage can be selected without going back
982 982 # further.
983 983 if src in sources or src in dests:
984 984 dests.update(lineage)
985 985 break
986 986
987 987 r = src
988 988 src = _getrevsource(repo, r)
989 989
990 990 return subset.filter(
991 991 dests.__contains__,
992 992 condrepr=lambda: b'<destination %r>' % _sortedb(dests),
993 993 )
994 994
995 995
996 996 @predicate(b'diffcontains(pattern)', weight=110)
997 997 def diffcontains(repo, subset, x):
998 998 """Search revision differences for when the pattern was added or removed.
999 999
1000 1000 The pattern may be a substring literal or a regular expression. See
1001 1001 :hg:`help revisions.patterns`.
1002 1002 """
1003 1003 args = getargsdict(x, b'diffcontains', b'pattern')
1004 1004 if b'pattern' not in args:
1005 1005 # i18n: "diffcontains" is a keyword
1006 1006 raise error.ParseError(_(b'diffcontains takes at least 1 argument'))
1007 1007
1008 1008 pattern = getstring(
1009 1009 args[b'pattern'], _(b'diffcontains requires a string pattern')
1010 1010 )
1011 1011 regexp = stringutil.substringregexp(pattern, re.M)
1012 1012
1013 1013 # TODO: add support for file pattern and --follow. For example,
1014 1014 # diffcontains(pattern[, set]) where set may be file(pattern) or
1015 1015 # follow(pattern), and we'll eventually add support for narrowing
1016 1016 # files by revset?
1017 1017 fmatch = matchmod.always()
1018 1018
1019 1019 def makefilematcher(ctx):
1020 1020 return fmatch
1021 1021
1022 1022 # TODO: search in a windowed way
1023 1023 searcher = grepmod.grepsearcher(repo.ui, repo, regexp, diff=True)
1024 1024
1025 1025 def testdiff(rev):
1026 1026 # consume the generator to discard revfiles/matches cache
1027 1027 found = False
1028 1028 for fn, ctx, pstates, states in searcher.searchfiles(
1029 1029 baseset([rev]), makefilematcher
1030 1030 ):
1031 1031 if next(grepmod.difflinestates(pstates, states), None):
1032 1032 found = True
1033 1033 return found
1034 1034
1035 1035 return subset.filter(testdiff, condrepr=(b'<diffcontains %r>', pattern))
1036 1036
1037 1037
1038 1038 @predicate(b'contentdivergent()', safe=True)
1039 1039 def contentdivergent(repo, subset, x):
1040 1040 """
1041 1041 Final successors of changesets with an alternative set of final
1042 1042 successors. (EXPERIMENTAL)
1043 1043 """
1044 1044 # i18n: "contentdivergent" is a keyword
1045 1045 getargs(x, 0, 0, _(b"contentdivergent takes no arguments"))
1046 1046 contentdivergent = obsmod.getrevs(repo, b'contentdivergent')
1047 1047 return subset & contentdivergent
1048 1048
1049 1049
1050 1050 @predicate(b'expectsize(set[, size])', safe=True, takeorder=True)
1051 1051 def expectsize(repo, subset, x, order):
1052 1052 """Return the given revset if size matches the revset size.
1053 1053 Abort if the revset doesn't have the given size.
1054 1054 size can either be an integer range or an integer.
1055 1055
1056 1056 For example, ``expectsize(0:1, 3:5)`` will abort as revset size is 2 and
1057 1057 2 is not between 3 and 5 inclusive."""
1058 1058
1059 1059 args = getargsdict(x, b'expectsize', b'set size')
1060 1060 minsize = 0
1061 1061 maxsize = len(repo) + 1
1062 1062 err = b''
1063 1063 if b'size' not in args or b'set' not in args:
1064 1064 raise error.ParseError(_(b'invalid set of arguments'))
1065 1065 minsize, maxsize = getintrange(
1066 1066 args[b'size'],
1067 1067 _(b'expectsize requires a size range or a positive integer'),
1068 1068 _(b'size range bounds must be integers'),
1069 1069 minsize,
1070 1070 maxsize,
1071 1071 )
1072 1072 if minsize < 0 or maxsize < 0:
1073 1073 raise error.ParseError(_(b'negative size'))
1074 1074 rev = getset(repo, fullreposet(repo), args[b'set'], order=order)
1075 1075 if minsize != maxsize and (len(rev) < minsize or len(rev) > maxsize):
1076 1076 err = _(b'revset size mismatch. expected between %d and %d, got %d') % (
1077 1077 minsize,
1078 1078 maxsize,
1079 1079 len(rev),
1080 1080 )
1081 1081 elif minsize == maxsize and len(rev) != minsize:
1082 1082 err = _(b'revset size mismatch. expected %d, got %d') % (
1083 1083 minsize,
1084 1084 len(rev),
1085 1085 )
1086 1086 if err:
1087 1087 raise error.RepoLookupError(err)
1088 1088 if order == followorder:
1089 1089 return subset & rev
1090 1090 else:
1091 1091 return rev & subset
1092 1092
1093 1093
1094 1094 @predicate(b'extdata(source)', safe=False, weight=100)
1095 1095 def extdata(repo, subset, x):
1096 1096 """Changesets in the specified extdata source. (EXPERIMENTAL)"""
1097 1097 # i18n: "extdata" is a keyword
1098 1098 args = getargsdict(x, b'extdata', b'source')
1099 1099 source = getstring(
1100 1100 args.get(b'source'),
1101 1101 # i18n: "extdata" is a keyword
1102 1102 _(b'extdata takes at least 1 string argument'),
1103 1103 )
1104 1104 data = scmutil.extdatasource(repo, source)
1105 1105 return subset & baseset(data)
1106 1106
1107 1107
1108 1108 @predicate(b'extinct()', safe=True)
1109 1109 def extinct(repo, subset, x):
1110 1110 """Obsolete changesets with obsolete descendants only. (EXPERIMENTAL)"""
1111 1111 # i18n: "extinct" is a keyword
1112 1112 getargs(x, 0, 0, _(b"extinct takes no arguments"))
1113 1113 extincts = obsmod.getrevs(repo, b'extinct')
1114 1114 return subset & extincts
1115 1115
1116 1116
1117 1117 @predicate(b'extra(label, [value])', safe=True)
1118 1118 def extra(repo, subset, x):
1119 1119 """Changesets with the given label in the extra metadata, with the given
1120 1120 optional value.
1121 1121
1122 1122 Pattern matching is supported for `value`. See
1123 1123 :hg:`help revisions.patterns`.
1124 1124 """
1125 1125 args = getargsdict(x, b'extra', b'label value')
1126 1126 if b'label' not in args:
1127 1127 # i18n: "extra" is a keyword
1128 1128 raise error.ParseError(_(b'extra takes at least 1 argument'))
1129 1129 # i18n: "extra" is a keyword
1130 1130 label = getstring(
1131 1131 args[b'label'], _(b'first argument to extra must be a string')
1132 1132 )
1133 1133 value = None
1134 1134
1135 1135 if b'value' in args:
1136 1136 # i18n: "extra" is a keyword
1137 1137 value = getstring(
1138 1138 args[b'value'], _(b'second argument to extra must be a string')
1139 1139 )
1140 1140 kind, value, matcher = stringutil.stringmatcher(value)
1141 1141
1142 1142 def _matchvalue(r):
1143 1143 extra = repo[r].extra()
1144 1144 return label in extra and (value is None or matcher(extra[label]))
1145 1145
1146 1146 return subset.filter(
1147 1147 lambda r: _matchvalue(r), condrepr=(b'<extra[%r] %r>', label, value)
1148 1148 )
1149 1149
1150 1150
1151 1151 @predicate(b'filelog(pattern)', safe=True)
1152 1152 def filelog(repo, subset, x):
1153 1153 """Changesets connected to the specified filelog.
1154 1154
1155 1155 For performance reasons, visits only revisions mentioned in the file-level
1156 1156 filelog, rather than filtering through all changesets (much faster, but
1157 1157 doesn't include deletes or duplicate changes). For a slower, more accurate
1158 1158 result, use ``file()``.
1159 1159
1160 1160 The pattern without explicit kind like ``glob:`` is expected to be
1161 1161 relative to the current directory and match against a file exactly
1162 1162 for efficiency.
1163 1163 """
1164 1164
1165 1165 # i18n: "filelog" is a keyword
1166 1166 pat = getstring(x, _(b"filelog requires a pattern"))
1167 1167 s = set()
1168 1168 cl = repo.changelog
1169 1169
1170 1170 if not matchmod.patkind(pat):
1171 1171 f = pathutil.canonpath(repo.root, repo.getcwd(), pat)
1172 1172 files = [f]
1173 1173 else:
1174 1174 m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=repo[None])
1175 1175 files = (f for f in repo[None] if m(f))
1176 1176
1177 1177 for f in files:
1178 1178 fl = repo.file(f)
1179 1179 known = {}
1180 1180 scanpos = 0
1181 1181 for fr in list(fl):
1182 1182 fn = fl.node(fr)
1183 1183 if fn in known:
1184 1184 s.add(known[fn])
1185 1185 continue
1186 1186
1187 1187 lr = fl.linkrev(fr)
1188 1188 if lr in cl:
1189 1189 s.add(lr)
1190 1190 elif scanpos is not None:
1191 1191 # lowest matching changeset is filtered, scan further
1192 1192 # ahead in changelog
1193 1193 start = max(lr, scanpos) + 1
1194 1194 scanpos = None
1195 1195 for r in cl.revs(start):
1196 1196 # minimize parsing of non-matching entries
1197 1197 if f in cl.revision(r) and f in cl.readfiles(r):
1198 1198 try:
1199 1199 # try to use manifest delta fastpath
1200 1200 n = repo[r].filenode(f)
1201 1201 if n not in known:
1202 1202 if n == fn:
1203 1203 s.add(r)
1204 1204 scanpos = r
1205 1205 break
1206 1206 else:
1207 1207 known[n] = r
1208 1208 except error.ManifestLookupError:
1209 1209 # deletion in changelog
1210 1210 continue
1211 1211
1212 1212 return subset & s
1213 1213
1214 1214
1215 1215 @predicate(b'first(set, [n])', safe=True, takeorder=True, weight=0)
1216 1216 def first(repo, subset, x, order):
1217 1217 """An alias for limit()."""
1218 1218 return limit(repo, subset, x, order)
1219 1219
1220 1220
1221 1221 def _follow(repo, subset, x, name, followfirst=False):
1222 1222 args = getargsdict(x, name, b'file startrev')
1223 1223 revs = None
1224 1224 if b'startrev' in args:
1225 1225 revs = getset(repo, fullreposet(repo), args[b'startrev'])
1226 1226 if b'file' in args:
1227 1227 x = getstring(args[b'file'], _(b"%s expected a pattern") % name)
1228 1228 if revs is None:
1229 1229 revs = [None]
1230 1230 fctxs = []
1231 1231 for r in revs:
1232 1232 ctx = mctx = repo[r]
1233 1233 if r is None:
1234 1234 ctx = repo[b'.']
1235 1235 m = matchmod.match(
1236 1236 repo.root, repo.getcwd(), [x], ctx=mctx, default=b'path'
1237 1237 )
1238 1238 fctxs.extend(ctx[f].introfilectx() for f in ctx.manifest().walk(m))
1239 1239 s = dagop.filerevancestors(fctxs, followfirst)
1240 1240 else:
1241 1241 if revs is None:
1242 1242 revs = baseset([repo[b'.'].rev()])
1243 1243 s = dagop.revancestors(repo, revs, followfirst)
1244 1244
1245 1245 return subset & s
1246 1246
1247 1247
1248 1248 @predicate(b'follow([file[, startrev]])', safe=True)
1249 1249 def follow(repo, subset, x):
1250 1250 """
1251 1251 An alias for ``::.`` (ancestors of the working directory's first parent).
1252 1252 If file pattern is specified, the histories of files matching given
1253 1253 pattern in the revision given by startrev are followed, including copies.
1254 1254 """
1255 1255 return _follow(repo, subset, x, b'follow')
1256 1256
1257 1257
1258 1258 @predicate(b'_followfirst', safe=True)
1259 1259 def _followfirst(repo, subset, x):
1260 1260 # ``followfirst([file[, startrev]])``
1261 1261 # Like ``follow([file[, startrev]])`` but follows only the first parent
1262 1262 # of every revision or file revision.
1263 1263 return _follow(repo, subset, x, b'_followfirst', followfirst=True)
1264 1264
1265 1265
1266 1266 @predicate(
1267 1267 b'followlines(file, fromline:toline[, startrev=., descend=False])',
1268 1268 safe=True,
1269 1269 )
1270 1270 def followlines(repo, subset, x):
1271 1271 """Changesets modifying `file` in line range ('fromline', 'toline').
1272 1272
1273 1273 Line range corresponds to 'file' content at 'startrev' and should hence be
1274 1274 consistent with the file size. If startrev is not specified, the working directory's
1275 1275 parent is used.
1276 1276
1277 1277 By default, ancestors of 'startrev' are returned. If 'descend' is True,
1278 1278 descendants of 'startrev' are returned though renames are (currently) not
1279 1279 followed in this direction.
1280 1280 """
1281 1281 args = getargsdict(x, b'followlines', b'file *lines startrev descend')
1282 1282 if len(args[b'lines']) != 1:
1283 1283 raise error.ParseError(_(b"followlines requires a line range"))
1284 1284
1285 1285 rev = b'.'
1286 1286 if b'startrev' in args:
1287 1287 revs = getset(repo, fullreposet(repo), args[b'startrev'])
1288 1288 if len(revs) != 1:
1289 1289 raise error.ParseError(
1290 1290 # i18n: "followlines" is a keyword
1291 1291 _(b"followlines expects exactly one revision")
1292 1292 )
1293 1293 rev = revs.last()
1294 1294
1295 1295 pat = getstring(args[b'file'], _(b"followlines requires a pattern"))
1296 1296 # i18n: "followlines" is a keyword
1297 1297 msg = _(b"followlines expects exactly one file")
1298 1298 fname = scmutil.parsefollowlinespattern(repo, rev, pat, msg)
1299 1299 fromline, toline = util.processlinerange(
1300 1300 *getintrange(
1301 1301 args[b'lines'][0],
1302 1302 # i18n: "followlines" is a keyword
1303 1303 _(b"followlines expects a line number or a range"),
1304 1304 _(b"line range bounds must be integers"),
1305 1305 )
1306 1306 )
1307 1307
1308 1308 fctx = repo[rev].filectx(fname)
1309 1309 descend = False
1310 1310 if b'descend' in args:
1311 1311 descend = getboolean(
1312 1312 args[b'descend'],
1313 1313 # i18n: "descend" is a keyword
1314 1314 _(b"descend argument must be a boolean"),
1315 1315 )
1316 1316 if descend:
1317 1317 rs = generatorset(
1318 1318 (
1319 1319 c.rev()
1320 1320 for c, _linerange in dagop.blockdescendants(
1321 1321 fctx, fromline, toline
1322 1322 )
1323 1323 ),
1324 1324 iterasc=True,
1325 1325 )
1326 1326 else:
1327 1327 rs = generatorset(
1328 1328 (
1329 1329 c.rev()
1330 1330 for c, _linerange in dagop.blockancestors(
1331 1331 fctx, fromline, toline
1332 1332 )
1333 1333 ),
1334 1334 iterasc=False,
1335 1335 )
1336 1336 return subset & rs
1337 1337
1338 1338
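A similarly hedged sketch for ``followlines()``: the file name and line range are placeholders, and the default ``startrev`` (the working directory's parent) is used.

# Sketch: changesets touching lines 10-20 of README, oldest first.
from mercurial import hg, ui as uimod

repo = hg.repository(uimod.ui.load(), b'/path/to/repo')  # placeholder path
for rev in repo.revs(b'sort(followlines("README", 10:20))'):
    print(rev)
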
1339 1339 @predicate(b'nodefromfile(path)')
1340 1340 def nodefromfile(repo, subset, x):
1341 """
1342 An alias for ``::.`` (ancestors of the working directory's first parent).
1343 If file pattern is specified, the histories of files matching given
1344 pattern in the revision given by startrev are followed, including copies.
1341 """Read a list of nodes from the file at `path`.
1342
1343 This applies `id(LINE)` to each line of the file.
1344
1345 This is useful when the number of nodes you need to specify gets too large
1346 for the command line.
1345 1347 """
1346 1348 path = getstring(x, _(b"nodefromfile requires a file path"))
1347 1349 listed_rev = set()
1348 1350 try:
1349 1351 with pycompat.open(path, 'rb') as f:
1350 1352 for line in f:
1351 1353 n = line.strip()
1352 1354 rn = _node(repo, n)
1353 1355 if rn is not None:
1354 1356 listed_rev.add(rn)
1355 1357 except IOError as exc:
1356 1358 m = _(b'cannot open nodes file "%s": %s')
1357 1359 m %= (path, encoding.strtolocal(exc.strerror))
1358 1360 raise error.Abort(m)
1359 1361 return subset & baseset(listed_rev)
1360 1362
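# A usage sketch, assuming a text file with one full or abbreviated hex node
# per line (the file name is hypothetical):
#
#   hg log -r 'nodefromfile("nodes-to-backout.txt")'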
1361 1363
1362 1364 @predicate(b'all()', safe=True)
1363 1365 def getall(repo, subset, x):
1364 1366 """All changesets, the same as ``0:tip``."""
1365 1367 # i18n: "all" is a keyword
1366 1368 getargs(x, 0, 0, _(b"all takes no arguments"))
1367 1369 return subset & spanset(repo) # drop "null" if any
1368 1370
1369 1371
1370 1372 @predicate(b'grep(regex)', weight=10)
1371 1373 def grep(repo, subset, x):
1372 1374 """Like ``keyword(string)`` but accepts a regex. Use ``grep(r'...')``
1373 1375 to ensure special escape characters are handled correctly. Unlike
1374 1376 ``keyword(string)``, the match is case-sensitive.
1375 1377 """
1376 1378 try:
1377 1379 # i18n: "grep" is a keyword
1378 1380 gr = re.compile(getstring(x, _(b"grep requires a string")))
1379 1381 except re.error as e:
1380 1382 raise error.ParseError(
1381 1383 _(b'invalid match pattern: %s') % stringutil.forcebytestr(e)
1382 1384 )
1383 1385
1384 1386 def matches(x):
1385 1387 c = repo[x]
1386 1388 for e in c.files() + [c.user(), c.description()]:
1387 1389 if gr.search(e):
1388 1390 return True
1389 1391 return False
1390 1392
1391 1393 return subset.filter(matches, condrepr=(b'<grep %r>', gr.pattern))
1392 1394
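# A usage sketch: the pattern is a regular expression and, unlike keyword(),
# the match is case-sensitive; the r'' prefix keeps backslashes literal:
#
#   hg log -r 'grep(r"issue\d+")'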
1393 1395
1394 1396 @predicate(b'_matchfiles', safe=True)
1395 1397 def _matchfiles(repo, subset, x):
1396 1398 # _matchfiles takes a revset list of prefixed arguments:
1397 1399 #
1398 1400 # [p:foo, i:bar, x:baz]
1399 1401 #
1400 1402 # builds a match object from them and filters subset. Allowed
1401 1403 # prefixes are 'p:' for regular patterns, 'i:' for include
1402 1404 # patterns and 'x:' for exclude patterns. Use 'r:' prefix to pass
1403 1405 # a revision identifier, or the empty string to reference the
1404 1406 # working directory, from which the match object is
1405 1407 # initialized. Use 'd:' to set the default matching mode, default
1406 1408 # to 'glob'. At most one 'r:' and 'd:' argument can be passed.
1407 1409
1408 1410 l = getargs(x, 1, -1, b"_matchfiles requires at least one argument")
1409 1411 pats, inc, exc = [], [], []
1410 1412 rev, default = None, None
1411 1413 for arg in l:
1412 1414 s = getstring(arg, b"_matchfiles requires string arguments")
1413 1415 prefix, value = s[:2], s[2:]
1414 1416 if prefix == b'p:':
1415 1417 pats.append(value)
1416 1418 elif prefix == b'i:':
1417 1419 inc.append(value)
1418 1420 elif prefix == b'x:':
1419 1421 exc.append(value)
1420 1422 elif prefix == b'r:':
1421 1423 if rev is not None:
1422 1424 raise error.ParseError(
1423 1425 b'_matchfiles expected at most one revision'
1424 1426 )
1425 1427 if value == b'': # empty means working directory
1426 1428 rev = wdirrev
1427 1429 else:
1428 1430 rev = value
1429 1431 elif prefix == b'd:':
1430 1432 if default is not None:
1431 1433 raise error.ParseError(
1432 1434 b'_matchfiles expected at most one default mode'
1433 1435 )
1434 1436 default = value
1435 1437 else:
1436 1438 raise error.ParseError(b'invalid _matchfiles prefix: %s' % prefix)
1437 1439 if not default:
1438 1440 default = b'glob'
1439 1441 hasset = any(matchmod.patkind(p) == b'set' for p in pats + inc + exc)
1440 1442
1441 1443 mcache = [None]
1442 1444
1443 1445 # This directly reads the changelog data, as creating a changectx for all
1444 1446 # revisions is quite expensive.
1445 1447 getfiles = repo.changelog.readfiles
1446 1448
1447 1449 def matches(x):
1448 1450 if x == wdirrev:
1449 1451 files = repo[x].files()
1450 1452 else:
1451 1453 files = getfiles(x)
1452 1454
1453 1455 if not mcache[0] or (hasset and rev is None):
1454 1456 r = x if rev is None else rev
1455 1457 mcache[0] = matchmod.match(
1456 1458 repo.root,
1457 1459 repo.getcwd(),
1458 1460 pats,
1459 1461 include=inc,
1460 1462 exclude=exc,
1461 1463 ctx=repo[r],
1462 1464 default=default,
1463 1465 )
1464 1466 m = mcache[0]
1465 1467
1466 1468 for f in files:
1467 1469 if m(f):
1468 1470 return True
1469 1471 return False
1470 1472
1471 1473 return subset.filter(
1472 1474 matches,
1473 1475 condrepr=(
1474 1476 b'<matchfiles patterns=%r, include=%r '
1475 1477 b'exclude=%r, default=%r, rev=%r>',
1476 1478 pats,
1477 1479 inc,
1478 1480 exc,
1479 1481 default,
1480 1482 rev,
1481 1483 ),
1482 1484 )
1483 1485
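# A usage sketch of the prefixed-argument form described above (the patterns
# are hypothetical); this is the internal predicate that file() and the
# `hg log` pattern options ultimately build on:
#
#   hg log -r '_matchfiles("p:glob:*.py", "x:tests/", "d:relpath")'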
1484 1486
1485 1487 @predicate(b'file(pattern)', safe=True, weight=10)
1486 1488 def hasfile(repo, subset, x):
1487 1489 """Changesets affecting files matched by pattern.
1488 1490
1489 1491 For a faster but less accurate result, consider using ``filelog()``
1490 1492 instead.
1491 1493
1492 1494 This predicate uses ``glob:`` as the default kind of pattern.
1493 1495 """
1494 1496 # i18n: "file" is a keyword
1495 1497 pat = getstring(x, _(b"file requires a pattern"))
1496 1498 return _matchfiles(repo, subset, (b'string', b'p:' + pat))
1497 1499
1498 1500
1499 1501 @predicate(b'head()', safe=True)
1500 1502 def head(repo, subset, x):
1501 1503 """Changeset is a named branch head."""
1502 1504 # i18n: "head" is a keyword
1503 1505 getargs(x, 0, 0, _(b"head takes no arguments"))
1504 1506 hs = set()
1505 1507 cl = repo.changelog
1506 1508 for ls in repo.branchmap().iterheads():
1507 1509 hs.update(cl.rev(h) for h in ls)
1508 1510 return subset & baseset(hs)
1509 1511
1510 1512
1511 1513 @predicate(b'heads(set)', safe=True, takeorder=True)
1512 1514 def heads(repo, subset, x, order):
1513 1515 """Members of set with no children in set."""
1514 1516 # argument set should never define order
1515 1517 if order == defineorder:
1516 1518 order = followorder
1517 1519 inputset = getset(repo, fullreposet(repo), x, order=order)
1518 1520 wdirparents = None
1519 1521 if wdirrev in inputset:
1520 1522 # a bit slower, but not common so good enough for now
1521 1523 wdirparents = [p.rev() for p in repo[None].parents()]
1522 1524 inputset = set(inputset)
1523 1525 inputset.discard(wdirrev)
1524 1526 heads = repo.changelog.headrevs(inputset)
1525 1527 if wdirparents is not None:
1526 1528 heads.difference_update(wdirparents)
1527 1529 heads.add(wdirrev)
1528 1530 heads = baseset(heads)
1529 1531 return subset & heads
1530 1532
1531 1533
1532 1534 @predicate(b'hidden()', safe=True)
1533 1535 def hidden(repo, subset, x):
1534 1536 """Hidden changesets."""
1535 1537 # i18n: "hidden" is a keyword
1536 1538 getargs(x, 0, 0, _(b"hidden takes no arguments"))
1537 1539 hiddenrevs = repoview.filterrevs(repo, b'visible')
1538 1540 return subset & hiddenrevs
1539 1541
1540 1542
1541 1543 @predicate(b'keyword(string)', safe=True, weight=10)
1542 1544 def keyword(repo, subset, x):
1543 1545 """Search commit message, user name, and names of changed files for
1544 1546 string. The match is case-insensitive.
1545 1547
1546 1548 For a regular expression or case sensitive search of these fields, use
1547 1549 ``grep(regex)``.
1548 1550 """
1549 1551 # i18n: "keyword" is a keyword
1550 1552 kw = encoding.lower(getstring(x, _(b"keyword requires a string")))
1551 1553
1552 1554 def matches(r):
1553 1555 c = repo[r]
1554 1556 return any(
1555 1557 kw in encoding.lower(t)
1556 1558 for t in c.files() + [c.user(), c.description()]
1557 1559 )
1558 1560
1559 1561 return subset.filter(matches, condrepr=(b'<keyword %r>', kw))
1560 1562
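# A usage sketch (the search string is hypothetical); the match is a plain,
# case-insensitive substring search:
#
#   hg log -r 'keyword("issue4321")'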
1561 1563
1562 1564 @predicate(b'limit(set[, n[, offset]])', safe=True, takeorder=True, weight=0)
1563 1565 def limit(repo, subset, x, order):
1564 1566 """First n members of set, defaulting to 1, starting from offset."""
1565 1567 args = getargsdict(x, b'limit', b'set n offset')
1566 1568 if b'set' not in args:
1567 1569 # i18n: "limit" is a keyword
1568 1570 raise error.ParseError(_(b"limit requires one to three arguments"))
1569 1571 # i18n: "limit" is a keyword
1570 1572 lim = getinteger(args.get(b'n'), _(b"limit expects a number"), default=1)
1571 1573 if lim < 0:
1572 1574 raise error.ParseError(_(b"negative number to select"))
1573 1575 # i18n: "limit" is a keyword
1574 1576 ofs = getinteger(
1575 1577 args.get(b'offset'), _(b"limit expects a number"), default=0
1576 1578 )
1577 1579 if ofs < 0:
1578 1580 raise error.ParseError(_(b"negative offset"))
1579 1581 os = getset(repo, fullreposet(repo), args[b'set'])
1580 1582 ls = os.slice(ofs, ofs + lim)
1581 1583 if order == followorder and lim > 1:
1582 1584 return subset & ls
1583 1585 return ls & subset
1584 1586
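# A usage sketch: the first three members of a set, then the next three via
# the offset argument (the inner revset is hypothetical):
#
#   hg log -r 'limit(branch(default), 3)'
#   hg log -r 'limit(branch(default), 3, 3)'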
1585 1587
1586 1588 @predicate(b'last(set, [n])', safe=True, takeorder=True)
1587 1589 def last(repo, subset, x, order):
1588 1590 """Last n members of set, defaulting to 1."""
1589 1591 # i18n: "last" is a keyword
1590 1592 l = getargs(x, 1, 2, _(b"last requires one or two arguments"))
1591 1593 lim = 1
1592 1594 if len(l) == 2:
1593 1595 # i18n: "last" is a keyword
1594 1596 lim = getinteger(l[1], _(b"last expects a number"))
1595 1597 if lim < 0:
1596 1598 raise error.ParseError(_(b"negative number to select"))
1597 1599 os = getset(repo, fullreposet(repo), l[0])
1598 1600 os.reverse()
1599 1601 ls = os.slice(0, lim)
1600 1602 if order == followorder and lim > 1:
1601 1603 return subset & ls
1602 1604 ls.reverse()
1603 1605 return ls & subset
1604 1606
1605 1607
1606 1608 @predicate(b'max(set)', safe=True)
1607 1609 def maxrev(repo, subset, x):
1608 1610 """Changeset with highest revision number in set."""
1609 1611 os = getset(repo, fullreposet(repo), x)
1610 1612 try:
1611 1613 m = os.max()
1612 1614 if m in subset:
1613 1615 return baseset([m], datarepr=(b'<max %r, %r>', subset, os))
1614 1616 except ValueError:
1615 1617 # os.max() throws a ValueError when the collection is empty.
1616 1618 # Same as python's max().
1617 1619 pass
1618 1620 return baseset(datarepr=(b'<max %r, %r>', subset, os))
1619 1621
1620 1622
1621 1623 @predicate(b'merge()', safe=True)
1622 1624 def merge(repo, subset, x):
1623 1625 """Changeset is a merge changeset."""
1624 1626 # i18n: "merge" is a keyword
1625 1627 getargs(x, 0, 0, _(b"merge takes no arguments"))
1626 1628 cl = repo.changelog
1627 1629
1628 1630 def ismerge(r):
1629 1631 try:
1630 1632 return cl.parentrevs(r)[1] != nullrev
1631 1633 except error.WdirUnsupported:
1632 1634 return bool(repo[r].p2())
1633 1635
1634 1636 return subset.filter(ismerge, condrepr=b'<merge>')
1635 1637
1636 1638
1637 1639 @predicate(b'branchpoint()', safe=True)
1638 1640 def branchpoint(repo, subset, x):
1639 1641 """Changesets with more than one child."""
1640 1642 # i18n: "branchpoint" is a keyword
1641 1643 getargs(x, 0, 0, _(b"branchpoint takes no arguments"))
1642 1644 cl = repo.changelog
1643 1645 if not subset:
1644 1646 return baseset()
1645 1647 # XXX this should be 'parentset.min()' assuming 'parentset' is a smartset
1646 1648 # (and if it is not, it should.)
1647 1649 baserev = min(subset)
1648 1650 parentscount = [0] * (len(repo) - baserev)
1649 1651 for r in cl.revs(start=baserev + 1):
1650 1652 for p in cl.parentrevs(r):
1651 1653 if p >= baserev:
1652 1654 parentscount[p - baserev] += 1
1653 1655 return subset.filter(
1654 1656 lambda r: parentscount[r - baserev] > 1, condrepr=b'<branchpoint>'
1655 1657 )
1656 1658
1657 1659
1658 1660 @predicate(b'min(set)', safe=True)
1659 1661 def minrev(repo, subset, x):
1660 1662 """Changeset with lowest revision number in set."""
1661 1663 os = getset(repo, fullreposet(repo), x)
1662 1664 try:
1663 1665 m = os.min()
1664 1666 if m in subset:
1665 1667 return baseset([m], datarepr=(b'<min %r, %r>', subset, os))
1666 1668 except ValueError:
1667 1669 # os.min() throws a ValueError when the collection is empty.
1668 1670 # Same as python's min().
1669 1671 pass
1670 1672 return baseset(datarepr=(b'<min %r, %r>', subset, os))
1671 1673
1672 1674
1673 1675 @predicate(b'modifies(pattern)', safe=True, weight=30)
1674 1676 def modifies(repo, subset, x):
1675 1677 """Changesets modifying files matched by pattern.
1676 1678
1677 1679 The pattern without explicit kind like ``glob:`` is expected to be
1678 1680 relative to the current directory and match against a file or a
1679 1681 directory.
1680 1682 """
1681 1683 # i18n: "modifies" is a keyword
1682 1684 pat = getstring(x, _(b"modifies requires a pattern"))
1683 1685 return checkstatus(repo, subset, pat, 'modified')
1684 1686
1685 1687
1686 1688 @predicate(b'named(namespace)')
1687 1689 def named(repo, subset, x):
1688 1690 """The changesets in a given namespace.
1689 1691
1690 1692 Pattern matching is supported for `namespace`. See
1691 1693 :hg:`help revisions.patterns`.
1692 1694 """
1693 1695 # i18n: "named" is a keyword
1694 1696 args = getargs(x, 1, 1, _(b'named requires a namespace argument'))
1695 1697
1696 1698 ns = getstring(
1697 1699 args[0],
1698 1700 # i18n: "named" is a keyword
1699 1701 _(b'the argument to named must be a string'),
1700 1702 )
1701 1703 kind, pattern, matcher = stringutil.stringmatcher(ns)
1702 1704 namespaces = set()
1703 1705 if kind == b'literal':
1704 1706 if pattern not in repo.names:
1705 1707 raise error.RepoLookupError(
1706 1708 _(b"namespace '%s' does not exist") % ns
1707 1709 )
1708 1710 namespaces.add(repo.names[pattern])
1709 1711 else:
1710 1712 for name, ns in repo.names.items():
1711 1713 if matcher(name):
1712 1714 namespaces.add(ns)
1713 1715
1714 1716 names = set()
1715 1717 for ns in namespaces:
1716 1718 for name in ns.listnames(repo):
1717 1719 if name not in ns.deprecated:
1718 1720 names.update(repo[n].rev() for n in ns.nodes(repo, name))
1719 1721
1720 1722 names -= {nullrev}
1721 1723 return subset & names
1722 1724
1723 1725
1724 1726 def _node(repo, n):
1725 1727 """process a node input"""
1726 1728 rn = None
1727 1729 if len(n) == 2 * repo.nodeconstants.nodelen:
1728 1730 try:
1729 1731 rn = repo.changelog.rev(bin(n))
1730 1732 except error.WdirUnsupported:
1731 1733 rn = wdirrev
1732 1734 except (binascii.Error, LookupError):
1733 1735 rn = None
1734 1736 else:
1735 1737 try:
1736 1738 pm = scmutil.resolvehexnodeidprefix(repo, n)
1737 1739 if pm is not None:
1738 1740 rn = repo.changelog.rev(pm)
1739 1741 except LookupError:
1740 1742 pass
1741 1743 except error.WdirUnsupported:
1742 1744 rn = wdirrev
1743 1745 return rn
1744 1746
1745 1747
1746 1748 @predicate(b'id(string)', safe=True)
1747 1749 def node_(repo, subset, x):
1748 1750 """Revision non-ambiguously specified by the given hex string prefix."""
1749 1751 # i18n: "id" is a keyword
1750 1752 l = getargs(x, 1, 1, _(b"id requires one argument"))
1751 1753 # i18n: "id" is a keyword
1752 1754 n = getstring(l[0], _(b"id requires a string"))
1753 1755 rn = _node(repo, n)
1754 1756
1755 1757 if rn is None:
1756 1758 return baseset()
1757 1759 result = baseset([rn])
1758 1760 return result & subset
1759 1761
1760 1762
1761 1763 @predicate(b'none()', safe=True)
1762 1764 def none(repo, subset, x):
1763 1765 """No changesets."""
1764 1766 # i18n: "none" is a keyword
1765 1767 getargs(x, 0, 0, _(b"none takes no arguments"))
1766 1768 return baseset()
1767 1769
1768 1770
1769 1771 @predicate(b'obsolete()', safe=True)
1770 1772 def obsolete(repo, subset, x):
1771 1773 """Mutable changeset with a newer version. (EXPERIMENTAL)"""
1772 1774 # i18n: "obsolete" is a keyword
1773 1775 getargs(x, 0, 0, _(b"obsolete takes no arguments"))
1774 1776 obsoletes = obsmod.getrevs(repo, b'obsolete')
1775 1777 return subset & obsoletes
1776 1778
1777 1779
1778 1780 @predicate(b'only(set, [set])', safe=True)
1779 1781 def only(repo, subset, x):
1780 1782 """Changesets that are ancestors of the first set that are not ancestors
1781 1783 of any other head in the repo. If a second set is specified, the result
1782 1784 is ancestors of the first set that are not ancestors of the second set
1783 1785 (i.e. ::<set1> - ::<set2>).
1784 1786 """
1785 1787 cl = repo.changelog
1786 1788 # i18n: "only" is a keyword
1787 1789 args = getargs(x, 1, 2, _(b'only takes one or two arguments'))
1788 1790 include = getset(repo, fullreposet(repo), args[0])
1789 1791 if len(args) == 1:
1790 1792 if not include:
1791 1793 return baseset()
1792 1794
1793 1795 descendants = set(dagop.revdescendants(repo, include, False))
1794 1796 exclude = [
1795 1797 rev
1796 1798 for rev in cl.headrevs()
1797 1799 if rev not in descendants and rev not in include
1798 1800 ]
1799 1801 else:
1800 1802 exclude = getset(repo, fullreposet(repo), args[1])
1801 1803
1802 1804 results = set(cl.findmissingrevs(common=exclude, heads=include))
1803 1805 # XXX we should turn this into a baseset instead of a set, smartset may do
1804 1806 # some optimizations from the fact this is a baseset.
1805 1807 return subset & results
1806 1808
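# A usage sketch (the branch names are hypothetical); with two arguments this
# is the same as "::feature - ::default":
#
#   hg log -r 'only(feature, default)'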
1807 1809
1808 1810 @predicate(b'origin([set])', safe=True)
1809 1811 def origin(repo, subset, x):
1810 1812 """
1811 1813 Changesets that were specified as a source for the grafts, transplants or
1812 1814 rebases that created the given revisions. Omitting the optional set is the
1813 1815 same as passing all(). If a changeset created by these operations is itself
1814 1816 specified as a source for one of these operations, only the source changeset
1815 1817 for the first operation is selected.
1816 1818 """
1817 1819 if x is not None:
1818 1820 dests = getset(repo, fullreposet(repo), x)
1819 1821 else:
1820 1822 dests = fullreposet(repo)
1821 1823
1822 1824 def _firstsrc(rev):
1823 1825 src = _getrevsource(repo, rev)
1824 1826 if src is None:
1825 1827 return None
1826 1828
1827 1829 while True:
1828 1830 prev = _getrevsource(repo, src)
1829 1831
1830 1832 if prev is None:
1831 1833 return src
1832 1834 src = prev
1833 1835
1834 1836 o = {_firstsrc(r) for r in dests}
1835 1837 o -= {None}
1836 1838 # XXX we should turn this into a baseset instead of a set, smartset may do
1837 1839 # some optimizations from the fact this is a baseset.
1838 1840 return subset & o
1839 1841
1840 1842
1841 1843 @predicate(b'outgoing([path])', safe=False, weight=10)
1842 1844 def outgoing(repo, subset, x):
1843 1845 """Changesets not found in the specified destination repository, or the
1844 1846 default push location.
1845 1847
1846 1848 If the location resolves to multiple repositories, the union of all
1847 1849 outgoing changesets will be used.
1848 1850 """
1849 1851 # Avoid cycles.
1850 1852 from . import (
1851 1853 discovery,
1852 1854 hg,
1853 1855 )
1854 1856
1855 1857 # i18n: "outgoing" is a keyword
1856 1858 l = getargs(x, 0, 1, _(b"outgoing takes one or no arguments"))
1857 1859 # i18n: "outgoing" is a keyword
1858 1860 dest = (
1859 1861 l and getstring(l[0], _(b"outgoing requires a repository path")) or b''
1860 1862 )
1861 1863 if dest:
1862 1864 dests = [dest]
1863 1865 else:
1864 1866 dests = []
1865 1867 missing = set()
1866 1868 for path in urlutil.get_push_paths(repo, repo.ui, dests):
1867 1869 dest = path.pushloc or path.loc
1868 1870 branches = path.branch, []
1869 1871
1870 1872 revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
1871 1873 if revs:
1872 1874 revs = [repo.lookup(rev) for rev in revs]
1873 1875 other = hg.peer(repo, {}, dest)
1874 1876 try:
1875 1877 with repo.ui.silent():
1876 1878 outgoing = discovery.findcommonoutgoing(
1877 1879 repo, other, onlyheads=revs
1878 1880 )
1879 1881 finally:
1880 1882 other.close()
1881 1883 missing.update(outgoing.missing)
1882 1884 cl = repo.changelog
1883 1885 o = {cl.rev(r) for r in missing}
1884 1886 return subset & o
1885 1887
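# A usage sketch (the path alias is hypothetical); with no argument the
# default push location is used:
#
#   hg log -r 'outgoing()'
#   hg log -r 'outgoing("my-fork")'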
1886 1888
1887 1889 @predicate(b'p1([set])', safe=True)
1888 1890 def p1(repo, subset, x):
1889 1891 """First parent of changesets in set, or the working directory."""
1890 1892 if x is None:
1891 1893 p = repo[x].p1().rev()
1892 1894 if p >= 0:
1893 1895 return subset & baseset([p])
1894 1896 return baseset()
1895 1897
1896 1898 ps = set()
1897 1899 cl = repo.changelog
1898 1900 for r in getset(repo, fullreposet(repo), x):
1899 1901 try:
1900 1902 ps.add(cl.parentrevs(r)[0])
1901 1903 except error.WdirUnsupported:
1902 1904 ps.add(repo[r].p1().rev())
1903 1905 ps -= {nullrev}
1904 1906 # XXX we should turn this into a baseset instead of a set, smartset may do
1905 1907 # some optimizations from the fact this is a baseset.
1906 1908 return subset & ps
1907 1909
1908 1910
1909 1911 @predicate(b'p2([set])', safe=True)
1910 1912 def p2(repo, subset, x):
1911 1913 """Second parent of changesets in set, or the working directory."""
1912 1914 if x is None:
1913 1915 ps = repo[x].parents()
1914 1916 try:
1915 1917 p = ps[1].rev()
1916 1918 if p >= 0:
1917 1919 return subset & baseset([p])
1918 1920 return baseset()
1919 1921 except IndexError:
1920 1922 return baseset()
1921 1923
1922 1924 ps = set()
1923 1925 cl = repo.changelog
1924 1926 for r in getset(repo, fullreposet(repo), x):
1925 1927 try:
1926 1928 ps.add(cl.parentrevs(r)[1])
1927 1929 except error.WdirUnsupported:
1928 1930 parents = repo[r].parents()
1929 1931 if len(parents) == 2:
1930 1932 ps.add(parents[1].rev())  # store the rev, not the changectx
1931 1933 ps -= {nullrev}
1932 1934 # XXX we should turn this into a baseset instead of a set, smartset may do
1933 1935 # some optimizations from the fact this is a baseset.
1934 1936 return subset & ps
1935 1937
1936 1938
1937 1939 def parentpost(repo, subset, x, order):
1938 1940 return p1(repo, subset, x)
1939 1941
1940 1942
1941 1943 @predicate(b'parents([set])', safe=True)
1942 1944 def parents(repo, subset, x):
1943 1945 """
1944 1946 The set of all parents for all changesets in set, or the working directory.
1945 1947 """
1946 1948 if x is None:
1947 1949 ps = {p.rev() for p in repo[x].parents()}
1948 1950 else:
1949 1951 ps = set()
1950 1952 cl = repo.changelog
1951 1953 up = ps.update
1952 1954 parentrevs = cl.parentrevs
1953 1955 for r in getset(repo, fullreposet(repo), x):
1954 1956 try:
1955 1957 up(parentrevs(r))
1956 1958 except error.WdirUnsupported:
1957 1959 up(p.rev() for p in repo[r].parents())
1958 1960 ps -= {nullrev}
1959 1961 return subset & ps
1960 1962
1961 1963
1962 1964 def _phase(repo, subset, *targets):
1963 1965 """helper to select all rev in <targets> phases"""
1964 1966 return repo._phasecache.getrevset(repo, targets, subset)
1965 1967
1966 1968
1967 1969 @predicate(b'_phase(idx)', safe=True)
1968 1970 def phase(repo, subset, x):
1969 1971 l = getargs(x, 1, 1, b"_phase requires one argument")
1970 1972 target = getinteger(l[0], b"_phase expects a number")
1971 1973 return _phase(repo, subset, target)
1972 1974
1973 1975
1974 1976 @predicate(b'draft()', safe=True)
1975 1977 def draft(repo, subset, x):
1976 1978 """Changeset in draft phase."""
1977 1979 # i18n: "draft" is a keyword
1978 1980 getargs(x, 0, 0, _(b"draft takes no arguments"))
1979 1981 target = phases.draft
1980 1982 return _phase(repo, subset, target)
1981 1983
1982 1984
1983 1985 @predicate(b'secret()', safe=True)
1984 1986 def secret(repo, subset, x):
1985 1987 """Changeset in secret phase."""
1986 1988 # i18n: "secret" is a keyword
1987 1989 getargs(x, 0, 0, _(b"secret takes no arguments"))
1988 1990 target = phases.secret
1989 1991 return _phase(repo, subset, target)
1990 1992
1991 1993
1992 1994 @predicate(b'stack([revs])', safe=True)
1993 1995 def stack(repo, subset, x):
1994 1996 """Experimental revset for the stack of changesets or working directory
1995 1997 parent. (EXPERIMENTAL)
1996 1998 """
1997 1999 if x is None:
1998 2000 stacks = stackmod.getstack(repo)
1999 2001 else:
2000 2002 stacks = smartset.baseset([])
2001 2003 for revision in getset(repo, fullreposet(repo), x):
2002 2004 currentstack = stackmod.getstack(repo, revision)
2003 2005 stacks = stacks + currentstack
2004 2006
2005 2007 return subset & stacks
2006 2008
2007 2009
2008 2010 def parentspec(repo, subset, x, n, order):
2009 2011 """``set^0``
2010 2012 The set.
2011 2013 ``set^1`` (or ``set^``), ``set^2``
2012 2014 First or second parent, respectively, of all changesets in set.
2013 2015 """
2014 2016 try:
2015 2017 n = int(n[1])
2016 2018 if n not in (0, 1, 2):
2017 2019 raise ValueError
2018 2020 except (TypeError, ValueError):
2019 2021 raise error.ParseError(_(b"^ expects a number 0, 1, or 2"))
2020 2022 ps = set()
2021 2023 cl = repo.changelog
2022 2024 for r in getset(repo, fullreposet(repo), x):
2023 2025 if n == 0:
2024 2026 ps.add(r)
2025 2027 elif n == 1:
2026 2028 try:
2027 2029 ps.add(cl.parentrevs(r)[0])
2028 2030 except error.WdirUnsupported:
2029 2031 ps.add(repo[r].p1().rev())
2030 2032 else:
2031 2033 try:
2032 2034 parents = cl.parentrevs(r)
2033 2035 if parents[1] != nullrev:
2034 2036 ps.add(parents[1])
2035 2037 except error.WdirUnsupported:
2036 2038 parents = repo[r].parents()
2037 2039 if len(parents) == 2:
2038 2040 ps.add(parents[1].rev())
2039 2041 return subset & ps
2040 2042
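# A usage sketch of the ^n operator handled above: the second parent of a
# merge, and the changeset itself via ^0:
#
#   hg log -r 'tip^2'
#   hg log -r 'tip^0'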
2041 2043
2042 2044 @predicate(b'present(set)', safe=True, takeorder=True)
2043 2045 def present(repo, subset, x, order):
2044 2046 """An empty set, if any revision in set isn't found; otherwise,
2045 2047 all revisions in set.
2046 2048
2047 2049 If any of the specified revisions is not present in the local repository,
2048 2050 the query is normally aborted. But this predicate allows the query
2049 2051 to continue even in such cases.
2050 2052 """
2051 2053 try:
2052 2054 return getset(repo, subset, x, order)
2053 2055 except error.RepoLookupError:
2054 2056 return baseset()
2055 2057
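# A usage sketch (the hashes are hypothetical); the query keeps working even
# if one of the listed revisions is missing locally:
#
#   hg log -r 'present(1f0dee641bb7 or 25e8e6a9a16f)'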
2056 2058
2057 2059 # for internal use
2058 2060 @predicate(b'_notpublic', safe=True)
2059 2061 def _notpublic(repo, subset, x):
2060 2062 getargs(x, 0, 0, b"_notpublic takes no arguments")
2061 2063 return _phase(repo, subset, phases.draft, phases.secret)
2062 2064
2063 2065
2064 2066 # for internal use
2065 2067 @predicate(b'_phaseandancestors(phasename, set)', safe=True)
2066 2068 def _phaseandancestors(repo, subset, x):
2067 2069 # equivalent to (phasename() & ancestors(set)) but more efficient
2068 2070 # phasename could be one of 'draft', 'secret', or '_notpublic'
2069 2071 args = getargs(x, 2, 2, b"_phaseandancestors requires two arguments")
2070 2072 phasename = getsymbol(args[0])
2071 2073 s = getset(repo, fullreposet(repo), args[1])
2072 2074
2073 2075 draft = phases.draft
2074 2076 secret = phases.secret
2075 2077 phasenamemap = {
2076 2078 b'_notpublic': draft,
2077 2079 b'draft': draft, # follow secret's ancestors
2078 2080 b'secret': secret,
2079 2081 }
2080 2082 if phasename not in phasenamemap:
2081 2083 raise error.ParseError(b'%r is not a valid phasename' % phasename)
2082 2084
2083 2085 minimalphase = phasenamemap[phasename]
2084 2086 getphase = repo._phasecache.phase
2085 2087
2086 2088 def cutfunc(rev):
2087 2089 return getphase(repo, rev) < minimalphase
2088 2090
2089 2091 revs = dagop.revancestors(repo, s, cutfunc=cutfunc)
2090 2092
2091 2093 if phasename == b'draft': # need to remove secret changesets
2092 2094 revs = revs.filter(lambda r: getphase(repo, r) == draft)
2093 2095 return subset & revs
2094 2096
2095 2097
2096 2098 @predicate(b'public()', safe=True)
2097 2099 def public(repo, subset, x):
2098 2100 """Changeset in public phase."""
2099 2101 # i18n: "public" is a keyword
2100 2102 getargs(x, 0, 0, _(b"public takes no arguments"))
2101 2103 return _phase(repo, subset, phases.public)
2102 2104
2103 2105
2104 2106 @predicate(b'remote([id [,path]])', safe=False)
2105 2107 def remote(repo, subset, x):
2106 2108 """Local revision that corresponds to the given identifier in a
2107 2109 remote repository, if present. Here, the '.' identifier is a
2108 2110 synonym for the current local branch.
2109 2111 """
2110 2112
2111 2113 from . import hg # avoid start-up nasties
2112 2114
2113 2115 # i18n: "remote" is a keyword
2114 2116 l = getargs(x, 0, 2, _(b"remote takes zero, one, or two arguments"))
2115 2117
2116 2118 q = b'.'
2117 2119 if len(l) > 0:
2118 2120 # i18n: "remote" is a keyword
2119 2121 q = getstring(l[0], _(b"remote requires a string id"))
2120 2122 if q == b'.':
2121 2123 q = repo[b'.'].branch()
2122 2124
2123 2125 dest = b''
2124 2126 if len(l) > 1:
2125 2127 # i18n: "remote" is a keyword
2126 2128 dest = getstring(l[1], _(b"remote requires a repository path"))
2127 2129 if not dest:
2128 2130 dest = b'default'
2129 2131 dest, branches = urlutil.get_unique_pull_path(
2130 2132 b'remote', repo, repo.ui, dest
2131 2133 )
2132 2134
2133 2135 other = hg.peer(repo, {}, dest)
2134 2136 n = other.lookup(q)
2135 2137 if n in repo:
2136 2138 r = repo[n].rev()
2137 2139 if r in subset:
2138 2140 return baseset([r])
2139 2141 return baseset()
2140 2142
2141 2143
2142 2144 @predicate(b'removes(pattern)', safe=True, weight=30)
2143 2145 def removes(repo, subset, x):
2144 2146 """Changesets which remove files matching pattern.
2145 2147
2146 2148 The pattern without explicit kind like ``glob:`` is expected to be
2147 2149 relative to the current directory and match against a file or a
2148 2150 directory.
2149 2151 """
2150 2152 # i18n: "removes" is a keyword
2151 2153 pat = getstring(x, _(b"removes requires a pattern"))
2152 2154 return checkstatus(repo, subset, pat, 'removed')
2153 2155
2154 2156
2155 2157 @predicate(b'rev(number)', safe=True)
2156 2158 def rev(repo, subset, x):
2157 2159 """Revision with the given numeric identifier."""
2158 2160 try:
2159 2161 return _rev(repo, subset, x)
2160 2162 except error.RepoLookupError:
2161 2163 return baseset()
2162 2164
2163 2165
2164 2166 @predicate(b'_rev(number)', safe=True)
2165 2167 def _rev(repo, subset, x):
2166 2168 # internal version of "rev(x)" that raise error if "x" is invalid
2167 2169 # i18n: "rev" is a keyword
2168 2170 l = getargs(x, 1, 1, _(b"rev requires one argument"))
2169 2171 try:
2170 2172 # i18n: "rev" is a keyword
2171 2173 l = int(getstring(l[0], _(b"rev requires a number")))
2172 2174 except (TypeError, ValueError):
2173 2175 # i18n: "rev" is a keyword
2174 2176 raise error.ParseError(_(b"rev expects a number"))
2175 2177 if l not in _virtualrevs:
2176 2178 try:
2177 2179 repo.changelog.node(l) # check that the rev exists
2178 2180 except IndexError:
2179 2181 raise error.RepoLookupError(_(b"unknown revision '%d'") % l)
2180 2182 return subset & baseset([l])
2181 2183
2182 2184
2183 2185 @predicate(b'revset(set)', safe=True, takeorder=True)
2184 2186 def revsetpredicate(repo, subset, x, order):
2185 2187 """Strictly interpret the content as a revset.
2186 2188
2187 2189 The content of this special predicate will be strictly interpreted as a
2188 2190 revset. For example, ``revset(id(0))`` will be interpreted as "id(0)"
2189 2191 without possible ambiguity with an "id(0)" bookmark or tag.
2190 2192 """
2191 2193 return getset(repo, subset, x, order)
2192 2194
2193 2195
2194 2196 @predicate(b'matching(revision [, field])', safe=True)
2195 2197 def matching(repo, subset, x):
2196 2198 """Changesets in which a given set of fields match the set of fields in the
2197 2199 selected revision or set.
2198 2200
2199 2201 To match more than one field pass the list of fields to match separated
2200 2202 by spaces (e.g. ``author description``).
2201 2203
2202 2204 Valid fields are most regular revision fields and some special fields.
2203 2205
2204 2206 Regular revision fields are ``description``, ``author``, ``branch``,
2205 2207 ``date``, ``files``, ``phase``, ``parents``, ``substate``, ``user``
2206 2208 and ``diff``.
2207 2209 Note that ``author`` and ``user`` are synonyms. ``diff`` refers to the
2208 2210 contents of the revision. Two revisions matching their ``diff`` will
2209 2211 also match their ``files``.
2210 2212
2211 2213 Special fields are ``summary`` and ``metadata``:
2212 2214 ``summary`` matches the first line of the description.
2213 2215 ``metadata`` is equivalent to matching ``description user date``
2214 2216 (i.e. it matches the main metadata fields).
2215 2217
2216 2218 ``metadata`` is the default field which is used when no fields are
2217 2219 specified. You can match more than one field at a time.
2218 2220 """
2219 2221 # i18n: "matching" is a keyword
2220 2222 l = getargs(x, 1, 2, _(b"matching takes 1 or 2 arguments"))
2221 2223
2222 2224 revs = getset(repo, fullreposet(repo), l[0])
2223 2225
2224 2226 fieldlist = [b'metadata']
2225 2227 if len(l) > 1:
2226 2228 fieldlist = getstring(
2227 2229 l[1],
2228 2230 # i18n: "matching" is a keyword
2229 2231 _(b"matching requires a string as its second argument"),
2230 2232 ).split()
2231 2233
2232 2234 # Make sure that there are no repeated fields,
2233 2235 # expand the 'special' 'metadata' field type
2234 2236 # and check the 'files' whenever we check the 'diff'
2235 2237 fields = []
2236 2238 for field in fieldlist:
2237 2239 if field == b'metadata':
2238 2240 fields += [b'user', b'description', b'date']
2239 2241 elif field == b'diff':
2240 2242 # a revision matching the diff must also match the files
2241 2243 # since matching the diff is very costly, make sure to
2242 2244 # also match the files first
2243 2245 fields += [b'files', b'diff']
2244 2246 else:
2245 2247 if field == b'author':
2246 2248 field = b'user'
2247 2249 fields.append(field)
2248 2250 fields = set(fields)
2249 2251 if b'summary' in fields and b'description' in fields:
2250 2252 # If a revision matches its description it also matches its summary
2251 2253 fields.discard(b'summary')
2252 2254
2253 2255 # We may want to match more than one field
2254 2256 # Not all fields take the same amount of time to be matched
2255 2257 # Sort the selected fields in order of increasing matching cost
2256 2258 fieldorder = [
2257 2259 b'phase',
2258 2260 b'parents',
2259 2261 b'user',
2260 2262 b'date',
2261 2263 b'branch',
2262 2264 b'summary',
2263 2265 b'files',
2264 2266 b'description',
2265 2267 b'substate',
2266 2268 b'diff',
2267 2269 ]
2268 2270
2269 2271 def fieldkeyfunc(f):
2270 2272 try:
2271 2273 return fieldorder.index(f)
2272 2274 except ValueError:
2273 2275 # assume an unknown field is very costly
2274 2276 return len(fieldorder)
2275 2277
2276 2278 fields = list(fields)
2277 2279 fields.sort(key=fieldkeyfunc)
2278 2280
2279 2281 # Each field will be matched with its own "getfield" function
2280 2282 # which will be added to the getfieldfuncs array of functions
2281 2283 getfieldfuncs = []
2282 2284 _funcs = {
2283 2285 b'user': lambda r: repo[r].user(),
2284 2286 b'branch': lambda r: repo[r].branch(),
2285 2287 b'date': lambda r: repo[r].date(),
2286 2288 b'description': lambda r: repo[r].description(),
2287 2289 b'files': lambda r: repo[r].files(),
2288 2290 b'parents': lambda r: repo[r].parents(),
2289 2291 b'phase': lambda r: repo[r].phase(),
2290 2292 b'substate': lambda r: repo[r].substate,
2291 2293 b'summary': lambda r: repo[r].description().splitlines()[0],
2292 2294 b'diff': lambda r: list(
2293 2295 repo[r].diff(opts=diffutil.diffallopts(repo.ui, {b'git': True}))
2294 2296 ),
2295 2297 }
2296 2298 for info in fields:
2297 2299 getfield = _funcs.get(info, None)
2298 2300 if getfield is None:
2299 2301 raise error.ParseError(
2300 2302 # i18n: "matching" is a keyword
2301 2303 _(b"unexpected field name passed to matching: %s")
2302 2304 % info
2303 2305 )
2304 2306 getfieldfuncs.append(getfield)
2305 2307 # convert the getfield array of functions into a "getinfo" function
2306 2308 # which returns an array of field values (or a single value if there
2307 2309 # is only one field to match)
2308 2310 getinfo = lambda r: [f(r) for f in getfieldfuncs]
2309 2311
2310 2312 def matches(x):
2311 2313 for rev in revs:
2312 2314 target = getinfo(rev)
2313 2315 match = True
2314 2316 for n, f in enumerate(getfieldfuncs):
2315 2317 if target[n] != f(x):
2316 2318 match = False
2317 2319 if match:
2318 2320 return True
2319 2321 return False
2320 2322
2321 2323 return subset.filter(matches, condrepr=(b'<matching%r %r>', fields, revs))
2322 2324
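# A usage sketch: changesets sharing author and date with tip, then the
# default ``metadata`` comparison:
#
#   hg log -r 'matching(tip, "author date")'
#   hg log -r 'matching(tip)'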
2323 2325
2324 2326 @predicate(b'reverse(set)', safe=True, takeorder=True, weight=0)
2325 2327 def reverse(repo, subset, x, order):
2326 2328 """Reverse order of set."""
2327 2329 l = getset(repo, subset, x, order)
2328 2330 if order == defineorder:
2329 2331 l.reverse()
2330 2332 return l
2331 2333
2332 2334
2333 2335 @predicate(b'roots(set)', safe=True)
2334 2336 def roots(repo, subset, x):
2335 2337 """Changesets in set with no parent changeset in set."""
2336 2338 s = getset(repo, fullreposet(repo), x)
2337 2339 parents = repo.changelog.parentrevs
2338 2340
2339 2341 def filter(r):
2340 2342 for p in parents(r):
2341 2343 if 0 <= p and p in s:
2342 2344 return False
2343 2345 return True
2344 2346
2345 2347 return subset & s.filter(filter, condrepr=b'<roots>')
2346 2348
2347 2349
2348 2350 _sortkeyfuncs = {
2349 2351 b'rev': scmutil.intrev,
2350 2352 b'branch': lambda c: c.branch(),
2351 2353 b'desc': lambda c: c.description(),
2352 2354 b'user': lambda c: c.user(),
2353 2355 b'author': lambda c: c.user(),
2354 2356 b'date': lambda c: c.date()[0],
2355 2357 b'node': scmutil.binnode,
2356 2358 }
2357 2359
2358 2360
2359 2361 def _getsortargs(x):
2360 2362 """Parse sort options into (set, [(key, reverse)], opts)"""
2361 2363 args = getargsdict(x, b'sort', b'set keys topo.firstbranch')
2362 2364 if b'set' not in args:
2363 2365 # i18n: "sort" is a keyword
2364 2366 raise error.ParseError(_(b'sort requires one or two arguments'))
2365 2367 keys = b"rev"
2366 2368 if b'keys' in args:
2367 2369 # i18n: "sort" is a keyword
2368 2370 keys = getstring(args[b'keys'], _(b"sort spec must be a string"))
2369 2371
2370 2372 keyflags = []
2371 2373 for k in keys.split():
2372 2374 fk = k
2373 2375 reverse = k.startswith(b'-')
2374 2376 if reverse:
2375 2377 k = k[1:]
2376 2378 if k not in _sortkeyfuncs and k != b'topo':
2377 2379 raise error.ParseError(
2378 2380 _(b"unknown sort key %r") % pycompat.bytestr(fk)
2379 2381 )
2380 2382 keyflags.append((k, reverse))
2381 2383
2382 2384 if len(keyflags) > 1 and any(k == b'topo' for k, reverse in keyflags):
2383 2385 # i18n: "topo" is a keyword
2384 2386 raise error.ParseError(
2385 2387 _(b'topo sort order cannot be combined with other sort keys')
2386 2388 )
2387 2389
2388 2390 opts = {}
2389 2391 if b'topo.firstbranch' in args:
2390 2392 if any(k == b'topo' for k, reverse in keyflags):
2391 2393 opts[b'topo.firstbranch'] = args[b'topo.firstbranch']
2392 2394 else:
2393 2395 # i18n: "topo" and "topo.firstbranch" are keywords
2394 2396 raise error.ParseError(
2395 2397 _(
2396 2398 b'topo.firstbranch can only be used '
2397 2399 b'when using the topo sort key'
2398 2400 )
2399 2401 )
2400 2402
2401 2403 return args[b'set'], keyflags, opts
2402 2404
2403 2405
2404 2406 @predicate(
2405 2407 b'sort(set[, [-]key... [, ...]])', safe=True, takeorder=True, weight=10
2406 2408 )
2407 2409 def sort(repo, subset, x, order):
2408 2410 """Sort set by keys. The default sort order is ascending, specify a key
2409 2411 as ``-key`` to sort in descending order.
2410 2412
2411 2413 The keys can be:
2412 2414
2413 2415 - ``rev`` for the revision number,
2414 2416 - ``branch`` for the branch name,
2415 2417 - ``desc`` for the commit message (description),
2416 2418 - ``user`` for user name (``author`` can be used as an alias),
2417 2419 - ``date`` for the commit date,
2418 2420 - ``topo`` for a reverse topological sort,
2419 2421 - ``node`` for the nodeid of the revision.
2420 2422
2421 2423 The ``topo`` sort order cannot be combined with other sort keys. This sort
2422 2424 takes one optional argument, ``topo.firstbranch``, which takes a revset that
2423 2425 specifies what topological branches to prioritize in the sort.
2424 2426
2425 2427 """
2426 2428 s, keyflags, opts = _getsortargs(x)
2427 2429 revs = getset(repo, subset, s, order)
2428 2430
2429 2431 if not keyflags or order != defineorder:
2430 2432 return revs
2431 2433 if len(keyflags) == 1 and keyflags[0][0] == b"rev":
2432 2434 revs.sort(reverse=keyflags[0][1])
2433 2435 return revs
2434 2436 elif keyflags[0][0] == b"topo":
2435 2437 firstbranch = ()
2436 2438 if b'topo.firstbranch' in opts:
2437 2439 firstbranch = getset(repo, subset, opts[b'topo.firstbranch'])
2438 2440 revs = baseset(
2439 2441 dagop.toposort(revs, repo.changelog.parentrevs, firstbranch),
2440 2442 istopo=True,
2441 2443 )
2442 2444 if keyflags[0][1]:
2443 2445 revs.reverse()
2444 2446 return revs
2445 2447
2446 2448 # sort() is guaranteed to be stable
2447 2449 ctxs = [repo[r] for r in revs]
2448 2450 for k, reverse in reversed(keyflags):
2449 2451 ctxs.sort(key=_sortkeyfuncs[k], reverse=reverse)
2450 2452 return baseset([c.rev() for c in ctxs])
2451 2453
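# A usage sketch: newest first by date with ties broken by user, then a topo
# sort seeded from a branch (the branch name is hypothetical):
#
#   hg log -r 'sort(all(), "-date user")'
#   hg log -r 'sort(all(), topo, topo.firstbranch=default)'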
2452 2454
2453 2455 @predicate(b'subrepo([pattern])')
2454 2456 def subrepo(repo, subset, x):
2455 2457 """Changesets that add, modify or remove the given subrepo. If no subrepo
2456 2458 pattern is named, any subrepo changes are returned.
2457 2459 """
2458 2460 # i18n: "subrepo" is a keyword
2459 2461 args = getargs(x, 0, 1, _(b'subrepo takes at most one argument'))
2460 2462 pat = None
2461 2463 if len(args) != 0:
2462 2464 pat = getstring(args[0], _(b"subrepo requires a pattern"))
2463 2465
2464 2466 m = matchmod.exact([b'.hgsubstate'])
2465 2467
2466 2468 def submatches(names):
2467 2469 k, p, m = stringutil.stringmatcher(pat)
2468 2470 for name in names:
2469 2471 if m(name):
2470 2472 yield name
2471 2473
2472 2474 def matches(x):
2473 2475 c = repo[x]
2474 2476 s = repo.status(c.p1().node(), c.node(), match=m)
2475 2477
2476 2478 if pat is None:
2477 2479 return s.added or s.modified or s.removed
2478 2480
2479 2481 if s.added:
2480 2482 return any(submatches(c.substate.keys()))
2481 2483
2482 2484 if s.modified:
2483 2485 subs = set(c.p1().substate.keys())
2484 2486 subs.update(c.substate.keys())
2485 2487
2486 2488 for path in submatches(subs):
2487 2489 if c.p1().substate.get(path) != c.substate.get(path):
2488 2490 return True
2489 2491
2490 2492 if s.removed:
2491 2493 return any(submatches(c.p1().substate.keys()))
2492 2494
2493 2495 return False
2494 2496
2495 2497 return subset.filter(matches, condrepr=(b'<subrepo %r>', pat))
2496 2498
2497 2499
2498 2500 def _mapbynodefunc(repo, s, f):
2499 2501 """(repo, smartset, [node] -> [node]) -> smartset
2500 2502
2501 2503 Helper method to map a smartset to another smartset given a function only
2502 2504 talking about nodes. Handles converting between rev numbers and nodes, and
2503 2505 filtering.
2504 2506 """
2505 2507 cl = repo.unfiltered().changelog
2506 2508 torev = cl.index.get_rev
2507 2509 tonode = cl.node
2508 2510 result = {torev(n) for n in f(tonode(r) for r in s)}
2509 2511 result.discard(None)
2510 2512 return smartset.baseset(result - repo.changelog.filteredrevs)
2511 2513
2512 2514
2513 2515 @predicate(b'successors(set)', safe=True)
2514 2516 def successors(repo, subset, x):
2515 2517 """All successors for set, including the given set themselves.
2516 2518 (EXPERIMENTAL)"""
2517 2519 s = getset(repo, fullreposet(repo), x)
2518 2520 f = lambda nodes: obsutil.allsuccessors(repo.obsstore, nodes)
2519 2521 d = _mapbynodefunc(repo, s, f)
2520 2522 return subset & d
2521 2523
2522 2524
2523 2525 def _substringmatcher(pattern, casesensitive=True):
2524 2526 kind, pattern, matcher = stringutil.stringmatcher(
2525 2527 pattern, casesensitive=casesensitive
2526 2528 )
2527 2529 if kind == b'literal':
2528 2530 if not casesensitive:
2529 2531 pattern = encoding.lower(pattern)
2530 2532 matcher = lambda s: pattern in encoding.lower(s)
2531 2533 else:
2532 2534 matcher = lambda s: pattern in s
2533 2535 return kind, pattern, matcher
2534 2536
2535 2537
2536 2538 @predicate(b'tag([name])', safe=True)
2537 2539 def tag(repo, subset, x):
2538 2540 """The specified tag by name, or all tagged revisions if no name is given.
2539 2541
2540 2542 Pattern matching is supported for `name`. See
2541 2543 :hg:`help revisions.patterns`.
2542 2544 """
2543 2545 # i18n: "tag" is a keyword
2544 2546 args = getargs(x, 0, 1, _(b"tag takes one or no arguments"))
2545 2547 cl = repo.changelog
2546 2548 if args:
2547 2549 pattern = getstring(
2548 2550 args[0],
2549 2551 # i18n: "tag" is a keyword
2550 2552 _(b'the argument to tag must be a string'),
2551 2553 )
2552 2554 kind, pattern, matcher = stringutil.stringmatcher(pattern)
2553 2555 if kind == b'literal':
2554 2556 # avoid resolving all tags
2555 2557 tn = repo._tagscache.tags.get(pattern, None)
2556 2558 if tn is None:
2557 2559 raise error.RepoLookupError(
2558 2560 _(b"tag '%s' does not exist") % pattern
2559 2561 )
2560 2562 s = {repo[tn].rev()}
2561 2563 else:
2562 2564 s = {cl.rev(n) for t, n in repo.tagslist() if matcher(t)}
2563 2565 else:
2564 2566 s = {cl.rev(n) for t, n in repo.tagslist() if t != b'tip'}
2565 2567 return subset & s
2566 2568
2567 2569
2568 2570 @predicate(b'tagged', safe=True)
2569 2571 def tagged(repo, subset, x):
2570 2572 return tag(repo, subset, x)
2571 2573
2572 2574
2573 2575 @predicate(b'orphan()', safe=True)
2574 2576 def orphan(repo, subset, x):
2575 2577 """Non-obsolete changesets with obsolete ancestors. (EXPERIMENTAL)"""
2576 2578 # i18n: "orphan" is a keyword
2577 2579 getargs(x, 0, 0, _(b"orphan takes no arguments"))
2578 2580 orphan = obsmod.getrevs(repo, b'orphan')
2579 2581 return subset & orphan
2580 2582
2581 2583
2582 2584 @predicate(b'unstable()', safe=True)
2583 2585 def unstable(repo, subset, x):
2584 2586 """Changesets with instabilities. (EXPERIMENTAL)"""
2585 2587 # i18n: "unstable" is a keyword
2586 2588 getargs(x, 0, 0, b'unstable takes no arguments')
2587 2589 _unstable = set()
2588 2590 _unstable.update(obsmod.getrevs(repo, b'orphan'))
2589 2591 _unstable.update(obsmod.getrevs(repo, b'phasedivergent'))
2590 2592 _unstable.update(obsmod.getrevs(repo, b'contentdivergent'))
2591 2593 return subset & baseset(_unstable)
2592 2594
2593 2595
2594 2596 @predicate(b'user(string)', safe=True, weight=10)
2595 2597 def user(repo, subset, x):
2596 2598 """User name contains string. The match is case-insensitive.
2597 2599
2598 2600 Pattern matching is supported for `string`. See
2599 2601 :hg:`help revisions.patterns`.
2600 2602 """
2601 2603 return author(repo, subset, x)
2602 2604
2603 2605
2604 2606 @predicate(b'wdir()', safe=True, weight=0)
2605 2607 def wdir(repo, subset, x):
2606 2608 """Working directory. (EXPERIMENTAL)"""
2607 2609 # i18n: "wdir" is a keyword
2608 2610 getargs(x, 0, 0, _(b"wdir takes no arguments"))
2609 2611 if wdirrev in subset or isinstance(subset, fullreposet):
2610 2612 return baseset([wdirrev])
2611 2613 return baseset()
2612 2614
2613 2615
2614 2616 def _orderedlist(repo, subset, x):
2615 2617 s = getstring(x, b"internal error")
2616 2618 if not s:
2617 2619 return baseset()
2618 2620 # remove duplicates here. it's difficult for caller to deduplicate sets
2619 2621 # because different symbols can point to the same rev.
2620 2622 cl = repo.changelog
2621 2623 ls = []
2622 2624 seen = set()
2623 2625 for t in s.split(b'\0'):
2624 2626 try:
2625 2627 # fast path for integer revision
2626 2628 r = int(t)
2627 2629 if (b'%d' % r) != t or r not in cl:
2628 2630 raise ValueError
2629 2631 revs = [r]
2630 2632 except ValueError:
2631 2633 revs = stringset(repo, subset, t, defineorder)
2632 2634
2633 2635 for r in revs:
2634 2636 if r in seen:
2635 2637 continue
2636 2638 if (
2637 2639 r in subset
2638 2640 or r in _virtualrevs
2639 2641 and isinstance(subset, fullreposet)
2640 2642 ):
2641 2643 ls.append(r)
2642 2644 seen.add(r)
2643 2645 return baseset(ls)
2644 2646
2645 2647
2646 2648 # for internal use
2647 2649 @predicate(b'_list', safe=True, takeorder=True)
2648 2650 def _list(repo, subset, x, order):
2649 2651 if order == followorder:
2650 2652 # slow path to take the subset order
2651 2653 return subset & _orderedlist(repo, fullreposet(repo), x)
2652 2654 else:
2653 2655 return _orderedlist(repo, subset, x)
2654 2656
2655 2657
2656 2658 def _orderedintlist(repo, subset, x):
2657 2659 s = getstring(x, b"internal error")
2658 2660 if not s:
2659 2661 return baseset()
2660 2662 ls = [int(r) for r in s.split(b'\0')]
2661 2663 s = subset
2662 2664 return baseset([r for r in ls if r in s])
2663 2665
2664 2666
2665 2667 # for internal use
2666 2668 @predicate(b'_intlist', safe=True, takeorder=True, weight=0)
2667 2669 def _intlist(repo, subset, x, order):
2668 2670 if order == followorder:
2669 2671 # slow path to take the subset order
2670 2672 return subset & _orderedintlist(repo, fullreposet(repo), x)
2671 2673 else:
2672 2674 return _orderedintlist(repo, subset, x)
2673 2675
2674 2676
2675 2677 def _orderedhexlist(repo, subset, x):
2676 2678 s = getstring(x, b"internal error")
2677 2679 if not s:
2678 2680 return baseset()
2679 2681 cl = repo.changelog
2680 2682 ls = [cl.rev(bin(r)) for r in s.split(b'\0')]
2681 2683 s = subset
2682 2684 return baseset([r for r in ls if r in s])
2683 2685
2684 2686
2685 2687 # for internal use
2686 2688 @predicate(b'_hexlist', safe=True, takeorder=True)
2687 2689 def _hexlist(repo, subset, x, order):
2688 2690 if order == followorder:
2689 2691 # slow path to take the subset order
2690 2692 return subset & _orderedhexlist(repo, fullreposet(repo), x)
2691 2693 else:
2692 2694 return _orderedhexlist(repo, subset, x)
2693 2695
2694 2696
2695 2697 methods = {
2696 2698 b"range": rangeset,
2697 2699 b"rangeall": rangeall,
2698 2700 b"rangepre": rangepre,
2699 2701 b"rangepost": rangepost,
2700 2702 b"dagrange": dagrange,
2701 2703 b"string": stringset,
2702 2704 b"symbol": stringset,
2703 2705 b"and": andset,
2704 2706 b"andsmally": andsmallyset,
2705 2707 b"or": orset,
2706 2708 b"not": notset,
2707 2709 b"difference": differenceset,
2708 2710 b"relation": relationset,
2709 2711 b"relsubscript": relsubscriptset,
2710 2712 b"subscript": subscriptset,
2711 2713 b"list": listset,
2712 2714 b"keyvalue": keyvaluepair,
2713 2715 b"func": func,
2714 2716 b"ancestor": ancestorspec,
2715 2717 b"parent": parentspec,
2716 2718 b"parentpost": parentpost,
2717 2719 b"smartset": rawsmartset,
2718 2720 }
2719 2721
2720 2722 relations = {
2721 2723 b"g": generationsrel,
2722 2724 b"generations": generationsrel,
2723 2725 }
2724 2726
2725 2727 subscriptrelations = {
2726 2728 b"g": generationssubrel,
2727 2729 b"generations": generationssubrel,
2728 2730 }
2729 2731
2730 2732
2731 2733 def lookupfn(repo):
2732 2734 def fn(symbol):
2733 2735 try:
2734 2736 return scmutil.isrevsymbol(repo, symbol)
2735 2737 except error.AmbiguousPrefixLookupError:
2736 2738 raise error.InputError(
2737 2739 b'ambiguous revision identifier: %s' % symbol
2738 2740 )
2739 2741
2740 2742 return fn
2741 2743
2742 2744
2743 2745 def match(ui, spec, lookup=None):
2744 2746 """Create a matcher for a single revision spec"""
2745 2747 return matchany(ui, [spec], lookup=lookup)
2746 2748
2747 2749
2748 2750 def matchany(ui, specs, lookup=None, localalias=None):
2749 2751 """Create a matcher that will include any revisions matching one of the
2750 2752 given specs
2751 2753
2752 2754 If lookup function is not None, the parser will first attempt to handle
2753 2755 old-style ranges, which may contain operator characters.
2754 2756
2755 2757 If localalias is not None, it is a dict {name: definitionstring}. It takes
2756 2758 precedence over [revsetalias] config section.
2757 2759 """
2758 2760 if not specs:
2759 2761
2760 2762 def mfunc(repo, subset=None):
2761 2763 return baseset()
2762 2764
2763 2765 return mfunc
2764 2766 if not all(specs):
2765 2767 raise error.ParseError(_(b"empty query"))
2766 2768 if len(specs) == 1:
2767 2769 tree = revsetlang.parse(specs[0], lookup)
2768 2770 else:
2769 2771 tree = (
2770 2772 b'or',
2771 2773 (b'list',) + tuple(revsetlang.parse(s, lookup) for s in specs),
2772 2774 )
2773 2775
2774 2776 aliases = []
2775 2777 warn = None
2776 2778 if ui:
2777 2779 aliases.extend(ui.configitems(b'revsetalias'))
2778 2780 warn = ui.warn
2779 2781 if localalias:
2780 2782 aliases.extend(localalias.items())
2781 2783 if aliases:
2782 2784 tree = revsetlang.expandaliases(tree, aliases, warn=warn)
2783 2785 tree = revsetlang.foldconcat(tree)
2784 2786 tree = revsetlang.analyze(tree)
2785 2787 tree = revsetlang.optimize(tree)
2786 2788 return makematcher(tree)
2787 2789
2788 2790
2789 2791 def makematcher(tree):
2790 2792 """Create a matcher from an evaluatable tree"""
2791 2793
2792 2794 def mfunc(repo, subset=None, order=None):
2793 2795 if order is None:
2794 2796 if subset is None:
2795 2797 order = defineorder # 'x'
2796 2798 else:
2797 2799 order = followorder # 'subset & x'
2798 2800 if subset is None:
2799 2801 subset = fullreposet(repo)
2800 2802 return getset(repo, subset, tree, order)
2801 2803
2802 2804 return mfunc
2803 2805
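# A minimal sketch of the module-level entry points, e.g. from an extension
# that already holds `ui` and `repo` objects (the spec is hypothetical):
#
#   from mercurial import revset
#   m = revset.match(ui, b'head() and not public()')
#   for r in m(repo):
#       ui.write(b'%d\n' % r)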
2804 2806
2805 2807 def loadpredicate(ui, extname, registrarobj):
2806 2808 """Load revset predicates from specified registrarobj"""
2807 2809 for name, func in registrarobj._table.items():
2808 2810 symbols[name] = func
2809 2811 if func._safe:
2810 2812 safesymbols.add(name)
2811 2813
2812 2814
2813 2815 # load built-in predicates explicitly to setup safesymbols
2814 2816 loadpredicate(None, None, predicate)
2815 2817
2816 2818 # tell hggettext to extract docstrings from these functions:
2817 2819 i18nfunctions = symbols.values()
@@ -1,723 +1,651 b''
1 1 # url.py - HTTP handling for mercurial
2 2 #
3 3 # Copyright 2005, 2006, 2007, 2008 Olivia Mackall <olivia@selenic.com>
4 4 # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
5 5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 6 #
7 7 # This software may be used and distributed according to the terms of the
8 8 # GNU General Public License version 2 or any later version.
9 9
10 10
11 11 import base64
12 12 import socket
13 13
14 14 from .i18n import _
15 from .pycompat import getattr
16 15 from . import (
17 16 encoding,
18 17 error,
19 18 httpconnection as httpconnectionmod,
20 19 keepalive,
21 20 pycompat,
22 21 sslutil,
23 22 urllibcompat,
24 23 util,
25 24 )
26 25 from .utils import (
27 26 stringutil,
28 27 urlutil,
29 28 )
30 29
31 30 httplib = util.httplib
32 31 stringio = util.stringio
33 32 urlerr = util.urlerr
34 33 urlreq = util.urlreq
35 34
36 35
37 36 def escape(s, quote=None):
38 37 """Replace special characters "&", "<" and ">" to HTML-safe sequences.
39 38 If the optional flag quote is true, the quotation mark character (")
40 39 is also translated.
41 40
42 41 This is the same as cgi.escape in Python, but always operates on
43 42 bytes, whereas cgi.escape in Python 3 only works on unicodes.
44 43 """
45 44 s = s.replace(b"&", b"&amp;")
46 45 s = s.replace(b"<", b"&lt;")
47 46 s = s.replace(b">", b"&gt;")
48 47 if quote:
49 48 s = s.replace(b'"', b"&quot;")
50 49 return s
51 50
52 51
53 52 class passwordmgr:
54 53 def __init__(self, ui, passwddb):
55 54 self.ui = ui
56 55 self.passwddb = passwddb
57 56
58 57 def add_password(self, realm, uri, user, passwd):
59 58 return self.passwddb.add_password(realm, uri, user, passwd)
60 59
61 60 def find_user_password(self, realm, authuri):
62 61 assert isinstance(realm, (type(None), str))
63 62 assert isinstance(authuri, str)
64 63 authinfo = self.passwddb.find_user_password(realm, authuri)
65 64 user, passwd = authinfo
66 65 user, passwd = pycompat.bytesurl(user), pycompat.bytesurl(passwd)
67 66 if user and passwd:
68 67 self._writedebug(user, passwd)
69 68 return (pycompat.strurl(user), pycompat.strurl(passwd))
70 69
71 70 if not user or not passwd:
72 71 res = httpconnectionmod.readauthforuri(self.ui, authuri, user)
73 72 if res:
74 73 group, auth = res
75 74 user, passwd = auth.get(b'username'), auth.get(b'password')
76 75 self.ui.debug(b"using auth.%s.* for authentication\n" % group)
77 76 if not user or not passwd:
78 77 u = urlutil.url(pycompat.bytesurl(authuri))
79 78 u.query = None
80 79 if not self.ui.interactive():
81 80 raise error.Abort(
82 81 _(b'http authorization required for %s')
83 82 % urlutil.hidepassword(bytes(u))
84 83 )
85 84
86 85 self.ui.write(
87 86 _(b"http authorization required for %s\n")
88 87 % urlutil.hidepassword(bytes(u))
89 88 )
90 89 self.ui.write(_(b"realm: %s\n") % pycompat.bytesurl(realm))
91 90 if user:
92 91 self.ui.write(_(b"user: %s\n") % user)
93 92 else:
94 93 user = self.ui.prompt(_(b"user:"), default=None)
95 94
96 95 if not passwd:
97 96 passwd = self.ui.getpass()
98 97
99 98 # As of Python 3.8, the default implementation of
100 99 # AbstractBasicAuthHandler.retry_http_basic_auth() assumes the user
101 100 # is set if pw is not None. This means (None, str) is not a valid
102 101 # return type of find_user_password().
103 102 if user is None:
104 103 return None, None
105 104
106 105 self.passwddb.add_password(realm, authuri, user, passwd)
107 106 self._writedebug(user, passwd)
108 107 return (pycompat.strurl(user), pycompat.strurl(passwd))
109 108
110 109 def _writedebug(self, user, passwd):
111 110 msg = _(b'http auth: user %s, password %s\n')
112 111 self.ui.debug(msg % (user, passwd and b'*' * len(passwd) or b'not set'))
113 112
114 113 def find_stored_password(self, authuri):
115 114 return self.passwddb.find_user_password(None, authuri)
116 115
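# A hedged sketch of the [auth] configuration that readauthforuri() consults
# above (group name, host and credentials are hypothetical):
#     [auth]
#     example.prefix = hg.example.com
#     example.username = alice
#     example.password = secret
# When no usable credentials are found and the ui is interactive, the class
# falls back to prompting, as coded above.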
117 116
118 117 class proxyhandler(urlreq.proxyhandler):
119 118 def __init__(self, ui):
120 119 proxyurl = ui.config(b"http_proxy", b"host") or encoding.environ.get(
121 120 b'http_proxy'
122 121 )
123 122 # XXX proxyauthinfo = None
124 123
125 124 if proxyurl:
126 125 # proxy can be proper url or host[:port]
127 126 if not (
128 127 proxyurl.startswith(b'http:') or proxyurl.startswith(b'https:')
129 128 ):
130 129 proxyurl = b'http://' + proxyurl + b'/'
131 130 proxy = urlutil.url(proxyurl)
132 131 if not proxy.user:
133 132 proxy.user = ui.config(b"http_proxy", b"user")
134 133 proxy.passwd = ui.config(b"http_proxy", b"passwd")
135 134
136 135 # see if we should use a proxy for this url
137 136 no_list = [b"localhost", b"127.0.0.1"]
138 137 no_list.extend(
139 138 [p.lower() for p in ui.configlist(b"http_proxy", b"no")]
140 139 )
141 140 no_list.extend(
142 141 [
143 142 p.strip().lower()
144 143 for p in encoding.environ.get(b"no_proxy", b'').split(b',')
145 144 if p.strip()
146 145 ]
147 146 )
148 147 # "http_proxy.always" config is for running tests on localhost
149 148 if ui.configbool(b"http_proxy", b"always"):
150 149 self.no_list = []
151 150 else:
152 151 self.no_list = no_list
153 152
154 153 # Keys and values need to be str because the standard library
155 154 # expects them to be.
156 155 proxyurl = str(proxy)
157 156 proxies = {'http': proxyurl, 'https': proxyurl}
158 157 ui.debug(
159 158 b'proxying through %s\n' % urlutil.hidepassword(bytes(proxy))
160 159 )
161 160 else:
162 161 proxies = {}
163 162
164 163 urlreq.proxyhandler.__init__(self, proxies)
165 164 self.ui = ui
166 165
167 166 def proxy_open(self, req, proxy, type_):
168 167 host = pycompat.bytesurl(urllibcompat.gethost(req)).split(b':')[0]
169 168 for e in self.no_list:
170 169 if host == e:
171 170 return None
172 171 if e.startswith(b'*.') and host.endswith(e[2:]):
173 172 return None
174 173 if e.startswith(b'.') and host.endswith(e[1:]):
175 174 return None
176 175
177 176 return urlreq.proxyhandler.proxy_open(self, req, proxy, type_)
178 177
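# A minimal sketch of the hgrc settings this handler reads (all values are
# hypothetical):
#     [http_proxy]
#     host = proxy.example.com:3128
#     user = alice
#     passwd = secret
#     no = localhost, *.internal.example.com
# Hosts matching "no" (or the $no_proxy environment variable) bypass the proxy
# via proxy_open() above; "http_proxy.always" exists for the test suite.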
179 178
180 179 def _gen_sendfile(orgsend):
181 180 def _sendfile(self, data):
182 181 # send a file
183 182 if isinstance(data, httpconnectionmod.httpsendfile):
184 183 # if auth required, some data sent twice, so rewind here
185 184 data.seek(0)
186 185 for chunk in util.filechunkiter(data):
187 186 orgsend(self, chunk)
188 187 else:
189 188 orgsend(self, data)
190 189
191 190 return _sendfile
192 191
193 192
194 193 has_https = util.safehasattr(urlreq, b'httpshandler')
195 194
196 195
197 196 class httpconnection(keepalive.HTTPConnection):
198 197 # must be able to send big bundle as stream.
199 198 send = _gen_sendfile(keepalive.HTTPConnection.send)
200 199
201 def getresponse(self):
202 proxyres = getattr(self, 'proxyres', None)
203 if proxyres:
204 if proxyres.will_close:
205 self.close()
206 self.proxyres = None
207 return proxyres
208 return keepalive.HTTPConnection.getresponse(self)
209
210 200
211 201 # Large parts of this function date from before Python 2.6
212 202 # and could potentially be removed.
213 203 def _generic_start_transaction(handler, h, req):
214 204 tunnel_host = req._tunnel_host
215 205 if tunnel_host:
216 206 if tunnel_host[:7] not in ['http://', 'https:/']:
217 207 tunnel_host = 'https://' + tunnel_host
218 208 new_tunnel = True
219 209 else:
220 210 tunnel_host = urllibcompat.getselector(req)
221 211 new_tunnel = False
222 212
223 213 if new_tunnel or tunnel_host == urllibcompat.getfullurl(req): # has proxy
224 214 u = urlutil.url(pycompat.bytesurl(tunnel_host))
225 215 if new_tunnel or u.scheme == b'https': # only use CONNECT for HTTPS
226 216 h.realhostport = b':'.join([u.host, (u.port or b'443')])
227 217 h.headers = req.headers.copy()
228 218 h.headers.update(handler.parent.addheaders)
229 219 return
230 220
231 221 h.realhostport = None
232 222 h.headers = None
233 223
234 224
235 225 def _generic_proxytunnel(self):
236 226 proxyheaders = {
237 227 pycompat.bytestr(x): pycompat.bytestr(self.headers[x])
238 228 for x in self.headers
239 229 if x.lower().startswith('proxy-')
240 230 }
241 231 self.send(b'CONNECT %s HTTP/1.0\r\n' % self.realhostport)
242 232 for header in proxyheaders.items():
243 233 self.send(b'%s: %s\r\n' % header)
244 234 self.send(b'\r\n')
245 235
246 236 # majority of the following code is duplicated from
247 237 # httplib.HTTPConnection as there are no adequate places to
248 238 # override functions to provide the needed functionality.
249 239
250 240 res = self.response_class(self.sock, method=self._method)
251 241
252 242 while True:
253 243 version, status, reason = res._read_status()
254 244 if status != httplib.CONTINUE:
255 245 break
256 246 # skip lines that are all whitespace
257 247 list(iter(lambda: res.fp.readline().strip(), b''))
258 res.status = status
259 res.reason = reason.strip()
260 248
261 if res.status == 200:
249 if status == 200:
262 250 # skip lines until we find a blank line
263 251 list(iter(res.fp.readline, b'\r\n'))
264 return True
265
266 if version == b'HTTP/1.0':
267 res.version = 10
268 elif version.startswith(b'HTTP/1.'):
269 res.version = 11
270 elif version == b'HTTP/0.9':
271 res.version = 9
272 252 else:
273 raise httplib.UnknownProtocol(version)
274
275 if res.version == 9:
276 res.length = None
277 res.chunked = 0
278 res.will_close = 1
279 res.msg = httplib.HTTPMessage(stringio())
280 return False
281
282 res.msg = httplib.HTTPMessage(res.fp)
283 res.msg.fp = None
284
285 # are we using the chunked-style of transfer encoding?
286 trenc = res.msg.getheader(b'transfer-encoding')
287 if trenc and trenc.lower() == b"chunked":
288 res.chunked = 1
289 res.chunk_left = None
290 else:
291 res.chunked = 0
292
293 # will the connection close at the end of the response?
294 res.will_close = res._check_close()
295
296 # do we have a Content-Length?
297 # NOTE: RFC 2616, section 4.4, #3 says we ignore this if
298 # transfer-encoding is "chunked"
299 length = res.msg.getheader(b'content-length')
300 if length and not res.chunked:
301 try:
302 res.length = int(length)
303 except ValueError:
304 res.length = None
305 else:
306 if res.length < 0: # ignore nonsensical negative lengths
307 res.length = None
308 else:
309 res.length = None
310
311 # does the body have a fixed length? (of zero)
312 if (
313 status == httplib.NO_CONTENT
314 or status == httplib.NOT_MODIFIED
315 or 100 <= status < 200
316 or res._method == b'HEAD' # 1xx codes
317 ):
318 res.length = 0
319
320 # if the connection remains open, and we aren't using chunked, and
321 # a content-length was not provided, then assume that the connection
322 # WILL close.
323 if not res.will_close and not res.chunked and res.length is None:
324 res.will_close = 1
325
326 self.proxyres = res
327
328 return False
253 self.close()
254 raise socket.error(
255 "Tunnel connection failed: %d %s" % (status, reason.strip())
256 )
329 257
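# Roughly, the tunnel handshake above amounts to the following exchange
# (host, port and header values are illustrative only):
#     >>> CONNECT hg.example.com:443 HTTP/1.0
#     >>> Proxy-Authorization: Basic ...
#     >>>
#     <<< HTTP/1.0 200 Connection established
# "100 Continue" responses are skipped, a 200 completes the tunnel, and any
# other status closes the socket and raises socket.error.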
330 258
331 259 class httphandler(keepalive.HTTPHandler):
332 260 def http_open(self, req):
333 261 return self.do_open(httpconnection, req)
334 262
335 263 def _start_transaction(self, h, req):
336 264 _generic_start_transaction(self, h, req)
337 265 return keepalive.HTTPHandler._start_transaction(self, h, req)
338 266
339 267
340 268 class logginghttpconnection(keepalive.HTTPConnection):
341 269 def __init__(self, createconn, *args, **kwargs):
342 270 keepalive.HTTPConnection.__init__(self, *args, **kwargs)
343 271 self._create_connection = createconn
344 272
345 273
346 274 class logginghttphandler(httphandler):
347 275 """HTTP handler that logs socket I/O."""
348 276
349 277 def __init__(self, logfh, name, observeropts, timeout=None):
350 278 super(logginghttphandler, self).__init__(timeout=timeout)
351 279
352 280 self._logfh = logfh
353 281 self._logname = name
354 282 self._observeropts = observeropts
355 283
356 284 # do_open() calls the passed class to instantiate an HTTPConnection. We
357 285 # pass in a callable method that creates a custom HTTPConnection instance
358 286 # whose socket-creation callback wraps the socket in a logging observer.
359 287 def http_open(self, req):
360 288 return self.do_open(self._makeconnection, req)
361 289
362 290 def _makeconnection(self, *args, **kwargs):
363 291 def createconnection(*args, **kwargs):
364 292 sock = socket.create_connection(*args, **kwargs)
365 293 return util.makeloggingsocket(
366 294 self._logfh, sock, self._logname, **self._observeropts
367 295 )
368 296
369 297 return logginghttpconnection(createconnection, *args, **kwargs)
370 298
371 299
372 300 if has_https:
373 301
374 302 class httpsconnection(keepalive.HTTPConnection):
375 303 response_class = keepalive.HTTPResponse
376 304 default_port = httplib.HTTPS_PORT
377 305 # must be able to send big bundle as stream.
378 306 send = _gen_sendfile(keepalive.safesend)
379 307 getresponse = keepalive.wrapgetresponse(httplib.HTTPConnection)
380 308
381 309 def __init__(
382 310 self,
383 311 host,
384 312 port=None,
385 313 key_file=None,
386 314 cert_file=None,
387 315 *args,
388 316 **kwargs
389 317 ):
390 318 keepalive.HTTPConnection.__init__(self, host, port, *args, **kwargs)
391 319 self.key_file = key_file
392 320 self.cert_file = cert_file
393 321
394 322 def connect(self):
395 323 self.sock = socket.create_connection((self.host, self.port))
396 324
397 325 host = self.host
398 326 if self.realhostport: # use CONNECT proxy
399 327 _generic_proxytunnel(self)
400 328 host = self.realhostport.rsplit(b':', 1)[0]
401 329 self.sock = sslutil.wrapsocket(
402 330 self.sock,
403 331 self.key_file,
404 332 self.cert_file,
405 333 ui=self.ui,
406 334 serverhostname=host,
407 335 )
408 336 sslutil.validatesocket(self.sock)
409 337
410 338 class httpshandler(keepalive.KeepAliveHandler, urlreq.httpshandler):
411 339 def __init__(self, ui, timeout=None):
412 340 keepalive.KeepAliveHandler.__init__(self, timeout=timeout)
413 341 urlreq.httpshandler.__init__(self)
414 342 self.ui = ui
415 343 self.pwmgr = passwordmgr(self.ui, self.ui.httppasswordmgrdb)
416 344
417 345 def _start_transaction(self, h, req):
418 346 _generic_start_transaction(self, h, req)
419 347 return keepalive.KeepAliveHandler._start_transaction(self, h, req)
420 348
421 349 def https_open(self, req):
422 350 # urllibcompat.getfullurl() does not contain credentials
423 351 # and we may need them to match the certificates.
424 352 url = urllibcompat.getfullurl(req)
425 353 user, password = self.pwmgr.find_stored_password(url)
426 354 res = httpconnectionmod.readauthforuri(self.ui, url, user)
427 355 if res:
428 356 group, auth = res
429 357 self.auth = auth
430 358 self.ui.debug(b"using auth.%s.* for authentication\n" % group)
431 359 else:
432 360 self.auth = None
433 361 return self.do_open(self._makeconnection, req)
434 362
435 363 def _makeconnection(self, host, port=None, *args, **kwargs):
436 364 keyfile = None
437 365 certfile = None
438 366
439 367 if len(args) >= 1: # key_file
440 368 keyfile = args[0]
441 369 if len(args) >= 2: # cert_file
442 370 certfile = args[1]
443 371 args = args[2:]
444 372
445 373 # if the user has specified different key/cert files in
446 374 # hgrc, we prefer these
447 375 if self.auth and b'key' in self.auth and b'cert' in self.auth:
448 376 keyfile = self.auth[b'key']
449 377 certfile = self.auth[b'cert']
450 378
451 379 conn = httpsconnection(
452 380 host, port, keyfile, certfile, *args, **kwargs
453 381 )
454 382 conn.ui = self.ui
455 383 return conn
456 384
457 385
458 386 class httpdigestauthhandler(urlreq.httpdigestauthhandler):
459 387 def __init__(self, *args, **kwargs):
460 388 urlreq.httpdigestauthhandler.__init__(self, *args, **kwargs)
461 389 self.retried_req = None
462 390
463 391 def reset_retry_count(self):
464 392 # Python 2.6.5 will call this on 401 or 407 errors and thus loop
465 393 # forever. We disable reset_retry_count completely and reset in
466 394 # http_error_auth_reqed instead.
467 395 pass
468 396
469 397 def http_error_auth_reqed(self, auth_header, host, req, headers):
470 398 # Reset the retry counter once for each request.
471 399 if req is not self.retried_req:
472 400 self.retried_req = req
473 401 self.retried = 0
474 402 return urlreq.httpdigestauthhandler.http_error_auth_reqed(
475 403 self, auth_header, host, req, headers
476 404 )
477 405
478 406
479 407 class httpbasicauthhandler(urlreq.httpbasicauthhandler):
480 408 def __init__(self, *args, **kwargs):
481 409 self.auth = None
482 410 urlreq.httpbasicauthhandler.__init__(self, *args, **kwargs)
483 411 self.retried_req = None
484 412
485 413 def http_request(self, request):
486 414 if self.auth:
487 415 request.add_unredirected_header(self.auth_header, self.auth)
488 416
489 417 return request
490 418
491 419 def https_request(self, request):
492 420 if self.auth:
493 421 request.add_unredirected_header(self.auth_header, self.auth)
494 422
495 423 return request
496 424
497 425 def reset_retry_count(self):
498 426 # Python 2.6.5 will call this on 401 or 407 errors and thus loop
499 427 # forever. We disable reset_retry_count completely and reset in
500 428 # http_error_auth_reqed instead.
501 429 pass
502 430
503 431 def http_error_auth_reqed(self, auth_header, host, req, headers):
504 432 # Reset the retry counter once for each request.
505 433 if req is not self.retried_req:
506 434 self.retried_req = req
507 435 self.retried = 0
508 436 return urlreq.httpbasicauthhandler.http_error_auth_reqed(
509 437 self, auth_header, host, req, headers
510 438 )
511 439
512 440 def retry_http_basic_auth(self, host, req, realm):
513 441 user, pw = self.passwd.find_user_password(
514 442 realm, urllibcompat.getfullurl(req)
515 443 )
516 444 if pw is not None:
517 445 raw = b"%s:%s" % (pycompat.bytesurl(user), pycompat.bytesurl(pw))
518 446 auth = 'Basic %s' % pycompat.strurl(base64.b64encode(raw).strip())
519 447 if req.get_header(self.auth_header, None) == auth:
520 448 return None
521 449 self.auth = auth
522 450 req.add_unredirected_header(self.auth_header, auth)
523 451 return self.parent.open(req)
524 452 else:
525 453 return None
526 454
527 455
528 456 class cookiehandler(urlreq.basehandler):
529 457 def __init__(self, ui):
530 458 self.cookiejar = None
531 459
532 460 cookiefile = ui.config(b'auth', b'cookiefile')
533 461 if not cookiefile:
534 462 return
535 463
536 464 cookiefile = util.expandpath(cookiefile)
537 465 try:
538 466 cookiejar = util.cookielib.MozillaCookieJar(
539 467 pycompat.fsdecode(cookiefile)
540 468 )
541 469 cookiejar.load()
542 470 self.cookiejar = cookiejar
543 471 except util.cookielib.LoadError as e:
544 472 ui.warn(
545 473 _(
546 474 b'(error loading cookie file %s: %s; continuing without '
547 475 b'cookies)\n'
548 476 )
549 477 % (cookiefile, stringutil.forcebytestr(e))
550 478 )
551 479
552 480 def http_request(self, request):
553 481 if self.cookiejar:
554 482 self.cookiejar.add_cookie_header(request)
555 483
556 484 return request
557 485
558 486 def https_request(self, request):
559 487 if self.cookiejar:
560 488 self.cookiejar.add_cookie_header(request)
561 489
562 490 return request
563 491
564 492
565 493 handlerfuncs = []
566 494
567 495
568 496 def opener(
569 497 ui,
570 498 authinfo=None,
571 499 useragent=None,
572 500 loggingfh=None,
573 501 loggingname=b's',
574 502 loggingopts=None,
575 503 sendaccept=True,
576 504 ):
577 505 """
578 506 construct an opener suitable for urllib2
579 507 authinfo will be added to the password manager
580 508
581 509 The opener can be configured to log socket events if the various
582 510 ``logging*`` arguments are specified.
583 511
584 512 ``loggingfh`` denotes a file object to log events to.
585 513 ``loggingname`` denotes the name to print when logging.
586 514 ``loggingopts`` is a dict of keyword arguments to pass to the constructed
587 515 ``util.socketobserver`` instance.
588 516
589 517 ``sendaccept`` allows controlling whether the ``Accept`` request header
590 518 is sent. The header is sent by default.
591 519 """
592 520 timeout = ui.configwith(float, b'http', b'timeout')
593 521 handlers = []
594 522
595 523 if loggingfh:
596 524 handlers.append(
597 525 logginghttphandler(
598 526 loggingfh, loggingname, loggingopts or {}, timeout=timeout
599 527 )
600 528 )
601 529 # We don't yet support HTTPS when logging I/O. If we attempt to open
602 530 # an HTTPS URL, we'll likely fail due to unknown protocol.
603 531
604 532 else:
605 533 handlers.append(httphandler(timeout=timeout))
606 534 if has_https:
607 535 handlers.append(httpshandler(ui, timeout=timeout))
608 536
609 537 handlers.append(proxyhandler(ui))
610 538
611 539 passmgr = passwordmgr(ui, ui.httppasswordmgrdb)
612 540 if authinfo is not None:
613 541 realm, uris, user, passwd = authinfo
614 542 saveduser, savedpass = passmgr.find_stored_password(uris[0])
615 543 if user != saveduser or passwd:
616 544 passmgr.add_password(realm, uris, user, passwd)
617 545 ui.debug(
618 546 b'http auth: user %s, password %s\n'
619 547 % (user, passwd and b'*' * len(passwd) or b'not set')
620 548 )
621 549
622 550 handlers.extend(
623 551 (httpbasicauthhandler(passmgr), httpdigestauthhandler(passmgr))
624 552 )
625 553 handlers.extend([h(ui, passmgr) for h in handlerfuncs])
626 554 handlers.append(cookiehandler(ui))
627 555 opener = urlreq.buildopener(*handlers)
628 556
629 557 # keepalive.py's handlers will populate these attributes if they exist.
630 558 opener.requestscount = 0
631 559 opener.sentbytescount = 0
632 560 opener.receivedbytescount = 0
633 561
634 562 # The user agent should *NOT* be used by servers for e.g.
635 563 # protocol detection or feature negotiation: there are other
636 564 # facilities for that.
637 565 #
638 566 # "mercurial/proto-1.0" was the original user agent string and
639 567 # exists for backwards compatibility reasons.
640 568 #
641 569 # The "(Mercurial %s)" string contains the distribution
642 570 # name and version. Other client implementations should choose their
643 571 # own distribution name. Since servers should not be using the user
644 572 # agent string for anything, clients should be able to define whatever
645 573 # user agent they deem appropriate.
646 574 #
647 575 # The custom user agent is for lfs, because unfortunately some servers
648 576 # do look at this value.
649 577 if not useragent:
650 578 agent = b'mercurial/proto-1.0 (Mercurial %s)' % util.version()
651 579 opener.addheaders = [('User-agent', pycompat.sysstr(agent))]
652 580 else:
653 581 opener.addheaders = [('User-agent', pycompat.sysstr(useragent))]
654 582
655 583 # This header should only be needed by wire protocol requests. But it has
656 584 # been sent on all requests since forever. We keep sending it for backwards
657 585 # compatibility reasons. Modern versions of the wire protocol use
658 586 # X-HgProto-<N> for advertising client support.
659 587 if sendaccept:
660 588 opener.addheaders.append(('Accept', 'application/mercurial-0.1'))
661 589
662 590 return opener
663 591
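# A hedged usage sketch (URL and user agent are made up; `ui` must be a
# mercurial ui instance):
#     op = opener(ui, useragent=b'my-tool/0.1', sendaccept=False)
#     resp = op.open('https://hg.example.com/repo?cmd=capabilities')
#     data = resp.read()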
664 592
665 593 def open(ui, url_, data=None, sendaccept=True):
666 594 u = urlutil.url(url_)
667 595 if u.scheme:
668 596 u.scheme = u.scheme.lower()
669 597 url_, authinfo = u.authinfo()
670 598 else:
671 599 path = util.normpath(util.abspath(url_))
672 600 url_ = b'file://' + pycompat.bytesurl(
673 601 urlreq.pathname2url(pycompat.fsdecode(path))
674 602 )
675 603 authinfo = None
676 604 return opener(ui, authinfo, sendaccept=sendaccept).open(
677 605 pycompat.strurl(url_), data
678 606 )
679 607
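# Sketch of the two paths through open() above (URLs are illustrative):
#     open(ui, b'https://hg.example.com/repo')  # scheme present: authinfo extracted
#     open(ui, b'some/local/path')              # no scheme: rewritten as a file:// URL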
680 608
681 609 def wrapresponse(resp):
682 610 """Wrap a response object with common error handlers.
683 611
684 612 This ensures that any I/O from any consumer raises the appropriate
685 613 error and messaging.
686 614 """
687 615 origread = resp.read
688 616
689 617 class readerproxy(resp.__class__):
690 618 def read(self, size=None):
691 619 try:
692 620 return origread(size)
693 621 except httplib.IncompleteRead as e:
694 622 # e.expected is an integer if length known or None otherwise.
695 623 if e.expected:
696 624 got = len(e.partial)
697 625 total = e.expected + got
698 626 msg = _(
699 627 b'HTTP request error (incomplete response; '
700 628 b'expected %d bytes got %d)'
701 629 ) % (total, got)
702 630 else:
703 631 msg = _(b'HTTP request error (incomplete response)')
704 632
705 633 raise error.PeerTransportError(
706 634 msg,
707 635 hint=_(
708 636 b'this may be an intermittent network failure; '
709 637 b'if the error persists, consider contacting the '
710 638 b'network or server operator'
711 639 ),
712 640 )
713 641 except httplib.HTTPException as e:
714 642 raise error.PeerTransportError(
715 643 _(b'HTTP request error (%s)') % e,
716 644 hint=_(
717 645 b'this may be an intermittent network failure; '
718 646 b'if the error persists, consider contacting the '
719 647 b'network or server operator'
720 648 ),
721 649 )
722 650
723 651 resp.__class__ = readerproxy
@@ -1,48 +1,48 b''
1 1 [package]
2 2 name = "hg-core"
3 3 version = "0.1.0"
4 4 authors = ["Georges Racinet <gracinet@anybox.fr>"]
5 5 description = "Mercurial pure Rust core library, with no assumption on Python bindings (FFI)"
6 6 edition = "2018"
7 7
8 8 [lib]
9 9 name = "hg"
10 10
11 11 [dependencies]
12 12 bitflags = "1.3.2"
13 13 bytes-cast = "0.2.0"
14 14 byteorder = "1.4.3"
15 15 derive_more = "0.99.17"
16 16 hashbrown = { version = "0.9.1", features = ["rayon"] }
17 17 home = "0.5.3"
18 im-rc = "15.0.0"
18 im-rc = "15.0"
19 19 itertools = "0.10.3"
20 20 lazy_static = "1.4.0"
21 21 libc = "0.2"
22 22 ouroboros = "0.15.0"
23 23 rand = "0.8.4"
24 24 rand_pcg = "0.3.1"
25 25 rand_distr = "0.4.3"
26 26 rayon = "1.5.1"
27 27 regex = "1.5.5"
28 28 sha-1 = "0.10.0"
29 29 twox-hash = "1.6.2"
30 30 same-file = "1.0.6"
31 31 tempfile = "3.1.0"
32 32 crossbeam-channel = "0.4"
33 33 micro-timer = "0.3.0"
34 34 log = "0.4.8"
35 35 memmap2 = {version = "0.4", features = ["stable_deref_trait"]}
36 36 zstd = "0.5.3"
37 37 format-bytes = "0.3.0"
38 38
39 39 # We don't use the `miniz-oxide` backend, so as not to change rhg benchmarks,
40 40 # until we have a clearer view of which backend is the fastest.
41 41 [dependencies.flate2]
42 42 version = "1.0.22"
43 43 features = ["zlib"]
44 44 default-features = false
45 45
46 46 [dev-dependencies]
47 47 clap = "2.34.0"
48 48 pretty_assertions = "1.1.0"
@@ -1,1170 +1,1170 b''
1 1 import distutils.version
2 2 import os
3 3 import re
4 4 import socket
5 5 import stat
6 6 import subprocess
7 7 import sys
8 8 import tempfile
9 9
10 10 tempprefix = 'hg-hghave-'
11 11
12 12 checks = {
13 13 "true": (lambda: True, "yak shaving"),
14 14 "false": (lambda: False, "nail clipper"),
15 15 "known-bad-output": (lambda: True, "use for currently known bad output"),
16 16 "missing-correct-output": (lambda: False, "use for missing good output"),
17 17 }
18 18
19 19 try:
20 20 import msvcrt
21 21
22 22 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
23 23 msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
24 24 except ImportError:
25 25 pass
26 26
27 27 stdout = getattr(sys.stdout, 'buffer', sys.stdout)
28 28 stderr = getattr(sys.stderr, 'buffer', sys.stderr)
29 29
30 30 is_not_python2 = sys.version_info[0] >= 3
31 31 if is_not_python2:
32 32
33 33 def _sys2bytes(p):
34 34 if p is None:
35 35 return p
36 36 return p.encode('utf-8')
37 37
38 38 def _bytes2sys(p):
39 39 if p is None:
40 40 return p
41 41 return p.decode('utf-8')
42 42
43 43
44 44 else:
45 45
46 46 def _sys2bytes(p):
47 47 return p
48 48
49 49 _bytes2sys = _sys2bytes
50 50
51 51
52 52 def check(name, desc):
53 53 """Registers a check function for a feature."""
54 54
55 55 def decorator(func):
56 56 checks[name] = (func, desc)
57 57 return func
58 58
59 59 return decorator
60 60
61 61
62 62 def checkvers(name, desc, vers):
63 63 """Registers a check function for each of a series of versions.
64 64
65 65 vers can be a list or an iterator.
66 66
67 67 Produces a series of feature checks that have the form <name><vers> without
68 68 any punctuation (even if there's punctuation in 'vers'; i.e. this produces
69 69 'py38', not 'py3.8' or 'py-38')."""
70 70
71 71 def decorator(func):
72 72 def funcv(v):
73 73 def f():
74 74 return func(v)
75 75
76 76 return f
77 77
78 78 for v in vers:
79 79 v = str(v)
80 80 f = funcv(v)
81 81 checks['%s%s' % (name, v.replace('.', ''))] = (f, desc % v)
82 82 return func
83 83
84 84 return decorator
85 85
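# Example of registering a new feature check with the decorators above
# (the "frobnicator" tool is hypothetical):
#     @check("frobnicator", "frobnicator command line tool")
#     def has_frobnicator():
#         return matchoutput('frobnicator --version', br'frobnicator \d+\.\d+')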
86 86
87 87 def checkfeatures(features):
88 88 result = {
89 89 'error': [],
90 90 'missing': [],
91 91 'skipped': [],
92 92 }
93 93
94 94 for feature in features:
95 95 negate = feature.startswith('no-')
96 96 if negate:
97 97 feature = feature[3:]
98 98
99 99 if feature not in checks:
100 100 result['missing'].append(feature)
101 101 continue
102 102
103 103 check, desc = checks[feature]
104 104 try:
105 105 available = check()
106 106 except Exception as e:
107 107 result['error'].append('hghave check %s failed: %r' % (feature, e))
108 108 continue
109 109
110 110 if not negate and not available:
111 111 result['skipped'].append('missing feature: %s' % desc)
112 112 elif negate and available:
113 113 result['skipped'].append('system supports %s' % desc)
114 114
115 115 return result
116 116
117 117
118 118 def require(features):
119 119 """Require that features are available, exiting if not."""
120 120 result = checkfeatures(features)
121 121
122 122 for missing in result['missing']:
123 123 stderr.write(
124 124 ('skipped: unknown feature: %s\n' % missing).encode('utf-8')
125 125 )
126 126 for msg in result['skipped']:
127 127 stderr.write(('skipped: %s\n' % msg).encode('utf-8'))
128 128 for msg in result['error']:
129 129 stderr.write(('%s\n' % msg).encode('utf-8'))
130 130
131 131 if result['missing']:
132 132 sys.exit(2)
133 133
134 134 if result['skipped'] or result['error']:
135 135 sys.exit(1)
136 136
137 137
138 138 def matchoutput(cmd, regexp, ignorestatus=False):
139 139 """Return the match object if cmd executes successfully and its output
140 140 is matched by the supplied regular expression.
141 141 """
142 142
143 143 # Tests on Windows have to fake USERPROFILE to point to the test area so
144 144 # that `~` is properly expanded on py3.8+. However, some tools like black
145 145 # make calls that need the real USERPROFILE in order to run `foo --version`.
146 146 env = os.environ
147 147 if os.name == 'nt':
148 148 env = os.environ.copy()
149 149 env['USERPROFILE'] = env['REALUSERPROFILE']
150 150
151 151 r = re.compile(regexp)
152 152 p = subprocess.Popen(
153 153 cmd,
154 154 shell=True,
155 155 stdout=subprocess.PIPE,
156 156 stderr=subprocess.STDOUT,
157 157 env=env,
158 158 )
159 159 s = p.communicate()[0]
160 160 ret = p.returncode
161 161 return (ignorestatus or not ret) and r.search(s)
162 162
163 163
164 164 @check("baz", "GNU Arch baz client")
165 165 def has_baz():
166 166 return matchoutput('baz --version 2>&1', br'baz Bazaar version')
167 167
168 168
169 169 @check("bzr", "Breezy library and executable version >= 3.1")
170 170 def has_bzr():
171 171 if not is_not_python2:
172 172 return False
173 173 try:
174 174 # Test the Breezy python lib
175 175 import breezy
176 176 import breezy.bzr.bzrdir
177 177 import breezy.errors
178 178 import breezy.revision
179 179 import breezy.revisionspec
180 180
181 181 breezy.revisionspec.RevisionSpec
182 182 if breezy.__doc__ is None or breezy.version_info[:2] < (3, 1):
183 183 return False
184 184 except (AttributeError, ImportError):
185 185 return False
186 186 # Test the executable
187 187 return matchoutput('brz --version 2>&1', br'Breezy \(brz\) ')
188 188
189 189
190 190 @check("chg", "running with chg")
191 191 def has_chg():
192 192 return 'CHG_INSTALLED_AS_HG' in os.environ
193 193
194 194
195 195 @check("rhg", "running with rhg as 'hg'")
196 196 def has_rhg():
197 197 return 'RHG_INSTALLED_AS_HG' in os.environ
198 198
199 199
200 200 @check("pyoxidizer", "running with pyoxidizer build as 'hg'")
201 201 def has_pyoxidizer():
202 202 return 'PYOXIDIZED_INSTALLED_AS_HG' in os.environ
203 203
204 204
205 205 @check("cvs", "cvs client/server")
206 206 def has_cvs():
207 207 re = br'Concurrent Versions System.*?server'
208 208 return matchoutput('cvs --version 2>&1', re) and not has_msys()
209 209
210 210
211 211 @check("cvs112", "cvs client/server 1.12.* (not cvsnt)")
212 212 def has_cvs112():
213 213 re = br'Concurrent Versions System \(CVS\) 1.12.*?server'
214 214 return matchoutput('cvs --version 2>&1', re) and not has_msys()
215 215
216 216
217 217 @check("cvsnt", "cvsnt client/server")
218 218 def has_cvsnt():
219 219 re = br'Concurrent Versions System \(CVSNT\) (\d+).(\d+).*\(client/server\)'
220 220 return matchoutput('cvsnt --version 2>&1', re)
221 221
222 222
223 223 @check("darcs", "darcs client")
224 224 def has_darcs():
225 225 return matchoutput('darcs --version', br'\b2\.([2-9]|\d{2})', True)
226 226
227 227
228 228 @check("mtn", "monotone client (>= 1.0)")
229 229 def has_mtn():
230 230 return matchoutput('mtn --version', br'monotone', True) and not matchoutput(
231 231 'mtn --version', br'monotone 0\.', True
232 232 )
233 233
234 234
235 235 @check("eol-in-paths", "end-of-lines in paths")
236 236 def has_eol_in_paths():
237 237 try:
238 238 fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix, suffix='\n\r')
239 239 os.close(fd)
240 240 os.remove(path)
241 241 return True
242 242 except (IOError, OSError):
243 243 return False
244 244
245 245
246 246 @check("execbit", "executable bit")
247 247 def has_executablebit():
248 248 try:
249 249 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
250 250 fh, fn = tempfile.mkstemp(dir='.', prefix=tempprefix)
251 251 try:
252 252 os.close(fh)
253 253 m = os.stat(fn).st_mode & 0o777
254 254 new_file_has_exec = m & EXECFLAGS
255 255 os.chmod(fn, m ^ EXECFLAGS)
256 256 exec_flags_cannot_flip = (os.stat(fn).st_mode & 0o777) == m
257 257 finally:
258 258 os.unlink(fn)
259 259 except (IOError, OSError):
260 260 # we don't care, the user probably won't be able to commit anyway
261 261 return False
262 262 return not (new_file_has_exec or exec_flags_cannot_flip)
263 263
264 264
265 265 @check("suidbit", "setuid and setgid bit")
266 266 def has_suidbit():
267 267 if (
268 268 getattr(os, "statvfs", None) is None
269 269 or getattr(os, "ST_NOSUID", None) is None
270 270 ):
271 271 return False
272 272 return bool(os.statvfs('.').f_flag & os.ST_NOSUID)
273 273
274 274
275 275 @check("icasefs", "case insensitive file system")
276 276 def has_icasefs():
277 277 # Stolen from mercurial.util
278 278 fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix)
279 279 os.close(fd)
280 280 try:
281 281 s1 = os.stat(path)
282 282 d, b = os.path.split(path)
283 283 p2 = os.path.join(d, b.upper())
284 284 if path == p2:
285 285 p2 = os.path.join(d, b.lower())
286 286 try:
287 287 s2 = os.stat(p2)
288 288 return s2 == s1
289 289 except OSError:
290 290 return False
291 291 finally:
292 292 os.remove(path)
293 293
294 294
295 295 @check("fifo", "named pipes")
296 296 def has_fifo():
297 297 if getattr(os, "mkfifo", None) is None:
298 298 return False
299 299 name = tempfile.mktemp(dir='.', prefix=tempprefix)
300 300 try:
301 301 os.mkfifo(name)
302 302 os.unlink(name)
303 303 return True
304 304 except OSError:
305 305 return False
306 306
307 307
308 308 @check("killdaemons", 'killdaemons.py support')
309 309 def has_killdaemons():
310 310 return True
311 311
312 312
313 313 @check("cacheable", "cacheable filesystem")
314 314 def has_cacheable_fs():
315 315 from mercurial import util
316 316
317 317 fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix)
318 318 os.close(fd)
319 319 try:
320 320 return util.cachestat(path).cacheable()
321 321 finally:
322 322 os.remove(path)
323 323
324 324
325 325 @check("lsprof", "python lsprof module")
326 326 def has_lsprof():
327 327 try:
328 328 import _lsprof
329 329
330 330 _lsprof.Profiler # silence unused import warning
331 331 return True
332 332 except ImportError:
333 333 return False
334 334
335 335
336 336 def _gethgversion():
337 337 m = matchoutput('hg --version --quiet 2>&1', br'(\d+)\.(\d+)')
338 338 if not m:
339 339 return (0, 0)
340 340 return (int(m.group(1)), int(m.group(2)))
341 341
342 342
343 343 _hgversion = None
344 344
345 345
346 346 def gethgversion():
347 347 global _hgversion
348 348 if _hgversion is None:
349 349 _hgversion = _gethgversion()
350 350 return _hgversion
351 351
352 352
353 353 @checkvers(
354 354 "hg", "Mercurial >= %s", list([(1.0 * x) / 10 for x in range(9, 99)])
355 355 )
356 356 def has_hg_range(v):
357 357 major, minor = v.split('.')[0:2]
358 358 return gethgversion() >= (int(major), int(minor))
359 359
360 360
361 361 @check("rust", "Using the Rust extensions")
362 362 def has_rust():
363 363 """Check is the mercurial currently running is using some rust code"""
364 364 cmd = 'hg debuginstall --quiet 2>&1'
365 365 match = br'checking module policy \(([^)]+)\)'
366 366 policy = matchoutput(cmd, match)
367 367 if not policy:
368 368 return False
369 369 return b'rust' in policy.group(1)
370 370
371 371
372 372 @check("hg08", "Mercurial >= 0.8")
373 373 def has_hg08():
374 374 if checks["hg09"][0]():
375 375 return True
376 376 return matchoutput('hg help annotate 2>&1', '--date')
377 377
378 378
379 379 @check("hg07", "Mercurial >= 0.7")
380 380 def has_hg07():
381 381 if checks["hg08"][0]():
382 382 return True
383 383 return matchoutput('hg --version --quiet 2>&1', 'Mercurial Distributed SCM')
384 384
385 385
386 386 @check("hg06", "Mercurial >= 0.6")
387 387 def has_hg06():
388 388 if checks["hg07"][0]():
389 389 return True
390 390 return matchoutput('hg --version --quiet 2>&1', 'Mercurial version')
391 391
392 392
393 393 @check("gettext", "GNU Gettext (msgfmt)")
394 394 def has_gettext():
395 395 return matchoutput('msgfmt --version', br'GNU gettext-tools')
396 396
397 397
398 398 @check("git", "git command line client")
399 399 def has_git():
400 400 return matchoutput('git --version 2>&1', br'^git version')
401 401
402 402
403 403 def getgitversion():
404 404 m = matchoutput('git --version 2>&1', br'git version (\d+)\.(\d+)')
405 405 if not m:
406 406 return (0, 0)
407 407 return (int(m.group(1)), int(m.group(2)))
408 408
409 409
410 410 @check("pygit2", "pygit2 Python library")
411 411 def has_pygit2():
412 412 try:
413 413 import pygit2
414 414
415 415 pygit2.Oid # silence unused import
416 416 return True
417 417 except ImportError:
418 418 return False
419 419
420 420
421 421 # https://github.com/git-lfs/lfs-test-server
422 422 @check("lfs-test-server", "git-lfs test server")
423 423 def has_lfsserver():
424 424 exe = 'lfs-test-server'
425 425 if has_windows():
426 426 exe = 'lfs-test-server.exe'
427 427 return any(
428 428 os.access(os.path.join(path, exe), os.X_OK)
429 429 for path in os.environ["PATH"].split(os.pathsep)
430 430 )
431 431
432 432
433 433 @checkvers("git", "git client (with ext::sh support) version >= %s", (1.9,))
434 434 def has_git_range(v):
435 435 major, minor = v.split('.')[0:2]
436 436 return getgitversion() >= (int(major), int(minor))
437 437
438 438
439 439 @check("docutils", "Docutils text processing library")
440 440 def has_docutils():
441 441 try:
442 442 import docutils.core
443 443
444 444 docutils.core.publish_cmdline # silence unused import
445 445 return True
446 446 except ImportError:
447 447 return False
448 448
449 449
450 450 def getsvnversion():
451 451 m = matchoutput('svn --version --quiet 2>&1', br'^(\d+)\.(\d+)')
452 452 if not m:
453 453 return (0, 0)
454 454 return (int(m.group(1)), int(m.group(2)))
455 455
456 456
457 457 @checkvers("svn", "subversion client and admin tools >= %s", (1.3, 1.5))
458 458 def has_svn_range(v):
459 459 major, minor = v.split('.')[0:2]
460 460 return getsvnversion() >= (int(major), int(minor))
461 461
462 462
463 463 @check("svn", "subversion client and admin tools")
464 464 def has_svn():
465 465 return matchoutput('svn --version 2>&1', br'^svn, version') and matchoutput(
466 466 'svnadmin --version 2>&1', br'^svnadmin, version'
467 467 )
468 468
469 469
470 470 @check("svn-bindings", "subversion python bindings")
471 471 def has_svn_bindings():
472 472 try:
473 473 import svn.core
474 474
475 475 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
476 476 if version < (1, 4):
477 477 return False
478 478 return True
479 479 except ImportError:
480 480 return False
481 481
482 482
483 483 @check("p4", "Perforce server and client")
484 484 def has_p4():
485 485 return matchoutput('p4 -V', br'Rev\. P4/') and matchoutput(
486 486 'p4d -V', br'Rev\. P4D/'
487 487 )
488 488
489 489
490 490 @check("symlink", "symbolic links")
491 491 def has_symlink():
492 492 # mercurial.windows.checklink() is a hard 'no' at the moment
493 493 if os.name == 'nt' or getattr(os, "symlink", None) is None:
494 494 return False
495 495 name = tempfile.mktemp(dir='.', prefix=tempprefix)
496 496 try:
497 497 os.symlink(".", name)
498 498 os.unlink(name)
499 499 return True
500 500 except (OSError, AttributeError):
501 501 return False
502 502
503 503
504 504 @check("hardlink", "hardlinks")
505 505 def has_hardlink():
506 506 from mercurial import util
507 507
508 508 fh, fn = tempfile.mkstemp(dir='.', prefix=tempprefix)
509 509 os.close(fh)
510 510 name = tempfile.mktemp(dir='.', prefix=tempprefix)
511 511 try:
512 512 util.oslink(_sys2bytes(fn), _sys2bytes(name))
513 513 os.unlink(name)
514 514 return True
515 515 except OSError:
516 516 return False
517 517 finally:
518 518 os.unlink(fn)
519 519
520 520
521 521 @check("hardlink-whitelisted", "hardlinks on whitelisted filesystems")
522 522 def has_hardlink_whitelisted():
523 523 from mercurial import util
524 524
525 525 try:
526 526 fstype = util.getfstype(b'.')
527 527 except OSError:
528 528 return False
529 529 return fstype in util._hardlinkfswhitelist
530 530
531 531
532 532 @check("rmcwd", "can remove current working directory")
533 533 def has_rmcwd():
534 534 ocwd = os.getcwd()
535 535 temp = tempfile.mkdtemp(dir='.', prefix=tempprefix)
536 536 try:
537 537 os.chdir(temp)
538 538 # On Linux, 'rmdir .' isn't allowed, but the other names are okay.
539 539 # On Solaris and Windows, the cwd can't be removed by any names.
540 540 os.rmdir(os.getcwd())
541 541 return True
542 542 except OSError:
543 543 return False
544 544 finally:
545 545 os.chdir(ocwd)
546 546 # clean up temp dir on platforms where cwd can't be removed
547 547 try:
548 548 os.rmdir(temp)
549 549 except OSError:
550 550 pass
551 551
552 552
553 553 @check("tla", "GNU Arch tla client")
554 554 def has_tla():
555 555 return matchoutput('tla --version 2>&1', br'The GNU Arch Revision')
556 556
557 557
558 558 @check("gpg", "gpg client")
559 559 def has_gpg():
560 560 return matchoutput('gpg --version 2>&1', br'GnuPG')
561 561
562 562
563 563 @check("gpg2", "gpg client v2")
564 564 def has_gpg2():
565 565 return matchoutput('gpg --version 2>&1', br'GnuPG[^0-9]+2\.')
566 566
567 567
568 568 @check("gpg21", "gpg client v2.1+")
569 569 def has_gpg21():
570 570 return matchoutput('gpg --version 2>&1', br'GnuPG[^0-9]+2\.(?!0)')
571 571
572 572
573 573 @check("unix-permissions", "unix-style permissions")
574 574 def has_unix_permissions():
575 575 d = tempfile.mkdtemp(dir='.', prefix=tempprefix)
576 576 try:
577 577 fname = os.path.join(d, 'foo')
578 578 for umask in (0o77, 0o07, 0o22):
579 579 os.umask(umask)
580 580 f = open(fname, 'w')
581 581 f.close()
582 582 mode = os.stat(fname).st_mode
583 583 os.unlink(fname)
584 584 if mode & 0o777 != ~umask & 0o666:
585 585 return False
586 586 return True
587 587 finally:
588 588 os.rmdir(d)
589 589
590 590
591 591 @check("unix-socket", "AF_UNIX socket family")
592 592 def has_unix_socket():
593 593 return getattr(socket, 'AF_UNIX', None) is not None
594 594
595 595
596 596 @check("root", "root permissions")
597 597 def has_root():
598 598 return getattr(os, 'geteuid', None) and os.geteuid() == 0
599 599
600 600
601 601 @check("pyflakes", "Pyflakes python linter")
602 602 def has_pyflakes():
603 603 try:
604 604 import pyflakes
605 605
606 606 pyflakes.__version__
607 607 except ImportError:
608 608 return False
609 609 else:
610 610 return True
611 611
612 612
613 613 @check("pylint", "Pylint python linter")
614 614 def has_pylint():
615 615 return matchoutput("pylint --help", br"Usage:[ ]+pylint", True)
616 616
617 617
618 618 @check("clang-format", "clang-format C code formatter (>= 11)")
619 619 def has_clang_format():
620 620 m = matchoutput('clang-format --version', br'clang-format version (\d+)')
621 621 # style changed somewhere between 10.x and 11.x
622 622 if m:
623 623 return int(m.group(1)) >= 11
624 624 # Assist Googler contributors: they have a centrally-maintained version of
625 625 # clang-format that is generally very fresh, but unlike most builds (both
626 626 # official and unofficial), it does *not* include a version number.
627 627 return matchoutput(
628 628 'clang-format --version', br'clang-format .*google3-trunk \([0-9a-f]+\)'
629 629 )
630 630
631 631
632 632 @check("jshint", "JSHint static code analysis tool")
633 633 def has_jshint():
634 634 return matchoutput("jshint --version 2>&1", br"jshint v")
635 635
636 636
637 637 @check("pygments", "Pygments source highlighting library")
638 638 def has_pygments():
639 639 try:
640 640 import pygments
641 641
642 642 pygments.highlight # silence unused import warning
643 643 return True
644 644 except ImportError:
645 645 return False
646 646
647 647
648 648 @check("pygments25", "Pygments version >= 2.5")
649 649 def pygments25():
650 650 try:
651 651 import pygments
652 652
653 653 v = pygments.__version__
654 654 except ImportError:
655 655 return False
656 656
657 657 parts = v.split(".")
658 658 major = int(parts[0])
659 659 minor = int(parts[1])
660 660
661 661 return (major, minor) >= (2, 5)
662 662
663 663
664 664 @check("pygments211", "Pygments version >= 2.11")
665 665 def pygments211():
666 666 try:
667 667 import pygments
668 668
669 669 v = pygments.__version__
670 670 except ImportError:
671 671 return False
672 672
673 673 parts = v.split(".")
674 674 major = int(parts[0])
675 675 minor = int(parts[1])
676 676
677 677 return (major, minor) >= (2, 11)
678 678
679 679
680 680 @check("outer-repo", "outer repo")
681 681 def has_outer_repo():
682 682 # failing for reasons other than 'no repo' implies that there is a repo
683 683 return not matchoutput('hg root 2>&1', br'abort: no repository found', True)
684 684
685 685
686 686 @check("ssl", "ssl module available")
687 687 def has_ssl():
688 688 try:
689 689 import ssl
690 690
691 691 ssl.CERT_NONE
692 692 return True
693 693 except ImportError:
694 694 return False
695 695
696 696
697 697 @check("defaultcacertsloaded", "detected presence of loaded system CA certs")
698 698 def has_defaultcacertsloaded():
699 699 import ssl
700 700 from mercurial import sslutil, ui as uimod
701 701
702 702 ui = uimod.ui.load()
703 703 cafile = sslutil._defaultcacerts(ui)
704 704 ctx = ssl.create_default_context()
705 705 if cafile:
706 706 ctx.load_verify_locations(cafile=cafile)
707 707 else:
708 708 ctx.load_default_certs()
709 709
710 710 return len(ctx.get_ca_certs()) > 0
711 711
712 712
713 713 @check("tls1.2", "TLS 1.2 protocol support")
714 714 def has_tls1_2():
715 715 from mercurial import sslutil
716 716
717 717 return b'tls1.2' in sslutil.supportedprotocols
718 718
719 719
720 720 @check("windows", "Windows")
721 721 def has_windows():
722 722 return os.name == 'nt'
723 723
724 724
725 725 @check("system-sh", "system() uses sh")
726 726 def has_system_sh():
727 727 return os.name != 'nt'
728 728
729 729
730 730 @check("serve", "platform and python can manage 'hg serve -d'")
731 731 def has_serve():
732 732 return True
733 733
734 734
735 735 @check("setprocname", "whether osutil.setprocname is available or not")
736 736 def has_setprocname():
737 737 try:
738 738 from mercurial.utils import procutil
739 739
740 740 procutil.setprocname
741 741 return True
742 742 except AttributeError:
743 743 return False
744 744
745 745
746 746 @check("test-repo", "running tests from repository")
747 747 def has_test_repo():
748 748 t = os.environ["TESTDIR"]
749 749 return os.path.isdir(os.path.join(t, "..", ".hg"))
750 750
751 751
752 752 @check("network-io", "whether tests are allowed to access 3rd party services")
753 753 def has_network_io():
754 754 t = os.environ.get("HGTESTS_ALLOW_NETIO")
755 755 return t == "1"
756 756
757 757
758 758 @check("curses", "terminfo compiler and curses module")
759 759 def has_curses():
760 760 try:
761 761 import curses
762 762
763 763 curses.COLOR_BLUE
764 764
765 765 # Windows doesn't have a `tic` executable, but the windows_curses
766 766 # package is sufficient to run the tests without it.
767 767 if os.name == 'nt':
768 768 return True
769 769
770 770 return has_tic()
771 771
772 772 except (ImportError, AttributeError):
773 773 return False
774 774
775 775
776 776 @check("tic", "terminfo compiler")
777 777 def has_tic():
778 778 return matchoutput('test -x "`which tic`"', br'')
779 779
780 780
781 781 @check("xz", "xz compression utility")
782 782 def has_xz():
783 783 # When Windows invokes a subprocess in shell mode, it uses `cmd.exe`, which
784 784 # only knows `where`, not `which`. So invoke MSYS shell explicitly.
785 785 return matchoutput("sh -c 'test -x \"`which xz`\"'", b'')
786 786
787 787
788 788 @check("msys", "Windows with MSYS")
789 789 def has_msys():
790 790 return os.getenv('MSYSTEM')
791 791
792 792
793 793 @check("aix", "AIX")
794 794 def has_aix():
795 795 return sys.platform.startswith("aix")
796 796
797 797
798 798 @check("osx", "OS X")
799 799 def has_osx():
800 800 return sys.platform == 'darwin'
801 801
802 802
803 803 @check("osxpackaging", "OS X packaging tools")
804 804 def has_osxpackaging():
805 805 try:
806 806 return (
807 807 matchoutput('pkgbuild', br'Usage: pkgbuild ', ignorestatus=1)
808 808 and matchoutput(
809 809 'productbuild', br'Usage: productbuild ', ignorestatus=1
810 810 )
811 811 and matchoutput('lsbom', br'Usage: lsbom', ignorestatus=1)
812 812 and matchoutput('xar --help', br'Usage: xar', ignorestatus=1)
813 813 )
814 814 except ImportError:
815 815 return False
816 816
817 817
818 818 @check('linuxormacos', 'Linux or MacOS')
819 819 def has_linuxormacos():
820 820 # This isn't a perfect test for MacOS. But it is sufficient for our needs.
821 821 return sys.platform.startswith(('linux', 'darwin'))
822 822
823 823
824 824 @check("docker", "docker support")
825 825 def has_docker():
826 826 pat = br'A self-sufficient runtime for'
827 827 if matchoutput('docker --help', pat):
828 828 if 'linux' not in sys.platform:
829 829 # TODO: in theory we should be able to test docker-based
830 830 # package creation on non-linux using boot2docker, but in
831 831 # practice that requires extra coordination to make sure
832 832 # $TESTTEMP is going to be visible at the same path to the
833 833 # boot2docker VM. If we figure out how to verify that, we
834 834 # can use the following instead of just saying False:
835 835 # return 'DOCKER_HOST' in os.environ
836 836 return False
837 837
838 838 return True
839 839 return False
840 840
841 841
842 842 @check("debhelper", "debian packaging tools")
843 843 def has_debhelper():
844 844 # Some versions of dpkg say `dpkg', some say 'dpkg' (` vs ' on the first
845 845 # quote), so just accept anything in that spot.
846 846 dpkg = matchoutput(
847 847 'dpkg --version', br"Debian .dpkg' package management program"
848 848 )
849 849 dh = matchoutput(
850 850 'dh --help', br'dh is a part of debhelper.', ignorestatus=True
851 851 )
852 852 dh_py2 = matchoutput(
853 853 'dh_python2 --help', br'other supported Python versions'
854 854 )
855 855 # debuild comes from the 'devscripts' package, though you might want
856 856 # the 'build-debs' package instead, which has a dependency on devscripts.
857 857 debuild = matchoutput(
858 858 'debuild --help', br'to run debian/rules with given parameter'
859 859 )
860 860 return dpkg and dh and dh_py2 and debuild
861 861
862 862
863 863 @check(
864 864 "debdeps", "debian build dependencies (run dpkg-checkbuilddeps in contrib/)"
865 865 )
866 866 def has_debdeps():
867 867 # just check exit status (ignoring output)
868 868 path = '%s/../contrib/packaging/debian/control' % os.environ['TESTDIR']
869 869 return matchoutput('dpkg-checkbuilddeps %s' % path, br'')
870 870
871 871
872 872 @check("demandimport", "demandimport enabled")
873 873 def has_demandimport():
874 874 # chg disables demandimport intentionally for performance wins.
875 875 return (not has_chg()) and os.environ.get('HGDEMANDIMPORT') != 'disable'
876 876
877 877
878 878 # Add "py27", "py35", ... as possible feature checks. Note that there's no
879 879 # punctuation here.
880 880 @checkvers("py", "Python >= %s", (2.7, 3.5, 3.6, 3.7, 3.8, 3.9))
881 881 def has_python_range(v):
882 882 major, minor = v.split('.')[0:2]
883 883 py_major, py_minor = sys.version_info.major, sys.version_info.minor
884 884
885 885 return (py_major, py_minor) >= (int(major), int(minor))
886 886
887 887
888 888 @check("py3", "running with Python 3.x")
889 889 def has_py3():
890 890 return 3 == sys.version_info[0]
891 891
892 892
893 893 @check("py3exe", "a Python 3.x interpreter is available")
894 894 def has_python3exe():
895 895 py = 'python3'
896 896 if os.name == 'nt':
897 897 py = 'py -3'
898 898 return matchoutput('%s -V' % py, br'^Python 3.(5|6|7|8|9)')
899 899
900 900
901 901 @check("pure", "running with pure Python code")
902 902 def has_pure():
903 903 return any(
904 904 [
905 905 os.environ.get("HGMODULEPOLICY") == "py",
906 906 os.environ.get("HGTEST_RUN_TESTS_PURE") == "--pure",
907 907 ]
908 908 )
909 909
910 910
911 911 @check("slow", "allow slow tests (use --allow-slow-tests)")
912 912 def has_slow():
913 913 return os.environ.get('HGTEST_SLOW') == 'slow'
914 914
915 915
916 916 @check("hypothesis", "Hypothesis automated test generation")
917 917 def has_hypothesis():
918 918 try:
919 919 import hypothesis
920 920
921 921 hypothesis.given
922 922 return True
923 923 except ImportError:
924 924 return False
925 925
926 926
927 927 @check("unziplinks", "unzip(1) understands and extracts symlinks")
928 928 def unzip_understands_symlinks():
929 929 return matchoutput('unzip --help', br'Info-ZIP')
930 930
931 931
932 932 @check("zstd", "zstd Python module available")
933 933 def has_zstd():
934 934 try:
935 935 import mercurial.zstd
936 936
937 937 mercurial.zstd.__version__
938 938 return True
939 939 except ImportError:
940 940 return False
941 941
942 942
943 943 @check("devfull", "/dev/full special file")
944 944 def has_dev_full():
945 945 return os.path.exists('/dev/full')
946 946
947 947
948 948 @check("ensurepip", "ensurepip module")
949 949 def has_ensurepip():
950 950 try:
951 951 import ensurepip
952 952
953 953 ensurepip.bootstrap
954 954 return True
955 955 except ImportError:
956 956 return False
957 957
958 958
959 959 @check("virtualenv", "virtualenv support")
960 960 def has_virtualenv():
961 961 try:
962 962 import virtualenv
963 963
964 964 # --no-site-package became the default in 1.7 (Nov 2011), and the
965 965 # argument was removed in 20.0 (Feb 2020). Rather than make the
966 966 # script complicated, just ignore ancient versions.
967 967 return int(virtualenv.__version__.split('.')[0]) > 1
968 968 except (AttributeError, ImportError, IndexError):
969 969 return False
970 970
971 971
972 972 @check("fsmonitor", "running tests with fsmonitor")
973 973 def has_fsmonitor():
974 974 return 'HGFSMONITOR_TESTS' in os.environ
975 975
976 976
977 977 @check("fuzzywuzzy", "Fuzzy string matching library")
978 978 def has_fuzzywuzzy():
979 979 try:
980 980 import fuzzywuzzy
981 981
982 982 fuzzywuzzy.__version__
983 983 return True
984 984 except ImportError:
985 985 return False
986 986
987 987
988 988 @check("clang-libfuzzer", "clang new enough to include libfuzzer")
989 989 def has_clang_libfuzzer():
990 990 mat = matchoutput('clang --version', br'clang version (\d)')
991 991 if mat:
992 992 # libfuzzer is new in clang 6
993 993 return int(mat.group(1)) > 5
994 994 return False
995 995
996 996
997 997 @check("clang-6.0", "clang 6.0 with version suffix (libfuzzer included)")
998 998 def has_clang60():
999 999 return matchoutput('clang-6.0 --version', br'clang version 6\.')
1000 1000
1001 1001
1002 1002 @check("xdiff", "xdiff algorithm")
1003 1003 def has_xdiff():
1004 1004 try:
1005 1005 from mercurial import policy
1006 1006
1007 1007 bdiff = policy.importmod('bdiff')
1008 1008 return bdiff.xdiffblocks(b'', b'') == [(0, 0, 0, 0)]
1009 1009 except (ImportError, AttributeError):
1010 1010 return False
1011 1011
1012 1012
1013 1013 @check('extraextensions', 'whether tests are running with extra extensions')
1014 1014 def has_extraextensions():
1015 1015 return 'HGTESTEXTRAEXTENSIONS' in os.environ
1016 1016
1017 1017
1018 1018 def getrepofeatures():
1019 1019 """Obtain set of repository features in use.
1020 1020
1021 1021 HGREPOFEATURES can be used to define or remove features. It contains
1022 1022 a space-delimited list of feature strings. Strings beginning with ``-``
1023 1023 mean to remove.
1024 1024 """
1025 1025 # Default list provided by core.
1026 1026 features = {
1027 1027 'bundlerepo',
1028 1028 'revlogstore',
1029 1029 'fncache',
1030 1030 }
1031 1031
1032 1032 # Features that imply other features.
1033 1033 implies = {
1034 1034 'simplestore': ['-revlogstore', '-bundlerepo', '-fncache'],
1035 1035 }
1036 1036
1037 1037 for override in os.environ.get('HGREPOFEATURES', '').split(' '):
1038 1038 if not override:
1039 1039 continue
1040 1040
1041 1041 if override.startswith('-'):
1042 1042 if override[1:] in features:
1043 1043 features.remove(override[1:])
1044 1044 else:
1045 1045 features.add(override)
1046 1046
1047 1047 for imply in implies.get(override, []):
1048 1048 if imply.startswith('-'):
1049 1049 if imply[1:] in features:
1050 1050 features.remove(imply[1:])
1051 1051 else:
1052 1052 features.add(imply)
1053 1053
1054 1054 return features
1055 1055
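# For example (hypothetical environment value), HGREPOFEATURES set to
# "simplestore -fncache" adds 'simplestore', drops 'fncache', and the implies
# table above then also removes 'revlogstore' and 'bundlerepo'.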
1056 1056
1057 1057 @check('reporevlogstore', 'repository using the default revlog store')
1058 1058 def has_reporevlogstore():
1059 1059 return 'revlogstore' in getrepofeatures()
1060 1060
1061 1061
1062 1062 @check('reposimplestore', 'repository using simple storage extension')
1063 1063 def has_reposimplestore():
1064 1064 return 'simplestore' in getrepofeatures()
1065 1065
1066 1066
1067 1067 @check('repobundlerepo', 'whether we can open bundle files as repos')
1068 1068 def has_repobundlerepo():
1069 1069 return 'bundlerepo' in getrepofeatures()
1070 1070
1071 1071
1072 1072 @check('repofncache', 'repository has an fncache')
1073 1073 def has_repofncache():
1074 1074 return 'fncache' in getrepofeatures()
1075 1075
1076 1076
1077 1077 @check('dirstate-v2', 'using the v2 format of .hg/dirstate')
1078 1078 def has_dirstate_v2():
1079 1079 # Keep this logic in sync with `newreporequirements()` in `mercurial/localrepo.py`
1080 1080 return has_rust() and matchoutput(
1081 1081 'hg config format.exp-rc-dirstate-v2', b'(?i)1|yes|true|on|always'
1082 1082 )
1083 1083
1084 1084
1085 1085 @check('sqlite', 'sqlite3 module and matching cli is available')
1086 1086 def has_sqlite():
1087 1087 try:
1088 1088 import sqlite3
1089 1089
1090 1090 version = sqlite3.sqlite_version_info
1091 1091 except ImportError:
1092 1092 return False
1093 1093
1094 1094 if version < (3, 8, 3):
1095 1095 # WITH clause not supported
1096 1096 return False
1097 1097
1098 1098 return matchoutput('sqlite3 -version', br'^3\.\d+')
1099 1099
1100 1100
1101 1101 @check('vcr', 'vcr http mocking library (pytest-vcr)')
1102 1102 def has_vcr():
1103 1103 try:
1104 1104 import vcr
1105 1105
1106 1106 vcr.VCR
1107 1107 return True
1108 1108 except (ImportError, AttributeError):
1109 1109 pass
1110 1110 return False
1111 1111
1112 1112
1113 1113 @check('emacs', 'GNU Emacs')
1114 1114 def has_emacs():
1115 1115 # Our emacs lisp uses `with-eval-after-load` which is new in emacs
1116 1116 # 24.4, so we allow emacs 24.4, 24.5, and 25+ (24.5 was the last
1117 1117 # 24 release)
1118 1118 return matchoutput('emacs --version', b'GNU Emacs 2(4.4|4.5|5|6|7|8|9)')
1119 1119
1120 1120
1121 1121 @check('black', 'the black formatter for python (>= 20.8b1)')
1122 1122 def has_black():
1123 1123 blackcmd = 'black --version'
1124 version_regex = b'black, version ([0-9a-b.]+)'
1124 version_regex = b'black, (?:version )?([0-9a-b.]+)'
1125 1125 version = matchoutput(blackcmd, version_regex)
1126 1126 sv = distutils.version.StrictVersion
1127 1127 return version and sv(_bytes2sys(version.group(1))) >= sv('20.8b1')
1128 1128
1129 1129
1130 1130 @check('pytype', 'the pytype type checker')
1131 1131 def has_pytype():
1132 1132 pytypecmd = 'pytype --version'
1133 1133 version = matchoutput(pytypecmd, b'[0-9a-b.]+')
1134 1134 sv = distutils.version.StrictVersion
1135 1135 return version and sv(_bytes2sys(version.group(0))) >= sv('2019.10.17')
1136 1136
1137 1137
1138 1138 @check("rustfmt", "rustfmt tool at version nightly-2021-11-02")
1139 1139 def has_rustfmt():
1140 1140 # We use Nightly's rustfmt due to current unstable config options.
1141 1141 return matchoutput(
1142 1142 '`rustup which --toolchain nightly-2021-11-02 rustfmt` --version',
1143 1143 b'rustfmt',
1144 1144 )
1145 1145
1146 1146
1147 1147 @check("cargo", "cargo tool")
1148 1148 def has_cargo():
1149 1149 return matchoutput('`rustup which cargo` --version', b'cargo')
1150 1150
1151 1151
1152 1152 @check("lzma", "python lzma module")
1153 1153 def has_lzma():
1154 1154 try:
1155 1155 import _lzma
1156 1156
1157 1157 _lzma.FORMAT_XZ
1158 1158 return True
1159 1159 except ImportError:
1160 1160 return False
1161 1161
1162 1162
1163 1163 @check("bash", "bash shell")
1164 1164 def has_bash():
1165 1165 return matchoutput("bash -c 'echo hi'", b'^hi$')
1166 1166
1167 1167
1168 1168 @check("bigendian", "big-endian CPU")
1169 1169 def has_bigendian():
1170 1170 return sys.byteorder == 'big'
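The hghave.py hunk above follows one pattern throughout: a predicate decorated with @check probes for an external tool or repository feature, usually via matchoutput, while getrepofeatures() applies HGREPOFEATURES overrides (for example, 'simplestore' drops revlogstore, bundlerepo and fncache through the implies table). As a hedged illustration only, with an invented 'jq' feature name and command that are not part of the real file, a new check would look like this:

    @check('jq', 'the jq JSON processor')
    def has_jq():
        # matchoutput runs the command and matches its (bytes) output
        # against the regular expression, returning a false value on failure.
        return matchoutput('jq --version', br'jq-\d+\.\d+')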
@@ -1,122 +1,131 b''
1 1 #require serve
2 2
3 3 $ hg init a
4 4 $ cd a
5 5 $ echo a > a
6 6 $ hg ci -Ama -d '1123456789 0'
7 7 adding a
8 8 $ hg serve --config server.uncompressed=True -p $HGPORT -d --pid-file=hg.pid
9 9 $ cat hg.pid >> $DAEMON_PIDS
10 10 $ cd ..
11 11 $ tinyproxy.py $HGPORT1 localhost 2>proxy.log >/dev/null </dev/null &
12 12 $ while [ ! -f proxy.pid ]; do sleep 0; done
13 13 $ cat proxy.pid >> $DAEMON_PIDS
14 14
15 15 url for proxy, stream
16 16
17 17 $ http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone --stream http://localhost:$HGPORT/ b
18 18 streaming all changes
19 19 6 files to transfer, 412 bytes of data (reporevlogstore !)
20 20 4 files to transfer, 330 bytes of data (reposimplestore !)
21 21 transferred * bytes in * seconds (*/sec) (glob)
22 22 updating to branch default
23 23 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
24 24 $ cd b
25 25 $ hg verify
26 26 checking changesets
27 27 checking manifests
28 28 crosschecking files in changesets and manifests
29 29 checking files
30 30 checked 1 changesets with 1 changes to 1 files
31 31 $ cd ..
32 32
33 33 url for proxy, pull
34 34
35 35 $ http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone http://localhost:$HGPORT/ b-pull
36 36 requesting all changes
37 37 adding changesets
38 38 adding manifests
39 39 adding file changes
40 40 added 1 changesets with 1 changes to 1 files
41 41 new changesets 83180e7845de
42 42 updating to branch default
43 43 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
44 44 $ cd b-pull
45 45 $ hg verify
46 46 checking changesets
47 47 checking manifests
48 48 crosschecking files in changesets and manifests
49 49 checking files
50 50 checked 1 changesets with 1 changes to 1 files
51 51 $ cd ..
52 52
53 53 host:port for proxy
54 54
55 55 $ http_proxy=localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ c
56 56 requesting all changes
57 57 adding changesets
58 58 adding manifests
59 59 adding file changes
60 60 added 1 changesets with 1 changes to 1 files
61 61 new changesets 83180e7845de
62 62 updating to branch default
63 63 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
64 64
65 65 proxy url with user name and password
66 66
67 67 $ http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ d
68 68 requesting all changes
69 69 adding changesets
70 70 adding manifests
71 71 adding file changes
72 72 added 1 changesets with 1 changes to 1 files
73 73 new changesets 83180e7845de
74 74 updating to branch default
75 75 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
76 76
77 77 url with user name and password
78 78
79 79 $ http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://user:passwd@localhost:$HGPORT/ e
80 80 requesting all changes
81 81 adding changesets
82 82 adding manifests
83 83 adding file changes
84 84 added 1 changesets with 1 changes to 1 files
85 85 new changesets 83180e7845de
86 86 updating to branch default
87 87 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
88 88
89 89 bad host:port for proxy ("Protocol not supported" can happen on
90 90 misconfigured hosts)
91 91
92 92 $ http_proxy=localhost:$HGPORT2 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ f
93 93 abort: error: (Connection refused|Protocol not supported|.* actively refused it|\$EADDRNOTAVAIL\$|No route to host) (re)
94 94 [100]
95 95
96 96 do not use the proxy if it is in the no list
97 97
98 98 $ http_proxy=localhost:$HGPORT1 hg clone --config http_proxy.no=localhost http://localhost:$HGPORT/ g
99 99 requesting all changes
100 100 adding changesets
101 101 adding manifests
102 102 adding file changes
103 103 added 1 changesets with 1 changes to 1 files
104 104 new changesets 83180e7845de
105 105 updating to branch default
106 106 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
107
108 proxy can't connect to server
109
110 $ http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone http://localhost:$HGPORT2/ h
111 abort: HTTP Error 404: Connection refused
112 [100]
113
107 114 $ cat proxy.log
108 115 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
109 116 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
110 117 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=0&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&stream=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
111 118 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
112 119 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
113 120 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
114 121 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
115 122 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
116 123 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
117 124 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
118 125 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
119 126 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
120 127 * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob)
121 128 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
122 129 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
130 * - - [*] code 404, message Connection refused (glob)
131 $LOCALIP - - [$LOGDATE$] "GET http://localhost:$HGPORT2/?cmd=capabilities HTTP/1.1" 404 - (glob)
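The new "proxy can't connect to server" block above depends on the proxy answering with a 404 whose message is the OS error string, which the client then reports as "HTTP Error 404: Connection refused". A minimal sketch of that translation, separate from the real tinyproxy.py change shown further down in this changeset (the class and method names here are illustrative):

    import socket
    from http.server import BaseHTTPRequestHandler

    class SketchProxyHandler(BaseHTTPRequestHandler):
        def _connect_upstream(self, host, port):
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                sock.connect((host, port))
            except OSError as e:
                # e.strerror is e.g. "Connection refused"; the client then
                # sees "HTTP Error 404: Connection refused".
                self.send_error(404, e.strerror)
                return None
            return sock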
@@ -1,555 +1,562 b''
1 1 #require serve ssl
2 2
3 3 A proper https client requires the built-in ssl module from Python 2.6.
4 4
5 5 Disable the system configuration which may set stricter TLS requirements.
6 6 This test expects that legacy TLS versions are supported.
7 7
8 8 $ OPENSSL_CONF=
9 9 $ export OPENSSL_CONF
10 10
11 11 Make server certificates:
12 12
13 13 $ CERTSDIR="$TESTDIR/sslcerts"
14 14 $ cat "$CERTSDIR/priv.pem" "$CERTSDIR/pub.pem" >> server.pem
15 15 $ PRIV=`pwd`/server.pem
16 16 $ cat "$CERTSDIR/priv.pem" "$CERTSDIR/pub-not-yet.pem" > server-not-yet.pem
17 17 $ cat "$CERTSDIR/priv.pem" "$CERTSDIR/pub-expired.pem" > server-expired.pem
18 18
19 19 $ hg init test
20 20 $ cd test
21 21 $ echo foo>foo
22 22 $ mkdir foo.d foo.d/bAr.hg.d foo.d/baR.d.hg
23 23 $ echo foo>foo.d/foo
24 24 $ echo bar>foo.d/bAr.hg.d/BaR
25 25 $ echo bar>foo.d/baR.d.hg/bAR
26 26 $ hg commit -A -m 1
27 27 adding foo
28 28 adding foo.d/bAr.hg.d/BaR
29 29 adding foo.d/baR.d.hg/bAR
30 30 adding foo.d/foo
31 31 $ hg serve -p $HGPORT -d --pid-file=../hg0.pid --certificate=$PRIV
32 32 $ cat ../hg0.pid >> $DAEMON_PIDS
33 33
34 34 cacert not found
35 35
36 36 $ hg in --config web.cacerts=no-such.pem https://localhost:$HGPORT/
37 37 abort: could not find web.cacerts: no-such.pem
38 38 [255]
39 39
40 40 Test server address cannot be reused
41 41
42 42 $ hg serve -p $HGPORT --certificate=$PRIV 2>&1
43 43 abort: cannot start server at 'localhost:$HGPORT': $EADDRINUSE$
44 44 [255]
45 45
46 46 $ cd ..
47 47
48 48 Our test cert is not signed by a trusted CA. It should fail to verify if
49 49 we are able to load CA certs.
50 50
51 51 #if no-defaultcacertsloaded
52 52 $ hg clone https://localhost:$HGPORT/ copy-pull
53 53 (an attempt was made to load CA certificates but none were loaded; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error)
54 54 abort: error: *certificate verify failed* (glob)
55 55 [100]
56 56 #endif
57 57
58 58 #if defaultcacertsloaded
59 59 $ hg clone https://localhost:$HGPORT/ copy-pull
60 60 (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
61 61 abort: error: *certificate verify failed* (glob)
62 62 [100]
63 63 #endif
64 64
65 65 Specifying a per-host certificate file that doesn't exist will abort. The full
66 66 C:/path/to/msysroot will print on Windows.
67 67
68 68 $ hg --config hostsecurity.localhost:verifycertsfile=/does/not/exist clone https://localhost:$HGPORT/
69 69 abort: path specified by hostsecurity.localhost:verifycertsfile does not exist: */does/not/exist (glob)
70 70 [255]
71 71
72 72 A malformed per-host certificate file will raise an error
73 73
74 74 $ echo baddata > badca.pem
75 75 $ hg --config hostsecurity.localhost:verifycertsfile=badca.pem clone https://localhost:$HGPORT/
76 76 abort: error loading CA file badca.pem: * (glob)
77 77 (file is empty or malformed?)
78 78 [255]
79 79
80 80 A per-host certificate mismatching the server will fail verification
81 81
82 82 (modern ssl is able to discern whether the loaded cert is a CA cert)
83 83 $ hg --config hostsecurity.localhost:verifycertsfile="$CERTSDIR/client-cert.pem" clone https://localhost:$HGPORT/
84 84 (an attempt was made to load CA certificates but none were loaded; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error)
85 85 (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
86 86 abort: error: *certificate verify failed* (glob)
87 87 [100]
88 88
89 89 A per-host certificate matching the server's cert will be accepted
90 90
91 91 $ hg --config hostsecurity.localhost:verifycertsfile="$CERTSDIR/pub.pem" clone -U https://localhost:$HGPORT/ perhostgood1
92 92 requesting all changes
93 93 adding changesets
94 94 adding manifests
95 95 adding file changes
96 96 added 1 changesets with 4 changes to 4 files
97 97 new changesets 8b6053c928fe
98 98
99 99 A per-host certificate with multiple certs and one matching will be accepted
100 100
101 101 $ cat "$CERTSDIR/client-cert.pem" "$CERTSDIR/pub.pem" > perhost.pem
102 102 $ hg --config hostsecurity.localhost:verifycertsfile=perhost.pem clone -U https://localhost:$HGPORT/ perhostgood2
103 103 requesting all changes
104 104 adding changesets
105 105 adding manifests
106 106 adding file changes
107 107 added 1 changesets with 4 changes to 4 files
108 108 new changesets 8b6053c928fe
109 109
110 110 Defining both per-host certificate and a fingerprint will print a warning
111 111
112 112 $ hg --config hostsecurity.localhost:verifycertsfile="$CERTSDIR/pub.pem" --config hostsecurity.localhost:fingerprints=sha1:ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03 clone -U https://localhost:$HGPORT/ caandfingerwarning
113 113 (hostsecurity.localhost:verifycertsfile ignored when host fingerprints defined; using host fingerprints for verification)
114 114 requesting all changes
115 115 adding changesets
116 116 adding manifests
117 117 adding file changes
118 118 added 1 changesets with 4 changes to 4 files
119 119 new changesets 8b6053c928fe
120 120
121 121 $ DISABLECACERTS="--config devel.disableloaddefaultcerts=true"
122 122
123 123 Inability to verify peer certificate will result in abort
124 124
125 125 $ hg clone https://localhost:$HGPORT/ copy-pull $DISABLECACERTS
126 126 abort: unable to verify security of localhost (no loaded CA certificates); refusing to connect
127 127 (see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error or set hostsecurity.localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e to trust this server)
128 128 [150]
129 129
130 130 $ hg clone --insecure https://localhost:$HGPORT/ copy-pull
131 131 warning: connection security to localhost is disabled per current settings; communication is susceptible to eavesdropping and tampering
132 132 requesting all changes
133 133 adding changesets
134 134 adding manifests
135 135 adding file changes
136 136 added 1 changesets with 4 changes to 4 files
137 137 new changesets 8b6053c928fe
138 138 updating to branch default
139 139 4 files updated, 0 files merged, 0 files removed, 0 files unresolved
140 140 $ hg verify -R copy-pull
141 141 checking changesets
142 142 checking manifests
143 143 crosschecking files in changesets and manifests
144 144 checking files
145 145 checked 1 changesets with 4 changes to 4 files
146 146 $ cd test
147 147 $ echo bar > bar
148 148 $ hg commit -A -d '1 0' -m 2
149 149 adding bar
150 150 $ cd ..
151 151
152 152 pull without cacert
153 153
154 154 $ cd copy-pull
155 155 $ cat >> .hg/hgrc <<EOF
156 156 > [hooks]
157 157 > changegroup = sh -c "printenv.py --line changegroup"
158 158 > EOF
159 159 $ hg pull $DISABLECACERTS
160 160 pulling from https://localhost:$HGPORT/
161 161 abort: unable to verify security of localhost (no loaded CA certificates); refusing to connect
162 162 (see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error or set hostsecurity.localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e to trust this server)
163 163 [150]
164 164
165 165 $ hg pull --insecure
166 166 pulling from https://localhost:$HGPORT/
167 167 warning: connection security to localhost is disabled per current settings; communication is susceptible to eavesdropping and tampering
168 168 searching for changes
169 169 adding changesets
170 170 adding manifests
171 171 adding file changes
172 172 added 1 changesets with 1 changes to 1 files
173 173 new changesets 5fed3813f7f5
174 174 changegroup hook: HG_HOOKNAME=changegroup
175 175 HG_HOOKTYPE=changegroup
176 176 HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d
177 177 HG_NODE_LAST=5fed3813f7f5e1824344fdc9cf8f63bb662c292d
178 178 HG_SOURCE=pull
179 179 HG_TXNID=TXN:$ID$
180 180 HG_TXNNAME=pull
181 181 https://localhost:$HGPORT/
182 182 HG_URL=https://localhost:$HGPORT/
183 183
184 184 (run 'hg update' to get a working copy)
185 185 $ cd ..
186 186
187 187 cacert configured in local repo
188 188
189 189 $ cp copy-pull/.hg/hgrc copy-pull/.hg/hgrc.bu
190 190 $ echo "[web]" >> copy-pull/.hg/hgrc
191 191 $ echo "cacerts=$CERTSDIR/pub.pem" >> copy-pull/.hg/hgrc
192 192 $ hg -R copy-pull pull
193 193 pulling from https://localhost:$HGPORT/
194 194 searching for changes
195 195 no changes found
196 196 $ mv copy-pull/.hg/hgrc.bu copy-pull/.hg/hgrc
197 197
198 198 cacert configured globally, also testing expansion of environment
199 199 variables in the filename
200 200
201 201 $ echo "[web]" >> $HGRCPATH
202 202 $ echo 'cacerts=$P/pub.pem' >> $HGRCPATH
203 203 $ P="$CERTSDIR" hg -R copy-pull pull
204 204 pulling from https://localhost:$HGPORT/
205 205 searching for changes
206 206 no changes found
207 207 $ P="$CERTSDIR" hg -R copy-pull pull --insecure
208 208 pulling from https://localhost:$HGPORT/
209 209 warning: connection security to localhost is disabled per current settings; communication is susceptible to eavesdropping and tampering
210 210 searching for changes
211 211 no changes found
212 212
213 213 empty cacert file
214 214
215 215 $ touch emptycafile
216 216
217 217 $ hg --config web.cacerts=emptycafile -R copy-pull pull
218 218 pulling from https://localhost:$HGPORT/
219 219 abort: error loading CA file emptycafile: * (glob)
220 220 (file is empty or malformed?)
221 221 [255]
222 222
223 223 cacert mismatch
224 224
225 225 $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub.pem" \
226 226 > https://$LOCALIP:$HGPORT/
227 227 pulling from https://*:$HGPORT/ (glob)
228 228 abort: $LOCALIP certificate error: certificate is for localhost (glob)
229 229 (set hostsecurity.$LOCALIP:certfingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e config setting or use --insecure to connect insecurely)
230 230 [150]
231 231 $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub.pem" \
232 232 > https://$LOCALIP:$HGPORT/ --insecure
233 233 pulling from https://*:$HGPORT/ (glob)
234 234 warning: connection security to $LOCALIP is disabled per current settings; communication is susceptible to eavesdropping and tampering (glob)
235 235 searching for changes
236 236 no changes found
237 237 $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub-other.pem"
238 238 pulling from https://localhost:$HGPORT/
239 239 (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
240 240 abort: error: *certificate verify failed* (glob)
241 241 [100]
242 242 $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub-other.pem" \
243 243 > --insecure
244 244 pulling from https://localhost:$HGPORT/
245 245 warning: connection security to localhost is disabled per current settings; communication is susceptible to eavesdropping and tampering
246 246 searching for changes
247 247 no changes found
248 248
249 249 Test server cert which isn't valid yet
250 250
251 251 $ hg serve -R test -p $HGPORT1 -d --pid-file=hg1.pid --certificate=server-not-yet.pem
252 252 $ cat hg1.pid >> $DAEMON_PIDS
253 253 $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub-not-yet.pem" \
254 254 > https://localhost:$HGPORT1/
255 255 pulling from https://localhost:$HGPORT1/
256 256 (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
257 257 abort: error: *certificate verify failed* (glob)
258 258 [100]
259 259
260 260 Test server cert which no longer is valid
261 261
262 262 $ hg serve -R test -p $HGPORT2 -d --pid-file=hg2.pid --certificate=server-expired.pem
263 263 $ cat hg2.pid >> $DAEMON_PIDS
264 264 $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub-expired.pem" \
265 265 > https://localhost:$HGPORT2/
266 266 pulling from https://localhost:$HGPORT2/
267 267 (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
268 268 abort: error: *certificate verify failed* (glob)
269 269 [100]
270 270
271 271 Setting ciphers to an invalid value aborts
272 272 $ P="$CERTSDIR" hg --config hostsecurity.ciphers=invalid -R copy-pull id https://localhost:$HGPORT/
273 273 abort: could not set ciphers: No cipher can be selected.
274 274 (change cipher string (invalid) in config)
275 275 [255]
276 276
277 277 $ P="$CERTSDIR" hg --config hostsecurity.localhost:ciphers=invalid -R copy-pull id https://localhost:$HGPORT/
278 278 abort: could not set ciphers: No cipher can be selected.
279 279 (change cipher string (invalid) in config)
280 280 [255]
281 281
282 282 Changing the cipher string works
283 283
284 284 $ P="$CERTSDIR" hg --config hostsecurity.ciphers=HIGH -R copy-pull id https://localhost:$HGPORT/
285 285 5fed3813f7f5
286 286
287 287 Fingerprints
288 288
289 289 - works without cacerts (hostfingerprints)
290 290 $ hg -R copy-pull id https://localhost:$HGPORT/ --insecure --config hostfingerprints.localhost=ec:d8:7c:d6:b3:86:d0:4f:c1:b8:b4:1c:9d:8f:5e:16:8e:ef:1c:03
291 291 (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, remove the old SHA-1 fingerprint from [hostfingerprints] and add the following entry to the new [hostsecurity] section: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
292 292 5fed3813f7f5
293 293
294 294 - works without cacerts (hostsecurity)
295 295 $ hg -R copy-pull id https://localhost:$HGPORT/ --config hostsecurity.localhost:fingerprints=sha1:ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03
296 296 5fed3813f7f5
297 297
298 298 $ hg -R copy-pull id https://localhost:$HGPORT/ --config hostsecurity.localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e
299 299 5fed3813f7f5
300 300
301 301 - multiple fingerprints specified and first matches
302 302 $ hg --config 'hostfingerprints.localhost=ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03, deadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/ --insecure
303 303 (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, remove the old SHA-1 fingerprint from [hostfingerprints] and add the following entry to the new [hostsecurity] section: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
304 304 5fed3813f7f5
305 305
306 306 $ hg --config 'hostsecurity.localhost:fingerprints=sha1:ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03, sha1:deadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/
307 307 5fed3813f7f5
308 308
309 309 - multiple fingerprints specified and last matches
310 310 $ hg --config 'hostfingerprints.localhost=deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03' -R copy-pull id https://localhost:$HGPORT/ --insecure
311 311 (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, remove the old SHA-1 fingerprint from [hostfingerprints] and add the following entry to the new [hostsecurity] section: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
312 312 5fed3813f7f5
313 313
314 314 $ hg --config 'hostsecurity.localhost:fingerprints=sha1:deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, sha1:ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03' -R copy-pull id https://localhost:$HGPORT/
315 315 5fed3813f7f5
316 316
317 317 - multiple fingerprints specified and none match
318 318
319 319 $ hg --config 'hostfingerprints.localhost=deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, aeadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/ --insecure
320 320 abort: certificate for localhost has unexpected fingerprint ec:d8:7c:d6:b3:86:d0:4f:c1:b8:b4:1c:9d:8f:5e:16:8e:ef:1c:03
321 321 (check hostfingerprint configuration)
322 322 [150]
323 323
324 324 $ hg --config 'hostsecurity.localhost:fingerprints=sha1:deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, sha1:aeadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/
325 325 abort: certificate for localhost has unexpected fingerprint sha1:ec:d8:7c:d6:b3:86:d0:4f:c1:b8:b4:1c:9d:8f:5e:16:8e:ef:1c:03
326 326 (check hostsecurity configuration)
327 327 [150]
328 328
329 329 - fails when cert doesn't match hostname (port is ignored)
330 330 $ hg -R copy-pull id https://localhost:$HGPORT1/ --config hostfingerprints.localhost=ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03
331 331 abort: certificate for localhost has unexpected fingerprint f4:2f:5a:0c:3e:52:5b:db:e7:24:a8:32:1d:18:97:6d:69:b5:87:84
332 332 (check hostfingerprint configuration)
333 333 [150]
334 334
335 335
336 336 - ignores that certificate doesn't match hostname
337 337 $ hg -R copy-pull id https://$LOCALIP:$HGPORT/ --config hostfingerprints.$LOCALIP=ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03
338 338 (SHA-1 fingerprint for $LOCALIP found in legacy [hostfingerprints] section; if you trust this fingerprint, remove the old SHA-1 fingerprint from [hostfingerprints] and add the following entry to the new [hostsecurity] section: $LOCALIP:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
339 339 5fed3813f7f5
340 340
341 341 Ports used by next test. Kill servers.
342 342
343 343 $ killdaemons.py hg0.pid
344 344 $ killdaemons.py hg1.pid
345 345 $ killdaemons.py hg2.pid
346 346
347 347 #if tls1.2
348 348 Start servers running supported TLS versions
349 349
350 350 $ cd test
351 351 $ hg serve -p $HGPORT -d --pid-file=../hg0.pid --certificate=$PRIV \
352 352 > --config devel.serverexactprotocol=tls1.0
353 353 $ cat ../hg0.pid >> $DAEMON_PIDS
354 354 $ hg serve -p $HGPORT1 -d --pid-file=../hg1.pid --certificate=$PRIV \
355 355 > --config devel.serverexactprotocol=tls1.1
356 356 $ cat ../hg1.pid >> $DAEMON_PIDS
357 357 $ hg serve -p $HGPORT2 -d --pid-file=../hg2.pid --certificate=$PRIV \
358 358 > --config devel.serverexactprotocol=tls1.2
359 359 $ cat ../hg2.pid >> $DAEMON_PIDS
360 360 $ cd ..
361 361
362 362 Clients talking same TLS versions work
363 363
364 364 $ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.0 --config hostsecurity.ciphers=DEFAULT id https://localhost:$HGPORT/
365 365 5fed3813f7f5
366 366 $ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.1 --config hostsecurity.ciphers=DEFAULT id https://localhost:$HGPORT1/
367 367 5fed3813f7f5
368 368 $ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.2 id https://localhost:$HGPORT2/
369 369 5fed3813f7f5
370 370
371 371 Clients requiring newer TLS version than what server supports fail
372 372
373 373 $ P="$CERTSDIR" hg id https://localhost:$HGPORT/
374 374 (could not negotiate a common security protocol (tls1.1+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
375 375 (consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
376 376 (see https://mercurial-scm.org/wiki/SecureConnections for more info)
377 377 abort: error: .*(unsupported protocol|wrong ssl version|alert protocol version).* (re)
378 378 [100]
379 379
380 380 $ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.1 id https://localhost:$HGPORT/
381 381 (could not negotiate a common security protocol (tls1.1+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
382 382 (consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
383 383 (see https://mercurial-scm.org/wiki/SecureConnections for more info)
384 384 abort: error: .*(unsupported protocol|wrong ssl version|alert protocol version).* (re)
385 385 [100]
386 386 $ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.2 id https://localhost:$HGPORT/
387 387 (could not negotiate a common security protocol (tls1.2+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
388 388 (consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
389 389 (see https://mercurial-scm.org/wiki/SecureConnections for more info)
390 390 abort: error: .*(unsupported protocol|wrong ssl version|alert protocol version).* (re)
391 391 [100]
392 392 $ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.2 id https://localhost:$HGPORT1/
393 393 (could not negotiate a common security protocol (tls1.2+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
394 394 (consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
395 395 (see https://mercurial-scm.org/wiki/SecureConnections for more info)
396 396 abort: error: .*(unsupported protocol|wrong ssl version|alert protocol version).* (re)
397 397 [100]
398 398
399 399 --insecure will allow TLS 1.0 connections and override configs
400 400
401 401 $ hg --config hostsecurity.minimumprotocol=tls1.2 id --insecure https://localhost:$HGPORT1/
402 402 warning: connection security to localhost is disabled per current settings; communication is susceptible to eavesdropping and tampering
403 403 5fed3813f7f5
404 404
405 405 The per-host config option overrides the default
406 406
407 407 $ P="$CERTSDIR" hg id https://localhost:$HGPORT/ \
408 408 > --config hostsecurity.ciphers=DEFAULT \
409 409 > --config hostsecurity.minimumprotocol=tls1.2 \
410 410 > --config hostsecurity.localhost:minimumprotocol=tls1.0
411 411 5fed3813f7f5
412 412
413 413 The per-host config option by itself works
414 414
415 415 $ P="$CERTSDIR" hg id https://localhost:$HGPORT/ \
416 416 > --config hostsecurity.localhost:minimumprotocol=tls1.2
417 417 (could not negotiate a common security protocol (tls1.2+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
418 418 (consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
419 419 (see https://mercurial-scm.org/wiki/SecureConnections for more info)
420 420 abort: error: .*(unsupported protocol|wrong ssl version|alert protocol version).* (re)
421 421 [100]
422 422
423 423 .hg/hgrc file [hostsecurity] settings are applied to remote ui instances (issue5305)
424 424
425 425 $ cat >> copy-pull/.hg/hgrc << EOF
426 426 > [hostsecurity]
427 427 > localhost:minimumprotocol=tls1.2
428 428 > EOF
429 429 $ P="$CERTSDIR" hg -R copy-pull id https://localhost:$HGPORT/
430 430 (could not negotiate a common security protocol (tls1.2+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
431 431 (consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
432 432 (see https://mercurial-scm.org/wiki/SecureConnections for more info)
433 433 abort: error: .*(unsupported protocol|wrong ssl version|alert protocol version).* (re)
434 434 [100]
435 435
436 436 $ killdaemons.py hg0.pid
437 437 $ killdaemons.py hg1.pid
438 438 $ killdaemons.py hg2.pid
439 439 #endif
440 440
441 441 Prepare for connecting through proxy
442 442
443 443 $ hg serve -R test -p $HGPORT -d --pid-file=hg0.pid --certificate=$PRIV
444 444 $ cat hg0.pid >> $DAEMON_PIDS
445 445 $ hg serve -R test -p $HGPORT2 -d --pid-file=hg2.pid --certificate=server-expired.pem
446 446 $ cat hg2.pid >> $DAEMON_PIDS
447 447 tinyproxy.py doesn't fully detach, so killing it may result in extra output
448 448 from the shell. So don't kill it.
449 449 $ tinyproxy.py $HGPORT1 localhost >proxy.log </dev/null 2>&1 &
450 450 $ while [ ! -f proxy.pid ]; do sleep 0; done
451 451 $ cat proxy.pid >> $DAEMON_PIDS
452 452
453 453 $ echo "[http_proxy]" >> copy-pull/.hg/hgrc
454 454 $ echo "always=True" >> copy-pull/.hg/hgrc
455 455 $ echo "[hostfingerprints]" >> copy-pull/.hg/hgrc
456 456 $ echo "localhost =" >> copy-pull/.hg/hgrc
457 457
458 458 Test unvalidated https through proxy
459 459
460 460 $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --insecure
461 461 pulling from https://localhost:$HGPORT/
462 462 warning: connection security to localhost is disabled per current settings; communication is susceptible to eavesdropping and tampering
463 463 searching for changes
464 464 no changes found
465 465
466 466 Test https with cacert and fingerprint through proxy
467 467
468 468 $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull \
469 469 > --config web.cacerts="$CERTSDIR/pub.pem"
470 470 pulling from https://localhost:$HGPORT/
471 471 searching for changes
472 472 no changes found
473 473 $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull https://localhost:$HGPORT/ --config hostfingerprints.localhost=ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03 --trace
474 474 pulling from https://*:$HGPORT/ (glob)
475 475 (SHA-1 fingerprint for localhost found in legacy [hostfingerprints] section; if you trust this fingerprint, remove the old SHA-1 fingerprint from [hostfingerprints] and add the following entry to the new [hostsecurity] section: localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e)
476 476 searching for changes
477 477 no changes found
478 478
479 479 Test https with cert problems through proxy
480 480
481 481 $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull \
482 482 > --config web.cacerts="$CERTSDIR/pub-other.pem"
483 483 pulling from https://localhost:$HGPORT/
484 484 (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
485 485 abort: error: *certificate verify failed* (glob)
486 486 [100]
487 487 $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull \
488 488 > --config web.cacerts="$CERTSDIR/pub-expired.pem" https://localhost:$HGPORT2/
489 489 pulling from https://localhost:$HGPORT2/
490 490 (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
491 491 abort: error: *certificate verify failed* (glob)
492 492 [100]
493 493
494 Test when proxy can't connect to server
495
496 $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --insecure https://localhost:0/
497 pulling from https://localhost:0/
498 abort: error: Tunnel connection failed: 404 Connection refused
499 [100]
500
494 501
495 502 $ killdaemons.py hg0.pid
496 503
497 504 $ cd test
498 505
499 506 Missing certificate file(s) are detected
500 507
501 508 $ hg serve -p $HGPORT --certificate=/missing/certificate \
502 509 > --config devel.servercafile=$PRIV --config devel.serverrequirecert=true
503 510 abort: referenced certificate file (*/missing/certificate) does not exist (glob)
504 511 [255]
505 512
506 513 $ hg serve -p $HGPORT --certificate=$PRIV \
507 514 > --config devel.servercafile=/missing/cafile --config devel.serverrequirecert=true
508 515 abort: referenced certificate file (*/missing/cafile) does not exist (glob)
509 516 [255]
510 517
511 518 Start hgweb that requires client certificates:
512 519
513 520 $ hg serve -p $HGPORT -d --pid-file=../hg0.pid --certificate=$PRIV \
514 521 > --config devel.servercafile=$PRIV --config devel.serverrequirecert=true
515 522 $ cat ../hg0.pid >> $DAEMON_PIDS
516 523 $ cd ..
517 524
518 525 without client certificate:
519 526
520 527 $ P="$CERTSDIR" hg id https://localhost:$HGPORT/
521 528 abort: error: .*(\$ECONNRESET\$|certificate required|handshake failure|EOF occurred).* (re)
522 529 [100]
523 530
524 531 with client certificate:
525 532
526 533 $ cat << EOT >> $HGRCPATH
527 534 > [auth]
528 535 > l.prefix = localhost
529 536 > l.cert = $CERTSDIR/client-cert.pem
530 537 > l.key = $CERTSDIR/client-key.pem
531 538 > EOT
532 539
533 540 $ P="$CERTSDIR" hg id https://localhost:$HGPORT/ \
534 541 > --config auth.l.key="$CERTSDIR/client-key-decrypted.pem"
535 542 5fed3813f7f5
536 543
537 544 $ printf '1234\n' | env P="$CERTSDIR" hg id https://localhost:$HGPORT/ \
538 545 > --config ui.interactive=True --config ui.nontty=True
539 546 passphrase for */client-key.pem: 5fed3813f7f5 (glob)
540 547
541 548 $ env P="$CERTSDIR" hg id https://localhost:$HGPORT/
542 549 abort: error: * (glob)
543 550 [100]
544 551
545 552 Missing certificate and key files result in an error
546 553
547 554 $ hg id https://localhost:$HGPORT/ --config auth.l.cert=/missing/cert
548 555 abort: certificate file (*/missing/cert) does not exist; cannot connect to localhost (glob)
549 556 (restore missing file or fix references in Mercurial config)
550 557 [255]
551 558
552 559 $ hg id https://localhost:$HGPORT/ --config auth.l.key=/missing/key
553 560 abort: certificate file (*/missing/key) does not exist; cannot connect to localhost (glob)
554 561 (restore missing file or fix references in Mercurial config)
555 562 [255]
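The sha256 values in the hostsecurity fingerprint tests above are simply the digest of the server certificate in DER form, written as colon-separated hex pairs. A hedged sketch of computing one with the standard library (the 'pub.pem' path stands in for the test certificate and is illustrative):

    import hashlib
    import ssl

    with open('pub.pem') as f:
        der = ssl.PEM_cert_to_DER_cert(f.read())
    digest = hashlib.sha256(der).hexdigest()
    print('sha256:' + ':'.join(digest[i:i + 2] for i in range(0, len(digest), 2)))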
@@ -1,222 +1,218 b''
1 1 #!/usr/bin/env python
2 2
3 3
4 4 __doc__ = """Tiny HTTP Proxy.
5 5
6 6 This module implements GET, HEAD, POST, PUT and DELETE methods
7 7 on BaseHTTPServer, and behaves as an HTTP proxy. The CONNECT
8 8 method is also implemented experimentally, but has not been
9 9 tested yet.
10 10
11 11 Any help will be greatly appreciated. SUZUKI Hisao
12 12 """
13 13
14 14 __version__ = "0.2.1"
15 15
16 16 import optparse
17 17 import os
18 18 import select
19 19 import socket
20 20 import sys
21 21
22 22 from mercurial import (
23 23 pycompat,
24 24 util,
25 25 )
26 26
27 27 httpserver = util.httpserver
28 28 socketserver = util.socketserver
29 29 urlreq = util.urlreq
30 30
31 31 if os.environ.get('HGIPV6', '0') == '1':
32 32 family = socket.AF_INET6
33 33 else:
34 34 family = socket.AF_INET
35 35
36 36
37 37 class ProxyHandler(httpserver.basehttprequesthandler):
38 38 __base = httpserver.basehttprequesthandler
39 39 __base_handle = __base.handle
40 40
41 41 server_version = "TinyHTTPProxy/" + __version__
42 42 rbufsize = 0 # keep self.rfile unbuffered
43 43
44 44 def handle(self):
45 45 (ip, port) = self.client_address
46 46 allowed = getattr(self, 'allowed_clients', None)
47 47 if allowed is not None and ip not in allowed:
48 48 self.raw_requestline = self.rfile.readline()
49 49 if self.parse_request():
50 50 self.send_error(403)
51 51 else:
52 52 self.__base_handle()
53 53
54 54 def log_request(self, code='-', size='-'):
55 55 xheaders = [h for h in self.headers.items() if h[0].startswith('x-')]
56 56 self.log_message(
57 57 '"%s" %s %s%s',
58 58 self.requestline,
59 59 str(code),
60 60 str(size),
61 61 ''.join([' %s:%s' % h for h in sorted(xheaders)]),
62 62 )
63 63 # Flush for Windows, so output isn't lost on TerminateProcess()
64 64 sys.stdout.flush()
65 65 sys.stderr.flush()
66 66
67 67 def _connect_to(self, netloc, soc):
68 68 i = netloc.find(':')
69 69 if i >= 0:
70 70 host_port = netloc[:i], int(netloc[i + 1 :])
71 71 else:
72 72 host_port = netloc, 80
73 73 print("\t" "connect to %s:%d" % host_port)
74 74 try:
75 75 soc.connect(host_port)
76 except socket.error as arg:
77 try:
78 msg = arg[1]
79 except (IndexError, TypeError):
80 msg = arg
81 self.send_error(404, msg)
76 except socket.error as e:
77 self.send_error(404, e.strerror)
82 78 return 0
83 79 return 1
84 80
85 81 def do_CONNECT(self):
86 82 soc = socket.socket(family, socket.SOCK_STREAM)
87 83 try:
88 84 if self._connect_to(self.path, soc):
89 85 self.log_request(200)
90 86 self.wfile.write(
91 87 pycompat.bytestr(self.protocol_version)
92 88 + b" 200 Connection established\r\n"
93 89 )
94 90 self.wfile.write(
95 91 b"Proxy-agent: %s\r\n"
96 92 % pycompat.bytestr(self.version_string())
97 93 )
98 94 self.wfile.write(b"\r\n")
99 95 self._read_write(soc, 300)
100 96 finally:
101 97 print("\t" "bye")
102 98 soc.close()
103 99 self.connection.close()
104 100
105 101 def do_GET(self):
106 102 (scm, netloc, path, params, query, fragment) = urlreq.urlparse(
107 103 self.path, 'http'
108 104 )
109 105 if scm != 'http' or fragment or not netloc:
110 106 self.send_error(400, "bad url %s" % self.path)
111 107 return
112 108 soc = socket.socket(family, socket.SOCK_STREAM)
113 109 try:
114 110 if self._connect_to(netloc, soc):
115 111 self.log_request()
116 112 url = urlreq.urlunparse(('', '', path, params, query, ''))
117 113 soc.send(
118 114 b"%s %s %s\r\n"
119 115 % (
120 116 pycompat.bytestr(self.command),
121 117 pycompat.bytestr(url),
122 118 pycompat.bytestr(self.request_version),
123 119 )
124 120 )
125 121 self.headers['Connection'] = 'close'
126 122 del self.headers['Proxy-Connection']
127 123 for key, val in self.headers.items():
128 124 soc.send(
129 125 b"%s: %s\r\n"
130 126 % (pycompat.bytestr(key), pycompat.bytestr(val))
131 127 )
132 128 soc.send(b"\r\n")
133 129 self._read_write(soc)
134 130 finally:
135 131 print("\t" "bye")
136 132 soc.close()
137 133 self.connection.close()
138 134
139 135 def _read_write(self, soc, max_idling=20):
140 136 iw = [self.connection, soc]
141 137 ow = []
142 138 count = 0
143 139 while True:
144 140 count += 1
145 141 (ins, _, exs) = select.select(iw, ow, iw, 3)
146 142 if exs:
147 143 break
148 144 if ins:
149 145 for i in ins:
150 146 if i is soc:
151 147 out = self.connection
152 148 else:
153 149 out = soc
154 150 try:
155 151 data = i.recv(8192)
156 152 except socket.error:
157 153 break
158 154 if data:
159 155 out.send(data)
160 156 count = 0
161 157 else:
162 158 print("\t" "idle", count)
163 159 if count == max_idling:
164 160 break
165 161
166 162 do_HEAD = do_GET
167 163 do_POST = do_GET
168 164 do_PUT = do_GET
169 165 do_DELETE = do_GET
170 166
171 167
172 168 class ThreadingHTTPServer(socketserver.ThreadingMixIn, httpserver.httpserver):
173 169 def __init__(self, *args, **kwargs):
174 170 httpserver.httpserver.__init__(self, *args, **kwargs)
175 171 a = open("proxy.pid", "w")
176 172 a.write(str(os.getpid()) + "\n")
177 173 a.close()
178 174
179 175
180 176 def runserver(port=8000, bind=""):
181 177 server_address = (bind, port)
182 178 ProxyHandler.protocol_version = "HTTP/1.0"
183 179 httpd = ThreadingHTTPServer(server_address, ProxyHandler)
184 180 sa = httpd.socket.getsockname()
185 181 print("Serving HTTP on", sa[0], "port", sa[1], "...")
186 182 try:
187 183 httpd.serve_forever()
188 184 except KeyboardInterrupt:
189 185 print("\nKeyboard interrupt received, exiting.")
190 186 httpd.server_close()
191 187 sys.exit(0)
192 188
193 189
194 190 if __name__ == '__main__':
195 191 argv = sys.argv
196 192 if argv[1:] and argv[1] in ('-h', '--help'):
197 193 print(argv[0], "[port [allowed_client_name ...]]")
198 194 else:
199 195 if argv[2:]:
200 196 allowed = []
201 197 for name in argv[2:]:
202 198 client = socket.gethostbyname(name)
203 199 allowed.append(client)
204 200 print("Accept: %s (%s)" % (client, name))
205 201 ProxyHandler.allowed_clients = allowed
206 202 del argv[2:]
207 203 else:
208 204 print("Any clients will be served...")
209 205
210 206 parser = optparse.OptionParser()
211 207 parser.add_option(
212 208 '-b',
213 209 '--bind',
214 210 metavar='ADDRESS',
215 211 help='Specify alternate bind address ' '[default: all interfaces]',
216 212 default='',
217 213 )
218 214 (options, args) = parser.parse_args()
219 215 port = 8000
220 216 if len(args) == 1:
221 217 port = int(args[0])
222 218 runserver(port, options.bind)
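For reference, the tests earlier in this changeset start this proxy as a background process (tinyproxy.py $HGPORT1 localhost ... &) and then wait for proxy.pid to appear. A hedged sketch of the equivalent programmatic use, assuming the script is importable as a module named tinyproxy:

    import tinyproxy

    # Listen on 127.0.0.1:8001 and serve until interrupted; the port and
    # bind address are illustrative.
    tinyproxy.runserver(port=8001, bind='127.0.0.1')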