##// END OF EJS Templates
clonebundles: move a bundle of clone bundle related code to a new module...
marmoute -
r46369:74271829 default
parent child Browse files
Show More
@@ -0,0 +1,422 b''
1 # bundlecaches.py - utility to deal with pre-computed bundle for servers
2 #
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
5
6 from .i18n import _
7
8 from .thirdparty import attr
9
10 from . import (
11 error,
12 sslutil,
13 util,
14 )
15 from .utils import stringutil
16
17 urlreq = util.urlreq
18
19
20 @attr.s
21 class bundlespec(object):
22 compression = attr.ib()
23 wirecompression = attr.ib()
24 version = attr.ib()
25 wireversion = attr.ib()
26 params = attr.ib()
27 contentopts = attr.ib()
28
29
30 # Maps bundle version human names to changegroup versions.
31 _bundlespeccgversions = {
32 b'v1': b'01',
33 b'v2': b'02',
34 b'packed1': b's1',
35 b'bundle2': b'02', # legacy
36 }
37
38 # Maps bundle version with content opts to choose which part to bundle
39 _bundlespeccontentopts = {
40 b'v1': {
41 b'changegroup': True,
42 b'cg.version': b'01',
43 b'obsolescence': False,
44 b'phases': False,
45 b'tagsfnodescache': False,
46 b'revbranchcache': False,
47 },
48 b'v2': {
49 b'changegroup': True,
50 b'cg.version': b'02',
51 b'obsolescence': False,
52 b'phases': False,
53 b'tagsfnodescache': True,
54 b'revbranchcache': True,
55 },
56 b'packed1': {b'cg.version': b's1'},
57 }
58 _bundlespeccontentopts[b'bundle2'] = _bundlespeccontentopts[b'v2']
59
60 _bundlespecvariants = {
61 b"streamv2": {
62 b"changegroup": False,
63 b"streamv2": True,
64 b"tagsfnodescache": False,
65 b"revbranchcache": False,
66 }
67 }
68
69 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
70 _bundlespecv1compengines = {b'gzip', b'bzip2', b'none'}
71
72
73 def parsebundlespec(repo, spec, strict=True):
74 """Parse a bundle string specification into parts.
75
76 Bundle specifications denote a well-defined bundle/exchange format.
77 The content of a given specification should not change over time in
78 order to ensure that bundles produced by a newer version of Mercurial are
79 readable from an older version.
80
81 The string currently has the form:
82
83 <compression>-<type>[;<parameter0>[;<parameter1>]]
84
85 Where <compression> is one of the supported compression formats
86 and <type> is (currently) a version string. A ";" can follow the type and
87 all text afterwards is interpreted as URI encoded, ";" delimited key=value
88 pairs.
89
90 If ``strict`` is True (the default) <compression> is required. Otherwise,
91 it is optional.
92
93 Returns a bundlespec object of (compression, version, parameters).
94 Compression will be ``None`` if not in strict mode and a compression isn't
95 defined.
96
97 An ``InvalidBundleSpecification`` is raised when the specification is
98 not syntactically well formed.
99
100 An ``UnsupportedBundleSpecification`` is raised when the compression or
101 bundle type/version is not recognized.
102
103 Note: this function will likely eventually return a more complex data
104 structure, including bundle2 part information.
105 """
106
107 def parseparams(s):
108 if b';' not in s:
109 return s, {}
110
111 params = {}
112 version, paramstr = s.split(b';', 1)
113
114 for p in paramstr.split(b';'):
115 if b'=' not in p:
116 raise error.InvalidBundleSpecification(
117 _(
118 b'invalid bundle specification: '
119 b'missing "=" in parameter: %s'
120 )
121 % p
122 )
123
124 key, value = p.split(b'=', 1)
125 key = urlreq.unquote(key)
126 value = urlreq.unquote(value)
127 params[key] = value
128
129 return version, params
130
131 if strict and b'-' not in spec:
132 raise error.InvalidBundleSpecification(
133 _(
134 b'invalid bundle specification; '
135 b'must be prefixed with compression: %s'
136 )
137 % spec
138 )
139
140 if b'-' in spec:
141 compression, version = spec.split(b'-', 1)
142
143 if compression not in util.compengines.supportedbundlenames:
144 raise error.UnsupportedBundleSpecification(
145 _(b'%s compression is not supported') % compression
146 )
147
148 version, params = parseparams(version)
149
150 if version not in _bundlespeccgversions:
151 raise error.UnsupportedBundleSpecification(
152 _(b'%s is not a recognized bundle version') % version
153 )
154 else:
155 # Value could be just the compression or just the version, in which
156 # case some defaults are assumed (but only when not in strict mode).
157 assert not strict
158
159 spec, params = parseparams(spec)
160
161 if spec in util.compengines.supportedbundlenames:
162 compression = spec
163 version = b'v1'
164 # Generaldelta repos require v2.
165 if b'generaldelta' in repo.requirements:
166 version = b'v2'
167 # Modern compression engines require v2.
168 if compression not in _bundlespecv1compengines:
169 version = b'v2'
170 elif spec in _bundlespeccgversions:
171 if spec == b'packed1':
172 compression = b'none'
173 else:
174 compression = b'bzip2'
175 version = spec
176 else:
177 raise error.UnsupportedBundleSpecification(
178 _(b'%s is not a recognized bundle specification') % spec
179 )
180
181 # Bundle version 1 only supports a known set of compression engines.
182 if version == b'v1' and compression not in _bundlespecv1compengines:
183 raise error.UnsupportedBundleSpecification(
184 _(b'compression engine %s is not supported on v1 bundles')
185 % compression
186 )
187
188 # The specification for packed1 can optionally declare the data formats
189 # required to apply it. If we see this metadata, compare against what the
190 # repo supports and error if the bundle isn't compatible.
191 if version == b'packed1' and b'requirements' in params:
192 requirements = set(params[b'requirements'].split(b','))
193 missingreqs = requirements - repo.supportedformats
194 if missingreqs:
195 raise error.UnsupportedBundleSpecification(
196 _(b'missing support for repository features: %s')
197 % b', '.join(sorted(missingreqs))
198 )
199
200 # Compute contentopts based on the version
201 contentopts = _bundlespeccontentopts.get(version, {}).copy()
202
203 # Process the variants
204 if b"stream" in params and params[b"stream"] == b"v2":
205 variant = _bundlespecvariants[b"streamv2"]
206 contentopts.update(variant)
207
208 engine = util.compengines.forbundlename(compression)
209 compression, wirecompression = engine.bundletype()
210 wireversion = _bundlespeccgversions[version]
211
212 return bundlespec(
213 compression, wirecompression, version, wireversion, params, contentopts
214 )
215
216
217 def parseclonebundlesmanifest(repo, s):
218 """Parses the raw text of a clone bundles manifest.
219
220 Returns a list of dicts. The dicts have a ``URL`` key corresponding
221 to the URL and other keys are the attributes for the entry.
222 """
223 m = []
224 for line in s.splitlines():
225 fields = line.split()
226 if not fields:
227 continue
228 attrs = {b'URL': fields[0]}
229 for rawattr in fields[1:]:
230 key, value = rawattr.split(b'=', 1)
231 key = util.urlreq.unquote(key)
232 value = util.urlreq.unquote(value)
233 attrs[key] = value
234
235 # Parse BUNDLESPEC into components. This makes client-side
236 # preferences easier to specify since you can prefer a single
237 # component of the BUNDLESPEC.
238 if key == b'BUNDLESPEC':
239 try:
240 bundlespec = parsebundlespec(repo, value)
241 attrs[b'COMPRESSION'] = bundlespec.compression
242 attrs[b'VERSION'] = bundlespec.version
243 except error.InvalidBundleSpecification:
244 pass
245 except error.UnsupportedBundleSpecification:
246 pass
247
248 m.append(attrs)
249
250 return m
251
252
253 def isstreamclonespec(bundlespec):
254 # Stream clone v1
255 if bundlespec.wirecompression == b'UN' and bundlespec.wireversion == b's1':
256 return True
257
258 # Stream clone v2
259 if (
260 bundlespec.wirecompression == b'UN'
261 and bundlespec.wireversion == b'02'
262 and bundlespec.contentopts.get(b'streamv2')
263 ):
264 return True
265
266 return False
267
268
269 def filterclonebundleentries(repo, entries, streamclonerequested=False):
270 """Remove incompatible clone bundle manifest entries.
271
272 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
273 and returns a new list consisting of only the entries that this client
274 should be able to apply.
275
276 There is no guarantee we'll be able to apply all returned entries because
277 the metadata we use to filter on may be missing or wrong.
278 """
279 newentries = []
280 for entry in entries:
281 spec = entry.get(b'BUNDLESPEC')
282 if spec:
283 try:
284 bundlespec = parsebundlespec(repo, spec, strict=True)
285
286 # If a stream clone was requested, filter out non-streamclone
287 # entries.
288 if streamclonerequested and not isstreamclonespec(bundlespec):
289 repo.ui.debug(
290 b'filtering %s because not a stream clone\n'
291 % entry[b'URL']
292 )
293 continue
294
295 except error.InvalidBundleSpecification as e:
296 repo.ui.debug(stringutil.forcebytestr(e) + b'\n')
297 continue
298 except error.UnsupportedBundleSpecification as e:
299 repo.ui.debug(
300 b'filtering %s because unsupported bundle '
301 b'spec: %s\n' % (entry[b'URL'], stringutil.forcebytestr(e))
302 )
303 continue
304 # If we don't have a spec and requested a stream clone, we don't know
305 # what the entry is so don't attempt to apply it.
306 elif streamclonerequested:
307 repo.ui.debug(
308 b'filtering %s because cannot determine if a stream '
309 b'clone bundle\n' % entry[b'URL']
310 )
311 continue
312
313 if b'REQUIRESNI' in entry and not sslutil.hassni:
314 repo.ui.debug(
315 b'filtering %s because SNI not supported\n' % entry[b'URL']
316 )
317 continue
318
319 if b'REQUIREDRAM' in entry:
320 try:
321 requiredram = util.sizetoint(entry[b'REQUIREDRAM'])
322 except error.ParseError:
323 repo.ui.debug(
324 b'filtering %s due to a bad REQUIREDRAM attribute\n'
325 % entry[b'URL']
326 )
327 continue
328 actualram = repo.ui.estimatememory()
329 if actualram is not None and actualram * 0.66 < requiredram:
330 repo.ui.debug(
331 b'filtering %s as it needs more than 2/3 of system memory\n'
332 % entry[b'URL']
333 )
334 continue
335
336 newentries.append(entry)
337
338 return newentries
339
340
341 class clonebundleentry(object):
342 """Represents an item in a clone bundles manifest.
343
344 This rich class is needed to support sorting since sorted() in Python 3
345 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
346 won't work.
347 """
348
349 def __init__(self, value, prefers):
350 self.value = value
351 self.prefers = prefers
352
353 def _cmp(self, other):
354 for prefkey, prefvalue in self.prefers:
355 avalue = self.value.get(prefkey)
356 bvalue = other.value.get(prefkey)
357
358 # Special case for b missing attribute and a matches exactly.
359 if avalue is not None and bvalue is None and avalue == prefvalue:
360 return -1
361
362 # Special case for a missing attribute and b matches exactly.
363 if bvalue is not None and avalue is None and bvalue == prefvalue:
364 return 1
365
366 # We can't compare unless attribute present on both.
367 if avalue is None or bvalue is None:
368 continue
369
370 # Same values should fall back to next attribute.
371 if avalue == bvalue:
372 continue
373
374 # Exact matches come first.
375 if avalue == prefvalue:
376 return -1
377 if bvalue == prefvalue:
378 return 1
379
380 # Fall back to next attribute.
381 continue
382
383 # If we got here we couldn't sort by attributes and prefers. Fall
384 # back to index order.
385 return 0
386
387 def __lt__(self, other):
388 return self._cmp(other) < 0
389
390 def __gt__(self, other):
391 return self._cmp(other) > 0
392
393 def __eq__(self, other):
394 return self._cmp(other) == 0
395
396 def __le__(self, other):
397 return self._cmp(other) <= 0
398
399 def __ge__(self, other):
400 return self._cmp(other) >= 0
401
402 def __ne__(self, other):
403 return self._cmp(other) != 0
404
405
406 def sortclonebundleentries(ui, entries):
407 prefers = ui.configlist(b'ui', b'clonebundleprefers')
408 if not prefers:
409 return list(entries)
410
411 def _split(p):
412 if b'=' not in p:
413 hint = _(b"each comma separated item should be key=value pairs")
414 raise error.Abort(
415 _(b"invalid ui.clonebundleprefers item: %s") % p, hint=hint
416 )
417 return p.split(b'=', 1)
418
419 prefers = [_split(p) for p in prefers]
420
421 items = sorted(clonebundleentry(v, prefers) for v in entries)
422 return [i.value for i in items]
@@ -1,426 +1,426 b''
1 # lfs - hash-preserving large file support using Git-LFS protocol
1 # lfs - hash-preserving large file support using Git-LFS protocol
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """lfs - large file support (EXPERIMENTAL)
8 """lfs - large file support (EXPERIMENTAL)
9
9
10 This extension allows large files to be tracked outside of the normal
10 This extension allows large files to be tracked outside of the normal
11 repository storage and stored on a centralized server, similar to the
11 repository storage and stored on a centralized server, similar to the
12 ``largefiles`` extension. The ``git-lfs`` protocol is used when
12 ``largefiles`` extension. The ``git-lfs`` protocol is used when
13 communicating with the server, so existing git infrastructure can be
13 communicating with the server, so existing git infrastructure can be
14 harnessed. Even though the files are stored outside of the repository,
14 harnessed. Even though the files are stored outside of the repository,
15 they are still integrity checked in the same manner as normal files.
15 they are still integrity checked in the same manner as normal files.
16
16
17 The files stored outside of the repository are downloaded on demand,
17 The files stored outside of the repository are downloaded on demand,
18 which reduces the time to clone, and possibly the local disk usage.
18 which reduces the time to clone, and possibly the local disk usage.
19 This changes fundamental workflows in a DVCS, so careful thought
19 This changes fundamental workflows in a DVCS, so careful thought
20 should be given before deploying it. :hg:`convert` can be used to
20 should be given before deploying it. :hg:`convert` can be used to
21 convert LFS repositories to normal repositories that no longer
21 convert LFS repositories to normal repositories that no longer
22 require this extension, and do so without changing the commit hashes.
22 require this extension, and do so without changing the commit hashes.
23 This allows the extension to be disabled if the centralized workflow
23 This allows the extension to be disabled if the centralized workflow
24 becomes burdensome. However, the pre and post convert clones will
24 becomes burdensome. However, the pre and post convert clones will
25 not be able to communicate with each other unless the extension is
25 not be able to communicate with each other unless the extension is
26 enabled on both.
26 enabled on both.
27
27
28 To start a new repository, or to add LFS files to an existing one, just
28 To start a new repository, or to add LFS files to an existing one, just
29 create an ``.hglfs`` file as described below in the root directory of
29 create an ``.hglfs`` file as described below in the root directory of
30 the repository. Typically, this file should be put under version
30 the repository. Typically, this file should be put under version
31 control, so that the settings will propagate to other repositories with
31 control, so that the settings will propagate to other repositories with
32 push and pull. During any commit, Mercurial will consult this file to
32 push and pull. During any commit, Mercurial will consult this file to
33 determine if an added or modified file should be stored externally. The
33 determine if an added or modified file should be stored externally. The
34 type of storage depends on the characteristics of the file at each
34 type of storage depends on the characteristics of the file at each
35 commit. A file that is near a size threshold may switch back and forth
35 commit. A file that is near a size threshold may switch back and forth
36 between LFS and normal storage, as needed.
36 between LFS and normal storage, as needed.
37
37
38 Alternately, both normal repositories and largefile controlled
38 Alternately, both normal repositories and largefile controlled
39 repositories can be converted to LFS by using :hg:`convert` and the
39 repositories can be converted to LFS by using :hg:`convert` and the
40 ``lfs.track`` config option described below. The ``.hglfs`` file
40 ``lfs.track`` config option described below. The ``.hglfs`` file
41 should then be created and added, to control subsequent LFS selection.
41 should then be created and added, to control subsequent LFS selection.
42 The hashes are also unchanged in this case. The LFS and non-LFS
42 The hashes are also unchanged in this case. The LFS and non-LFS
43 repositories can be distinguished because the LFS repository will
43 repositories can be distinguished because the LFS repository will
44 abort any command if this extension is disabled.
44 abort any command if this extension is disabled.
45
45
46 Committed LFS files are held locally, until the repository is pushed.
46 Committed LFS files are held locally, until the repository is pushed.
47 Prior to pushing the normal repository data, the LFS files that are
47 Prior to pushing the normal repository data, the LFS files that are
48 tracked by the outgoing commits are automatically uploaded to the
48 tracked by the outgoing commits are automatically uploaded to the
49 configured central server. No LFS files are transferred on
49 configured central server. No LFS files are transferred on
50 :hg:`pull` or :hg:`clone`. Instead, the files are downloaded on
50 :hg:`pull` or :hg:`clone`. Instead, the files are downloaded on
51 demand as they need to be read, if a cached copy cannot be found
51 demand as they need to be read, if a cached copy cannot be found
52 locally. Both committing and downloading an LFS file will link the
52 locally. Both committing and downloading an LFS file will link the
53 file to a usercache, to speed up future access. See the `usercache`
53 file to a usercache, to speed up future access. See the `usercache`
54 config setting described below.
54 config setting described below.
55
55
56 The extension reads its configuration from a versioned ``.hglfs``
56 The extension reads its configuration from a versioned ``.hglfs``
57 configuration file found in the root of the working directory. The
57 configuration file found in the root of the working directory. The
58 ``.hglfs`` file uses the same syntax as all other Mercurial
58 ``.hglfs`` file uses the same syntax as all other Mercurial
59 configuration files. It uses a single section, ``[track]``.
59 configuration files. It uses a single section, ``[track]``.
60
60
61 The ``[track]`` section specifies which files are stored as LFS (or
61 The ``[track]`` section specifies which files are stored as LFS (or
62 not). Each line is keyed by a file pattern, with a predicate value.
62 not). Each line is keyed by a file pattern, with a predicate value.
63 The first file pattern match is used, so put more specific patterns
63 The first file pattern match is used, so put more specific patterns
64 first. The available predicates are ``all()``, ``none()``, and
64 first. The available predicates are ``all()``, ``none()``, and
65 ``size()``. See "hg help filesets.size" for the latter.
65 ``size()``. See "hg help filesets.size" for the latter.
66
66
67 Example versioned ``.hglfs`` file::
67 Example versioned ``.hglfs`` file::
68
68
69 [track]
69 [track]
70 # No Makefile or python file, anywhere, will be LFS
70 # No Makefile or python file, anywhere, will be LFS
71 **Makefile = none()
71 **Makefile = none()
72 **.py = none()
72 **.py = none()
73
73
74 **.zip = all()
74 **.zip = all()
75 **.exe = size(">1MB")
75 **.exe = size(">1MB")
76
76
77 # Catchall for everything not matched above
77 # Catchall for everything not matched above
78 ** = size(">10MB")
78 ** = size(">10MB")
79
79
80 Configs::
80 Configs::
81
81
82 [lfs]
82 [lfs]
83 # Remote endpoint. Multiple protocols are supported:
83 # Remote endpoint. Multiple protocols are supported:
84 # - http(s)://user:pass@example.com/path
84 # - http(s)://user:pass@example.com/path
85 # git-lfs endpoint
85 # git-lfs endpoint
86 # - file:///tmp/path
86 # - file:///tmp/path
87 # local filesystem, usually for testing
87 # local filesystem, usually for testing
88 # if unset, lfs will assume the remote repository also handles blob storage
88 # if unset, lfs will assume the remote repository also handles blob storage
89 # for http(s) URLs. Otherwise, lfs will prompt to set this when it must
89 # for http(s) URLs. Otherwise, lfs will prompt to set this when it must
90 # use this value.
90 # use this value.
91 # (default: unset)
91 # (default: unset)
92 url = https://example.com/repo.git/info/lfs
92 url = https://example.com/repo.git/info/lfs
93
93
94 # Which files to track in LFS. Path tests are "**.extname" for file
94 # Which files to track in LFS. Path tests are "**.extname" for file
95 # extensions, and "path:under/some/directory" for path prefix. Both
95 # extensions, and "path:under/some/directory" for path prefix. Both
96 # are relative to the repository root.
96 # are relative to the repository root.
97 # File size can be tested with the "size()" fileset, and tests can be
97 # File size can be tested with the "size()" fileset, and tests can be
98 # joined with fileset operators. (See "hg help filesets.operators".)
98 # joined with fileset operators. (See "hg help filesets.operators".)
99 #
99 #
100 # Some examples:
100 # Some examples:
101 # - all() # everything
101 # - all() # everything
102 # - none() # nothing
102 # - none() # nothing
103 # - size(">20MB") # larger than 20MB
103 # - size(">20MB") # larger than 20MB
104 # - !**.txt # anything not a *.txt file
104 # - !**.txt # anything not a *.txt file
105 # - **.zip | **.tar.gz | **.7z # some types of compressed files
105 # - **.zip | **.tar.gz | **.7z # some types of compressed files
106 # - path:bin # files under "bin" in the project root
106 # - path:bin # files under "bin" in the project root
107 # - (**.php & size(">2MB")) | (**.js & size(">5MB")) | **.tar.gz
107 # - (**.php & size(">2MB")) | (**.js & size(">5MB")) | **.tar.gz
108 # | (path:bin & !path:/bin/README) | size(">1GB")
108 # | (path:bin & !path:/bin/README) | size(">1GB")
109 # (default: none())
109 # (default: none())
110 #
110 #
111 # This is ignored if there is a tracked '.hglfs' file, and this setting
111 # This is ignored if there is a tracked '.hglfs' file, and this setting
112 # will eventually be deprecated and removed.
112 # will eventually be deprecated and removed.
113 track = size(">10M")
113 track = size(">10M")
114
114
115 # how many times to retry before giving up on transferring an object
115 # how many times to retry before giving up on transferring an object
116 retry = 5
116 retry = 5
117
117
118 # the local directory to store lfs files for sharing across local clones.
118 # the local directory to store lfs files for sharing across local clones.
119 # If not set, the cache is located in an OS specific cache location.
119 # If not set, the cache is located in an OS specific cache location.
120 usercache = /path/to/global/cache
120 usercache = /path/to/global/cache
121 """
121 """
122
122
123 from __future__ import absolute_import
123 from __future__ import absolute_import
124
124
125 import sys
125 import sys
126
126
127 from mercurial.i18n import _
127 from mercurial.i18n import _
128
128
129 from mercurial import (
129 from mercurial import (
130 bundlecaches,
130 config,
131 config,
131 context,
132 context,
132 error,
133 error,
133 exchange,
134 extensions,
134 extensions,
135 exthelper,
135 exthelper,
136 filelog,
136 filelog,
137 filesetlang,
137 filesetlang,
138 localrepo,
138 localrepo,
139 minifileset,
139 minifileset,
140 node,
140 node,
141 pycompat,
141 pycompat,
142 revlog,
142 revlog,
143 scmutil,
143 scmutil,
144 templateutil,
144 templateutil,
145 util,
145 util,
146 )
146 )
147
147
148 from mercurial.interfaces import repository
148 from mercurial.interfaces import repository
149
149
150 from . import (
150 from . import (
151 blobstore,
151 blobstore,
152 wireprotolfsserver,
152 wireprotolfsserver,
153 wrapper,
153 wrapper,
154 )
154 )
155
155
156 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
156 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
157 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
157 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
158 # be specifying the version(s) of Mercurial they are tested with, or
158 # be specifying the version(s) of Mercurial they are tested with, or
159 # leave the attribute unspecified.
159 # leave the attribute unspecified.
160 testedwith = b'ships-with-hg-core'
160 testedwith = b'ships-with-hg-core'
161
161
162 eh = exthelper.exthelper()
162 eh = exthelper.exthelper()
163 eh.merge(wrapper.eh)
163 eh.merge(wrapper.eh)
164 eh.merge(wireprotolfsserver.eh)
164 eh.merge(wireprotolfsserver.eh)
165
165
166 cmdtable = eh.cmdtable
166 cmdtable = eh.cmdtable
167 configtable = eh.configtable
167 configtable = eh.configtable
168 extsetup = eh.finalextsetup
168 extsetup = eh.finalextsetup
169 uisetup = eh.finaluisetup
169 uisetup = eh.finaluisetup
170 filesetpredicate = eh.filesetpredicate
170 filesetpredicate = eh.filesetpredicate
171 reposetup = eh.finalreposetup
171 reposetup = eh.finalreposetup
172 templatekeyword = eh.templatekeyword
172 templatekeyword = eh.templatekeyword
173
173
174 eh.configitem(
174 eh.configitem(
175 b'experimental', b'lfs.serve', default=True,
175 b'experimental', b'lfs.serve', default=True,
176 )
176 )
177 eh.configitem(
177 eh.configitem(
178 b'experimental', b'lfs.user-agent', default=None,
178 b'experimental', b'lfs.user-agent', default=None,
179 )
179 )
180 eh.configitem(
180 eh.configitem(
181 b'experimental', b'lfs.disableusercache', default=False,
181 b'experimental', b'lfs.disableusercache', default=False,
182 )
182 )
183 eh.configitem(
183 eh.configitem(
184 b'experimental', b'lfs.worker-enable', default=True,
184 b'experimental', b'lfs.worker-enable', default=True,
185 )
185 )
186
186
187 eh.configitem(
187 eh.configitem(
188 b'lfs', b'url', default=None,
188 b'lfs', b'url', default=None,
189 )
189 )
190 eh.configitem(
190 eh.configitem(
191 b'lfs', b'usercache', default=None,
191 b'lfs', b'usercache', default=None,
192 )
192 )
193 # Deprecated
193 # Deprecated
194 eh.configitem(
194 eh.configitem(
195 b'lfs', b'threshold', default=None,
195 b'lfs', b'threshold', default=None,
196 )
196 )
197 eh.configitem(
197 eh.configitem(
198 b'lfs', b'track', default=b'none()',
198 b'lfs', b'track', default=b'none()',
199 )
199 )
200 eh.configitem(
200 eh.configitem(
201 b'lfs', b'retry', default=5,
201 b'lfs', b'retry', default=5,
202 )
202 )
203
203
204 lfsprocessor = (
204 lfsprocessor = (
205 wrapper.readfromstore,
205 wrapper.readfromstore,
206 wrapper.writetostore,
206 wrapper.writetostore,
207 wrapper.bypasscheckhash,
207 wrapper.bypasscheckhash,
208 )
208 )
209
209
210
210
211 def featuresetup(ui, supported):
211 def featuresetup(ui, supported):
212 # don't die on seeing a repo with the lfs requirement
212 # don't die on seeing a repo with the lfs requirement
213 supported |= {b'lfs'}
213 supported |= {b'lfs'}
214
214
215
215
216 @eh.uisetup
216 @eh.uisetup
217 def _uisetup(ui):
217 def _uisetup(ui):
218 localrepo.featuresetupfuncs.add(featuresetup)
218 localrepo.featuresetupfuncs.add(featuresetup)
219
219
220
220
221 @eh.reposetup
221 @eh.reposetup
222 def _reposetup(ui, repo):
222 def _reposetup(ui, repo):
223 # Nothing to do with a remote repo
223 # Nothing to do with a remote repo
224 if not repo.local():
224 if not repo.local():
225 return
225 return
226
226
227 repo.svfs.lfslocalblobstore = blobstore.local(repo)
227 repo.svfs.lfslocalblobstore = blobstore.local(repo)
228 repo.svfs.lfsremoteblobstore = blobstore.remote(repo)
228 repo.svfs.lfsremoteblobstore = blobstore.remote(repo)
229
229
230 class lfsrepo(repo.__class__):
230 class lfsrepo(repo.__class__):
231 @localrepo.unfilteredmethod
231 @localrepo.unfilteredmethod
232 def commitctx(self, ctx, error=False, origctx=None):
232 def commitctx(self, ctx, error=False, origctx=None):
233 repo.svfs.options[b'lfstrack'] = _trackedmatcher(self)
233 repo.svfs.options[b'lfstrack'] = _trackedmatcher(self)
234 return super(lfsrepo, self).commitctx(ctx, error, origctx=origctx)
234 return super(lfsrepo, self).commitctx(ctx, error, origctx=origctx)
235
235
236 repo.__class__ = lfsrepo
236 repo.__class__ = lfsrepo
237
237
238 if b'lfs' not in repo.requirements:
238 if b'lfs' not in repo.requirements:
239
239
240 def checkrequireslfs(ui, repo, **kwargs):
240 def checkrequireslfs(ui, repo, **kwargs):
241 if b'lfs' in repo.requirements:
241 if b'lfs' in repo.requirements:
242 return 0
242 return 0
243
243
244 last = kwargs.get('node_last')
244 last = kwargs.get('node_last')
245 _bin = node.bin
245 _bin = node.bin
246 if last:
246 if last:
247 s = repo.set(b'%n:%n', _bin(kwargs['node']), _bin(last))
247 s = repo.set(b'%n:%n', _bin(kwargs['node']), _bin(last))
248 else:
248 else:
249 s = repo.set(b'%n', _bin(kwargs['node']))
249 s = repo.set(b'%n', _bin(kwargs['node']))
250 match = repo._storenarrowmatch
250 match = repo._storenarrowmatch
251 for ctx in s:
251 for ctx in s:
252 # TODO: is there a way to just walk the files in the commit?
252 # TODO: is there a way to just walk the files in the commit?
253 if any(
253 if any(
254 ctx[f].islfs() for f in ctx.files() if f in ctx and match(f)
254 ctx[f].islfs() for f in ctx.files() if f in ctx and match(f)
255 ):
255 ):
256 repo.requirements.add(b'lfs')
256 repo.requirements.add(b'lfs')
257 repo.features.add(repository.REPO_FEATURE_LFS)
257 repo.features.add(repository.REPO_FEATURE_LFS)
258 scmutil.writereporequirements(repo)
258 scmutil.writereporequirements(repo)
259 repo.prepushoutgoinghooks.add(b'lfs', wrapper.prepush)
259 repo.prepushoutgoinghooks.add(b'lfs', wrapper.prepush)
260 break
260 break
261
261
262 ui.setconfig(b'hooks', b'commit.lfs', checkrequireslfs, b'lfs')
262 ui.setconfig(b'hooks', b'commit.lfs', checkrequireslfs, b'lfs')
263 ui.setconfig(
263 ui.setconfig(
264 b'hooks', b'pretxnchangegroup.lfs', checkrequireslfs, b'lfs'
264 b'hooks', b'pretxnchangegroup.lfs', checkrequireslfs, b'lfs'
265 )
265 )
266 else:
266 else:
267 repo.prepushoutgoinghooks.add(b'lfs', wrapper.prepush)
267 repo.prepushoutgoinghooks.add(b'lfs', wrapper.prepush)
268
268
269
269
270 def _trackedmatcher(repo):
270 def _trackedmatcher(repo):
271 """Return a function (path, size) -> bool indicating whether or not to
271 """Return a function (path, size) -> bool indicating whether or not to
272 track a given file with lfs."""
272 track a given file with lfs."""
273 if not repo.wvfs.exists(b'.hglfs'):
273 if not repo.wvfs.exists(b'.hglfs'):
274 # No '.hglfs' in wdir. Fallback to config for now.
274 # No '.hglfs' in wdir. Fallback to config for now.
275 trackspec = repo.ui.config(b'lfs', b'track')
275 trackspec = repo.ui.config(b'lfs', b'track')
276
276
277 # deprecated config: lfs.threshold
277 # deprecated config: lfs.threshold
278 threshold = repo.ui.configbytes(b'lfs', b'threshold')
278 threshold = repo.ui.configbytes(b'lfs', b'threshold')
279 if threshold:
279 if threshold:
280 filesetlang.parse(trackspec) # make sure syntax errors are confined
280 filesetlang.parse(trackspec) # make sure syntax errors are confined
281 trackspec = b"(%s) | size('>%d')" % (trackspec, threshold)
281 trackspec = b"(%s) | size('>%d')" % (trackspec, threshold)
282
282
283 return minifileset.compile(trackspec)
283 return minifileset.compile(trackspec)
284
284
285 data = repo.wvfs.tryread(b'.hglfs')
285 data = repo.wvfs.tryread(b'.hglfs')
286 if not data:
286 if not data:
287 return lambda p, s: False
287 return lambda p, s: False
288
288
289 # Parse errors here will abort with a message that points to the .hglfs file
289 # Parse errors here will abort with a message that points to the .hglfs file
290 # and line number.
290 # and line number.
291 cfg = config.config()
291 cfg = config.config()
292 cfg.parse(b'.hglfs', data)
292 cfg.parse(b'.hglfs', data)
293
293
294 try:
294 try:
295 rules = [
295 rules = [
296 (minifileset.compile(pattern), minifileset.compile(rule))
296 (minifileset.compile(pattern), minifileset.compile(rule))
297 for pattern, rule in cfg.items(b'track')
297 for pattern, rule in cfg.items(b'track')
298 ]
298 ]
299 except error.ParseError as e:
299 except error.ParseError as e:
300 # The original exception gives no indicator that the error is in the
300 # The original exception gives no indicator that the error is in the
301 # .hglfs file, so add that.
301 # .hglfs file, so add that.
302
302
303 # TODO: See if the line number of the file can be made available.
303 # TODO: See if the line number of the file can be made available.
304 raise error.Abort(_(b'parse error in .hglfs: %s') % e)
304 raise error.Abort(_(b'parse error in .hglfs: %s') % e)
305
305
306 def _match(path, size):
306 def _match(path, size):
307 for pat, rule in rules:
307 for pat, rule in rules:
308 if pat(path, size):
308 if pat(path, size):
309 return rule(path, size)
309 return rule(path, size)
310
310
311 return False
311 return False
312
312
313 return _match
313 return _match
314
314
315
315
316 # Called by remotefilelog
316 # Called by remotefilelog
317 def wrapfilelog(filelog):
317 def wrapfilelog(filelog):
318 wrapfunction = extensions.wrapfunction
318 wrapfunction = extensions.wrapfunction
319
319
320 wrapfunction(filelog, 'addrevision', wrapper.filelogaddrevision)
320 wrapfunction(filelog, 'addrevision', wrapper.filelogaddrevision)
321 wrapfunction(filelog, 'renamed', wrapper.filelogrenamed)
321 wrapfunction(filelog, 'renamed', wrapper.filelogrenamed)
322 wrapfunction(filelog, 'size', wrapper.filelogsize)
322 wrapfunction(filelog, 'size', wrapper.filelogsize)
323
323
324
324
325 @eh.wrapfunction(localrepo, b'resolverevlogstorevfsoptions')
325 @eh.wrapfunction(localrepo, b'resolverevlogstorevfsoptions')
326 def _resolverevlogstorevfsoptions(orig, ui, requirements, features):
326 def _resolverevlogstorevfsoptions(orig, ui, requirements, features):
327 opts = orig(ui, requirements, features)
327 opts = orig(ui, requirements, features)
328 for name, module in extensions.extensions(ui):
328 for name, module in extensions.extensions(ui):
329 if module is sys.modules[__name__]:
329 if module is sys.modules[__name__]:
330 if revlog.REVIDX_EXTSTORED in opts[b'flagprocessors']:
330 if revlog.REVIDX_EXTSTORED in opts[b'flagprocessors']:
331 msg = (
331 msg = (
332 _(b"cannot register multiple processors on flag '%#x'.")
332 _(b"cannot register multiple processors on flag '%#x'.")
333 % revlog.REVIDX_EXTSTORED
333 % revlog.REVIDX_EXTSTORED
334 )
334 )
335 raise error.Abort(msg)
335 raise error.Abort(msg)
336
336
337 opts[b'flagprocessors'][revlog.REVIDX_EXTSTORED] = lfsprocessor
337 opts[b'flagprocessors'][revlog.REVIDX_EXTSTORED] = lfsprocessor
338 break
338 break
339
339
340 return opts
340 return opts
341
341
342
342
343 @eh.extsetup
343 @eh.extsetup
344 def _extsetup(ui):
344 def _extsetup(ui):
345 wrapfilelog(filelog.filelog)
345 wrapfilelog(filelog.filelog)
346
346
347 context.basefilectx.islfs = wrapper.filectxislfs
347 context.basefilectx.islfs = wrapper.filectxislfs
348
348
349 scmutil.fileprefetchhooks.add(b'lfs', wrapper._prefetchfiles)
349 scmutil.fileprefetchhooks.add(b'lfs', wrapper._prefetchfiles)
350
350
351 # Make bundle choose changegroup3 instead of changegroup2. This affects
351 # Make bundle choose changegroup3 instead of changegroup2. This affects
352 # "hg bundle" command. Note: it does not cover all bundle formats like
352 # "hg bundle" command. Note: it does not cover all bundle formats like
353 # "packed1". Using "packed1" with lfs will likely cause trouble.
353 # "packed1". Using "packed1" with lfs will likely cause trouble.
354 exchange._bundlespeccontentopts[b"v2"][b"cg.version"] = b"03"
354 bundlecaches._bundlespeccontentopts[b"v2"][b"cg.version"] = b"03"
355
355
356
356
357 @eh.filesetpredicate(b'lfs()')
357 @eh.filesetpredicate(b'lfs()')
358 def lfsfileset(mctx, x):
358 def lfsfileset(mctx, x):
359 """File that uses LFS storage."""
359 """File that uses LFS storage."""
360 # i18n: "lfs" is a keyword
360 # i18n: "lfs" is a keyword
361 filesetlang.getargs(x, 0, 0, _(b"lfs takes no arguments"))
361 filesetlang.getargs(x, 0, 0, _(b"lfs takes no arguments"))
362 ctx = mctx.ctx
362 ctx = mctx.ctx
363
363
364 def lfsfilep(f):
364 def lfsfilep(f):
365 return wrapper.pointerfromctx(ctx, f, removed=True) is not None
365 return wrapper.pointerfromctx(ctx, f, removed=True) is not None
366
366
367 return mctx.predicate(lfsfilep, predrepr=b'<lfs>')
367 return mctx.predicate(lfsfilep, predrepr=b'<lfs>')
368
368
369
369
370 @eh.templatekeyword(b'lfs_files', requires={b'ctx'})
370 @eh.templatekeyword(b'lfs_files', requires={b'ctx'})
371 def lfsfiles(context, mapping):
371 def lfsfiles(context, mapping):
372 """List of strings. All files modified, added, or removed by this
372 """List of strings. All files modified, added, or removed by this
373 changeset."""
373 changeset."""
374 ctx = context.resource(mapping, b'ctx')
374 ctx = context.resource(mapping, b'ctx')
375
375
376 pointers = wrapper.pointersfromctx(ctx, removed=True) # {path: pointer}
376 pointers = wrapper.pointersfromctx(ctx, removed=True) # {path: pointer}
377 files = sorted(pointers.keys())
377 files = sorted(pointers.keys())
378
378
379 def pointer(v):
379 def pointer(v):
380 # In the file spec, version is first and the other keys are sorted.
380 # In the file spec, version is first and the other keys are sorted.
381 sortkeyfunc = lambda x: (x[0] != b'version', x)
381 sortkeyfunc = lambda x: (x[0] != b'version', x)
382 items = sorted(pycompat.iteritems(pointers[v]), key=sortkeyfunc)
382 items = sorted(pycompat.iteritems(pointers[v]), key=sortkeyfunc)
383 return util.sortdict(items)
383 return util.sortdict(items)
384
384
385 makemap = lambda v: {
385 makemap = lambda v: {
386 b'file': v,
386 b'file': v,
387 b'lfsoid': pointers[v].oid() if pointers[v] else None,
387 b'lfsoid': pointers[v].oid() if pointers[v] else None,
388 b'lfspointer': templateutil.hybriddict(pointer(v)),
388 b'lfspointer': templateutil.hybriddict(pointer(v)),
389 }
389 }
390
390
391 # TODO: make the separator ', '?
391 # TODO: make the separator ', '?
392 f = templateutil._showcompatlist(context, mapping, b'lfs_file', files)
392 f = templateutil._showcompatlist(context, mapping, b'lfs_file', files)
393 return templateutil.hybrid(f, files, makemap, pycompat.identity)
393 return templateutil.hybrid(f, files, makemap, pycompat.identity)
394
394
395
395
396 @eh.command(
396 @eh.command(
397 b'debuglfsupload',
397 b'debuglfsupload',
398 [(b'r', b'rev', [], _(b'upload large files introduced by REV'))],
398 [(b'r', b'rev', [], _(b'upload large files introduced by REV'))],
399 )
399 )
400 def debuglfsupload(ui, repo, **opts):
400 def debuglfsupload(ui, repo, **opts):
401 """upload lfs blobs added by the working copy parent or given revisions"""
401 """upload lfs blobs added by the working copy parent or given revisions"""
402 revs = opts.get('rev', [])
402 revs = opts.get('rev', [])
403 pointers = wrapper.extractpointers(repo, scmutil.revrange(repo, revs))
403 pointers = wrapper.extractpointers(repo, scmutil.revrange(repo, revs))
404 wrapper.uploadblobs(repo, pointers)
404 wrapper.uploadblobs(repo, pointers)
405
405
406
406
407 @eh.wrapcommand(
407 @eh.wrapcommand(
408 b'verify',
408 b'verify',
409 opts=[(b'', b'no-lfs', None, _(b'skip missing lfs blob content'))],
409 opts=[(b'', b'no-lfs', None, _(b'skip missing lfs blob content'))],
410 )
410 )
411 def verify(orig, ui, repo, **opts):
411 def verify(orig, ui, repo, **opts):
412 skipflags = repo.ui.configint(b'verify', b'skipflags')
412 skipflags = repo.ui.configint(b'verify', b'skipflags')
413 no_lfs = opts.pop('no_lfs')
413 no_lfs = opts.pop('no_lfs')
414
414
415 if skipflags:
415 if skipflags:
416 # --lfs overrides the config bit, if set.
416 # --lfs overrides the config bit, if set.
417 if no_lfs is False:
417 if no_lfs is False:
418 skipflags &= ~repository.REVISION_FLAG_EXTSTORED
418 skipflags &= ~repository.REVISION_FLAG_EXTSTORED
419 else:
419 else:
420 skipflags = 0
420 skipflags = 0
421
421
422 if no_lfs is True:
422 if no_lfs is True:
423 skipflags |= repository.REVISION_FLAG_EXTSTORED
423 skipflags |= repository.REVISION_FLAG_EXTSTORED
424
424
425 with ui.configoverride({(b'verify', b'skipflags'): skipflags}):
425 with ui.configoverride({(b'verify', b'skipflags'): skipflags}):
426 return orig(ui, repo, **opts)
426 return orig(ui, repo, **opts)
@@ -1,7663 +1,7666 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import os
11 import os
12 import re
12 import re
13 import sys
13 import sys
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 hex,
17 hex,
18 nullid,
18 nullid,
19 nullrev,
19 nullrev,
20 short,
20 short,
21 wdirhex,
21 wdirhex,
22 wdirrev,
22 wdirrev,
23 )
23 )
24 from .pycompat import open
24 from .pycompat import open
25 from . import (
25 from . import (
26 archival,
26 archival,
27 bookmarks,
27 bookmarks,
28 bundle2,
28 bundle2,
29 bundlecaches,
29 changegroup,
30 changegroup,
30 cmdutil,
31 cmdutil,
31 copies,
32 copies,
32 debugcommands as debugcommandsmod,
33 debugcommands as debugcommandsmod,
33 destutil,
34 destutil,
34 dirstateguard,
35 dirstateguard,
35 discovery,
36 discovery,
36 encoding,
37 encoding,
37 error,
38 error,
38 exchange,
39 exchange,
39 extensions,
40 extensions,
40 filemerge,
41 filemerge,
41 formatter,
42 formatter,
42 graphmod,
43 graphmod,
43 grep as grepmod,
44 grep as grepmod,
44 hbisect,
45 hbisect,
45 help,
46 help,
46 hg,
47 hg,
47 logcmdutil,
48 logcmdutil,
48 merge as mergemod,
49 merge as mergemod,
49 mergestate as mergestatemod,
50 mergestate as mergestatemod,
50 narrowspec,
51 narrowspec,
51 obsolete,
52 obsolete,
52 obsutil,
53 obsutil,
53 patch,
54 patch,
54 phases,
55 phases,
55 pycompat,
56 pycompat,
56 rcutil,
57 rcutil,
57 registrar,
58 registrar,
58 requirements,
59 requirements,
59 revsetlang,
60 revsetlang,
60 rewriteutil,
61 rewriteutil,
61 scmutil,
62 scmutil,
62 server,
63 server,
63 shelve as shelvemod,
64 shelve as shelvemod,
64 state as statemod,
65 state as statemod,
65 streamclone,
66 streamclone,
66 tags as tagsmod,
67 tags as tagsmod,
67 ui as uimod,
68 ui as uimod,
68 util,
69 util,
69 verify as verifymod,
70 verify as verifymod,
70 vfs as vfsmod,
71 vfs as vfsmod,
71 wireprotoserver,
72 wireprotoserver,
72 )
73 )
73 from .utils import (
74 from .utils import (
74 dateutil,
75 dateutil,
75 stringutil,
76 stringutil,
76 )
77 )
77
78
78 table = {}
79 table = {}
79 table.update(debugcommandsmod.command._table)
80 table.update(debugcommandsmod.command._table)
80
81
81 command = registrar.command(table)
82 command = registrar.command(table)
82 INTENT_READONLY = registrar.INTENT_READONLY
83 INTENT_READONLY = registrar.INTENT_READONLY
83
84
84 # common command options
85 # common command options
85
86
86 globalopts = [
87 globalopts = [
87 (
88 (
88 b'R',
89 b'R',
89 b'repository',
90 b'repository',
90 b'',
91 b'',
91 _(b'repository root directory or name of overlay bundle file'),
92 _(b'repository root directory or name of overlay bundle file'),
92 _(b'REPO'),
93 _(b'REPO'),
93 ),
94 ),
94 (b'', b'cwd', b'', _(b'change working directory'), _(b'DIR')),
95 (b'', b'cwd', b'', _(b'change working directory'), _(b'DIR')),
95 (
96 (
96 b'y',
97 b'y',
97 b'noninteractive',
98 b'noninteractive',
98 None,
99 None,
99 _(
100 _(
100 b'do not prompt, automatically pick the first choice for all prompts'
101 b'do not prompt, automatically pick the first choice for all prompts'
101 ),
102 ),
102 ),
103 ),
103 (b'q', b'quiet', None, _(b'suppress output')),
104 (b'q', b'quiet', None, _(b'suppress output')),
104 (b'v', b'verbose', None, _(b'enable additional output')),
105 (b'v', b'verbose', None, _(b'enable additional output')),
105 (
106 (
106 b'',
107 b'',
107 b'color',
108 b'color',
108 b'',
109 b'',
109 # i18n: 'always', 'auto', 'never', and 'debug' are keywords
110 # i18n: 'always', 'auto', 'never', and 'debug' are keywords
110 # and should not be translated
111 # and should not be translated
111 _(b"when to colorize (boolean, always, auto, never, or debug)"),
112 _(b"when to colorize (boolean, always, auto, never, or debug)"),
112 _(b'TYPE'),
113 _(b'TYPE'),
113 ),
114 ),
114 (
115 (
115 b'',
116 b'',
116 b'config',
117 b'config',
117 [],
118 [],
118 _(b'set/override config option (use \'section.name=value\')'),
119 _(b'set/override config option (use \'section.name=value\')'),
119 _(b'CONFIG'),
120 _(b'CONFIG'),
120 ),
121 ),
121 (b'', b'debug', None, _(b'enable debugging output')),
122 (b'', b'debug', None, _(b'enable debugging output')),
122 (b'', b'debugger', None, _(b'start debugger')),
123 (b'', b'debugger', None, _(b'start debugger')),
123 (
124 (
124 b'',
125 b'',
125 b'encoding',
126 b'encoding',
126 encoding.encoding,
127 encoding.encoding,
127 _(b'set the charset encoding'),
128 _(b'set the charset encoding'),
128 _(b'ENCODE'),
129 _(b'ENCODE'),
129 ),
130 ),
130 (
131 (
131 b'',
132 b'',
132 b'encodingmode',
133 b'encodingmode',
133 encoding.encodingmode,
134 encoding.encodingmode,
134 _(b'set the charset encoding mode'),
135 _(b'set the charset encoding mode'),
135 _(b'MODE'),
136 _(b'MODE'),
136 ),
137 ),
137 (b'', b'traceback', None, _(b'always print a traceback on exception')),
138 (b'', b'traceback', None, _(b'always print a traceback on exception')),
138 (b'', b'time', None, _(b'time how long the command takes')),
139 (b'', b'time', None, _(b'time how long the command takes')),
139 (b'', b'profile', None, _(b'print command execution profile')),
140 (b'', b'profile', None, _(b'print command execution profile')),
140 (b'', b'version', None, _(b'output version information and exit')),
141 (b'', b'version', None, _(b'output version information and exit')),
141 (b'h', b'help', None, _(b'display help and exit')),
142 (b'h', b'help', None, _(b'display help and exit')),
142 (b'', b'hidden', False, _(b'consider hidden changesets')),
143 (b'', b'hidden', False, _(b'consider hidden changesets')),
143 (
144 (
144 b'',
145 b'',
145 b'pager',
146 b'pager',
146 b'auto',
147 b'auto',
147 _(b"when to paginate (boolean, always, auto, or never)"),
148 _(b"when to paginate (boolean, always, auto, or never)"),
148 _(b'TYPE'),
149 _(b'TYPE'),
149 ),
150 ),
150 ]
151 ]
151
152
152 dryrunopts = cmdutil.dryrunopts
153 dryrunopts = cmdutil.dryrunopts
153 remoteopts = cmdutil.remoteopts
154 remoteopts = cmdutil.remoteopts
154 walkopts = cmdutil.walkopts
155 walkopts = cmdutil.walkopts
155 commitopts = cmdutil.commitopts
156 commitopts = cmdutil.commitopts
156 commitopts2 = cmdutil.commitopts2
157 commitopts2 = cmdutil.commitopts2
157 commitopts3 = cmdutil.commitopts3
158 commitopts3 = cmdutil.commitopts3
158 formatteropts = cmdutil.formatteropts
159 formatteropts = cmdutil.formatteropts
159 templateopts = cmdutil.templateopts
160 templateopts = cmdutil.templateopts
160 logopts = cmdutil.logopts
161 logopts = cmdutil.logopts
161 diffopts = cmdutil.diffopts
162 diffopts = cmdutil.diffopts
162 diffwsopts = cmdutil.diffwsopts
163 diffwsopts = cmdutil.diffwsopts
163 diffopts2 = cmdutil.diffopts2
164 diffopts2 = cmdutil.diffopts2
164 mergetoolopts = cmdutil.mergetoolopts
165 mergetoolopts = cmdutil.mergetoolopts
165 similarityopts = cmdutil.similarityopts
166 similarityopts = cmdutil.similarityopts
166 subrepoopts = cmdutil.subrepoopts
167 subrepoopts = cmdutil.subrepoopts
167 debugrevlogopts = cmdutil.debugrevlogopts
168 debugrevlogopts = cmdutil.debugrevlogopts
168
169
169 # Commands start here, listed alphabetically
170 # Commands start here, listed alphabetically
170
171
171
172
172 @command(
173 @command(
173 b'abort',
174 b'abort',
174 dryrunopts,
175 dryrunopts,
175 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
176 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
176 helpbasic=True,
177 helpbasic=True,
177 )
178 )
178 def abort(ui, repo, **opts):
179 def abort(ui, repo, **opts):
179 """abort an unfinished operation (EXPERIMENTAL)
180 """abort an unfinished operation (EXPERIMENTAL)
180
181
181 Aborts a multistep operation like graft, histedit, rebase, merge,
182 Aborts a multistep operation like graft, histedit, rebase, merge,
182 and unshelve if they are in an unfinished state.
183 and unshelve if they are in an unfinished state.
183
184
184 use --dry-run/-n to dry run the command.
185 use --dry-run/-n to dry run the command.
185 """
186 """
186 dryrun = opts.get('dry_run')
187 dryrun = opts.get('dry_run')
187 abortstate = cmdutil.getunfinishedstate(repo)
188 abortstate = cmdutil.getunfinishedstate(repo)
188 if not abortstate:
189 if not abortstate:
189 raise error.Abort(_(b'no operation in progress'))
190 raise error.Abort(_(b'no operation in progress'))
190 if not abortstate.abortfunc:
191 if not abortstate.abortfunc:
191 raise error.Abort(
192 raise error.Abort(
192 (
193 (
193 _(b"%s in progress but does not support 'hg abort'")
194 _(b"%s in progress but does not support 'hg abort'")
194 % (abortstate._opname)
195 % (abortstate._opname)
195 ),
196 ),
196 hint=abortstate.hint(),
197 hint=abortstate.hint(),
197 )
198 )
198 if dryrun:
199 if dryrun:
199 ui.status(
200 ui.status(
200 _(b'%s in progress, will be aborted\n') % (abortstate._opname)
201 _(b'%s in progress, will be aborted\n') % (abortstate._opname)
201 )
202 )
202 return
203 return
203 return abortstate.abortfunc(ui, repo)
204 return abortstate.abortfunc(ui, repo)
204
205
205
206
206 @command(
207 @command(
207 b'add',
208 b'add',
208 walkopts + subrepoopts + dryrunopts,
209 walkopts + subrepoopts + dryrunopts,
209 _(b'[OPTION]... [FILE]...'),
210 _(b'[OPTION]... [FILE]...'),
210 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
211 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
211 helpbasic=True,
212 helpbasic=True,
212 inferrepo=True,
213 inferrepo=True,
213 )
214 )
214 def add(ui, repo, *pats, **opts):
215 def add(ui, repo, *pats, **opts):
215 """add the specified files on the next commit
216 """add the specified files on the next commit
216
217
217 Schedule files to be version controlled and added to the
218 Schedule files to be version controlled and added to the
218 repository.
219 repository.
219
220
220 The files will be added to the repository at the next commit. To
221 The files will be added to the repository at the next commit. To
221 undo an add before that, see :hg:`forget`.
222 undo an add before that, see :hg:`forget`.
222
223
223 If no names are given, add all files to the repository (except
224 If no names are given, add all files to the repository (except
224 files matching ``.hgignore``).
225 files matching ``.hgignore``).
225
226
226 .. container:: verbose
227 .. container:: verbose
227
228
228 Examples:
229 Examples:
229
230
230 - New (unknown) files are added
231 - New (unknown) files are added
231 automatically by :hg:`add`::
232 automatically by :hg:`add`::
232
233
233 $ ls
234 $ ls
234 foo.c
235 foo.c
235 $ hg status
236 $ hg status
236 ? foo.c
237 ? foo.c
237 $ hg add
238 $ hg add
238 adding foo.c
239 adding foo.c
239 $ hg status
240 $ hg status
240 A foo.c
241 A foo.c
241
242
242 - Specific files to be added can be specified::
243 - Specific files to be added can be specified::
243
244
244 $ ls
245 $ ls
245 bar.c foo.c
246 bar.c foo.c
246 $ hg status
247 $ hg status
247 ? bar.c
248 ? bar.c
248 ? foo.c
249 ? foo.c
249 $ hg add bar.c
250 $ hg add bar.c
250 $ hg status
251 $ hg status
251 A bar.c
252 A bar.c
252 ? foo.c
253 ? foo.c
253
254
254 Returns 0 if all files are successfully added.
255 Returns 0 if all files are successfully added.
255 """
256 """
256
257
257 m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
258 m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
258 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
259 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
259 rejected = cmdutil.add(ui, repo, m, b"", uipathfn, False, **opts)
        rejected = cmdutil.add(ui, repo, m, b"", uipathfn, False, **opts)
        return rejected and 1 or 0


@command(
    b'addremove',
    similarityopts + subrepoopts + walkopts + dryrunopts,
    _(b'[OPTION]... [FILE]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
    inferrepo=True,
)
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the
    repository.

    Unless names are given, new files are ignored if they match any of
    the patterns in ``.hgignore``. As with add, these changes take
    effect at the next commit.

    Use the -s/--similarity option to detect renamed files. This
    option takes a percentage between 0 (disabled) and 100 (files must
    be identical) as its parameter. With a parameter greater than 0,
    this compares every removed file with every added file and records
    those similar enough as renames. Detecting renamed files this way
    can be expensive. After using this option, :hg:`status -C` can be
    used to check which files were identified as moved or renamed. If
    not specified, -s/--similarity defaults to 100 and only renames of
    identical files are detected.

    .. container:: verbose

       Examples:

       - A number of files (bar.c and foo.c) are new,
         while foobar.c has been removed (without using :hg:`remove`)
         from the repository::

           $ ls
           bar.c foo.c
           $ hg status
           ! foobar.c
           ? bar.c
           ? foo.c
           $ hg addremove
           adding bar.c
           adding foo.c
           removing foobar.c
           $ hg status
           A bar.c
           A foo.c
           R foobar.c

       - A file foobar.c was moved to foo.c without using :hg:`rename`.
         Afterwards, it was edited slightly::

           $ ls
           foo.c
           $ hg status
           ! foobar.c
           ? foo.c
           $ hg addremove --similarity 90
           removing foobar.c
           adding foo.c
           recording removal of foobar.c as rename to foo.c (94% similar)
           $ hg status -C
           A foo.c
             foobar.c
           R foobar.c

    Returns 0 if all files are successfully added.
    """
    opts = pycompat.byteskwargs(opts)
    if not opts.get(b'similarity'):
        opts[b'similarity'] = b'100'
    matcher = scmutil.match(repo[None], pats, opts)
    relative = scmutil.anypats(pats, opts)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
    return scmutil.addremove(repo, matcher, b"", uipathfn, opts)


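# Illustrative sketch only (not part of commands.py): the -s/--similarity
# threshold described in the addremove docstring above compares each removed
# file against each added file and records pairs scoring at or above the
# threshold as renames. Mercurial ships its own optimized implementation (see
# mercurial/similar.py); the helper below merely restates the idea with the
# standard-library difflib on raw bytes, as an assumption-based approximation.
def _naive_similarity_percent(old_data, new_data):
    """Return an approximate similarity score between two byte strings, 0-100."""
    import difflib

    ratio = difflib.SequenceMatcher(None, old_data, new_data).ratio()
    return int(ratio * 100)


# Example: a score of 94 would satisfy `hg addremove --similarity 90`, but not
# the default threshold of 100 (identical files only).

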
@command(
    b'annotate|blame',
    [
        (b'r', b'rev', b'', _(b'annotate the specified revision'), _(b'REV')),
        (
            b'',
            b'follow',
            None,
            _(b'follow copies/renames and list the filename (DEPRECATED)'),
        ),
        (b'', b'no-follow', None, _(b"don't follow copies and renames")),
        (b'a', b'text', None, _(b'treat all files as text')),
        (b'u', b'user', None, _(b'list the author (long with -v)')),
        (b'f', b'file', None, _(b'list the filename')),
        (b'd', b'date', None, _(b'list the date (short with -q)')),
        (b'n', b'number', None, _(b'list the revision number (default)')),
        (b'c', b'changeset', None, _(b'list the changeset')),
        (
            b'l',
            b'line-number',
            None,
            _(b'show line number at the first appearance'),
        ),
        (
            b'',
            b'skip',
            [],
            _(b'revset to not display (EXPERIMENTAL)'),
            _(b'REV'),
        ),
    ]
    + diffwsopts
    + walkopts
    + formatteropts,
    _(b'[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
    helpcategory=command.CATEGORY_FILE_CONTENTS,
    helpbasic=True,
    inferrepo=True,
)
def annotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for
    each line.

    This command is useful for discovering when a change was made and
    by whom.

    If you include --file, --user, or --date, the revision number is
    suppressed unless you also include --number.

    Without the -a/--text option, annotate will avoid processing files
    it detects as binary. With -a, annotate will annotate the file
    anyway, although the results will probably be neither useful
    nor desirable.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :lines: List of lines with annotation data.
      :path: String. Repository-absolute path of the specified file.

      And each entry of ``{lines}`` provides the following sub-keywords in
      addition to ``{date}``, ``{node}``, ``{rev}``, ``{user}``, etc.

      :line: String. Line content.
      :lineno: Integer. Line number at that revision.
      :path: String. Repository-absolute path of the file at that revision.

      See :hg:`help templates.operators` for the list expansion syntax.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    if not pats:
        raise error.Abort(_(b'at least one filename or pattern is required'))

    if opts.get(b'follow'):
        # --follow is deprecated and now just an alias for -f/--file
        # to mimic the behavior of Mercurial before version 1.5
        opts[b'file'] = True

    if (
        not opts.get(b'user')
        and not opts.get(b'changeset')
        and not opts.get(b'date')
        and not opts.get(b'file')
    ):
        opts[b'number'] = True

    linenumber = opts.get(b'line_number') is not None
    if (
        linenumber
        and (not opts.get(b'changeset'))
        and (not opts.get(b'number'))
    ):
        raise error.Abort(_(b'at least one of -n/-c is required for -l'))

    rev = opts.get(b'rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
    ctx = scmutil.revsingle(repo, rev)

    ui.pager(b'annotate')
    rootfm = ui.formatter(b'annotate', opts)
    if ui.debugflag:
        shorthex = pycompat.identity
    else:

        def shorthex(h):
            return h[:12]

    if ui.quiet:
        datefunc = dateutil.shortdate
    else:
        datefunc = dateutil.datestr
    if ctx.rev() is None:
        if opts.get(b'changeset'):
            # omit "+" suffix which is appended to node hex
            def formatrev(rev):
                if rev == wdirrev:
                    return b'%d' % ctx.p1().rev()
                else:
                    return b'%d' % rev

        else:

            def formatrev(rev):
                if rev == wdirrev:
                    return b'%d+' % ctx.p1().rev()
                else:
                    return b'%d ' % rev

        def formathex(h):
            if h == wdirhex:
                return b'%s+' % shorthex(hex(ctx.p1().node()))
            else:
                return b'%s ' % shorthex(h)

    else:
        formatrev = b'%d'.__mod__
        formathex = shorthex

    opmap = [
        (b'user', b' ', lambda x: x.fctx.user(), ui.shortuser),
        (b'rev', b' ', lambda x: scmutil.intrev(x.fctx), formatrev),
        (b'node', b' ', lambda x: hex(scmutil.binnode(x.fctx)), formathex),
        (b'date', b' ', lambda x: x.fctx.date(), util.cachefunc(datefunc)),
        (b'path', b' ', lambda x: x.fctx.path(), pycompat.bytestr),
        (b'lineno', b':', lambda x: x.lineno, pycompat.bytestr),
    ]
    opnamemap = {
        b'rev': b'number',
        b'node': b'changeset',
        b'path': b'file',
        b'lineno': b'line_number',
    }

    if rootfm.isplain():

        def makefunc(get, fmt):
            return lambda x: fmt(get(x))

    else:

        def makefunc(get, fmt):
            return get

    datahint = rootfm.datahint()
    funcmap = [
        (makefunc(get, fmt), sep)
        for fn, sep, get, fmt in opmap
        if opts.get(opnamemap.get(fn, fn)) or fn in datahint
    ]
    funcmap[0] = (funcmap[0][0], b'')  # no separator in front of first column
    fields = b' '.join(
        fn
        for fn, sep, get, fmt in opmap
        if opts.get(opnamemap.get(fn, fn)) or fn in datahint
    )

    def bad(x, y):
        raise error.Abort(b"%s: %s" % (x, y))

    m = scmutil.match(ctx, pats, opts, badfn=bad)

    follow = not opts.get(b'no_follow')
    diffopts = patch.difffeatureopts(
        ui, opts, section=b'annotate', whitespace=True
    )
    skiprevs = opts.get(b'skip')
    if skiprevs:
        skiprevs = scmutil.revrange(repo, skiprevs)

    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        rootfm.startitem()
        rootfm.data(path=abs)
        if not opts.get(b'text') and fctx.isbinary():
            rootfm.plain(_(b"%s: binary file\n") % uipathfn(abs))
            continue

        fm = rootfm.nested(b'lines', tmpl=b'{rev}: {line}')
        lines = fctx.annotate(
            follow=follow, skiprevs=skiprevs, diffopts=diffopts
        )
        if not lines:
            fm.end()
            continue
        formats = []
        pieces = []

        for f, sep in funcmap:
            l = [f(n) for n in lines]
            if fm.isplain():
                sizes = [encoding.colwidth(x) for x in l]
                ml = max(sizes)
                formats.append([sep + b' ' * (ml - w) + b'%s' for w in sizes])
            else:
                formats.append([b'%s'] * len(l))
            pieces.append(l)

        for f, p, n in zip(zip(*formats), zip(*pieces), lines):
            fm.startitem()
            fm.context(fctx=n.fctx)
            fm.write(fields, b"".join(f), *p)
            if n.skip:
                fmt = b"* %s"
            else:
                fmt = b": %s"
            fm.write(b'line', fmt, n.text)

        if not lines[-1].text.endswith(b'\n'):
            fm.plain(b'\n')
        fm.end()

    rootfm.end()


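# Illustrative sketch only (not part of commands.py): in plain-text mode the
# annotate loop above right-aligns every metadata column (user, rev, node,
# date, ...) to the widest value in that column, using encoding.colwidth() to
# build per-line format strings. The helper below restates that padding logic
# for ordinary str values, without Mercurial's wide-character handling.
def _right_align_column(values):
    """Right-align a list of strings to the width of the longest one."""
    width = max(len(v) for v in values)
    return [v.rjust(width) for v in values]


# Example: _right_align_column(["9", "12", "105"]) -> ["  9", " 12", "105"]

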
@command(
    b'archive',
    [
        (b'', b'no-decode', None, _(b'do not pass files through decoders')),
        (
            b'p',
            b'prefix',
            b'',
            _(b'directory prefix for files in archive'),
            _(b'PREFIX'),
        ),
        (b'r', b'rev', b'', _(b'revision to distribute'), _(b'REV')),
        (b't', b'type', b'', _(b'type of distribution to create'), _(b'TYPE')),
    ]
    + subrepoopts
    + walkopts,
    _(b'[OPTION]... DEST'),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def archive(ui, repo, dest, **opts):
    '''create an unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use -r/--rev to specify a different revision.

    The archive type is automatically detected based on file
    extension (to override, use -t/--type).

    .. container:: verbose

      Examples:

      - create a zip file containing the 1.0 release::

          hg archive -r 1.0 project-1.0.zip

      - create a tarball excluding .hg files::

          hg archive project.tar.gz -X ".hg*"

    Valid types are:

    :``files``: a directory full of files (default)
    :``tar``:   tar archive, uncompressed
    :``tbz2``:  tar archive, compressed using bzip2
    :``tgz``:   tar archive, compressed using gzip
    :``txz``:   tar archive, compressed using lzma (only in Python 3)
    :``uzip``:  zip archive, uncompressed
    :``zip``:   zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see :hg:`help export` for details.

    Each member added to an archive file has a directory prefix
    prepended. Use -p/--prefix to specify a format string for the
    prefix. The default is the basename of the archive, with suffixes
    removed.

    Returns 0 on success.
    '''

    opts = pycompat.byteskwargs(opts)
    rev = opts.get(b'rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
    ctx = scmutil.revsingle(repo, rev)
    if not ctx:
        raise error.Abort(_(b'no working directory: please specify a revision'))
    node = ctx.node()
    dest = cmdutil.makefilename(ctx, dest)
    if os.path.realpath(dest) == repo.root:
        raise error.Abort(_(b'repository root cannot be destination'))

    kind = opts.get(b'type') or archival.guesskind(dest) or b'files'
    prefix = opts.get(b'prefix')

    if dest == b'-':
        if kind == b'files':
            raise error.Abort(_(b'cannot archive plain files to stdout'))
        dest = cmdutil.makefileobj(ctx, dest)
        if not prefix:
            prefix = os.path.basename(repo.root) + b'-%h'

    prefix = cmdutil.makefilename(ctx, prefix)
    match = scmutil.match(ctx, [], opts)
    archival.archive(
        repo,
        dest,
        node,
        kind,
        not opts.get(b'no_decode'),
        match,
        prefix,
        subrepos=opts.get(b'subrepos'),
    )


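# Illustrative sketch only (not part of commands.py): archival.guesskind(),
# called above, maps the destination's file extension onto one of the archive
# types listed in the archive docstring. The exact extension table lives in
# mercurial/archival.py; the mapping below is an assumption-based restatement
# covering only the documented types.
def _guess_archive_kind(dest):
    """Guess an archive type from a destination filename, or None."""
    exts = {
        '.zip': 'zip',
        '.tar': 'tar',
        '.tar.gz': 'tgz',
        '.tgz': 'tgz',
        '.tar.bz2': 'tbz2',
        '.tbz2': 'tbz2',
        '.tar.xz': 'txz',
        '.txz': 'txz',
    }
    lowered = dest.lower()
    # try the longest extensions first so ".tar.gz" wins over ".gz"-like suffixes
    for ext, kind in sorted(exts.items(), key=lambda item: -len(item[0])):
        if lowered.endswith(ext):
            return kind
    return None


# Example: _guess_archive_kind("project-1.0.zip") -> "zip"; anything
# unrecognized falls back to the b'files' default in the command body above.

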
@command(
    b'backout',
    [
        (
            b'',
            b'merge',
            None,
            _(b'merge with old dirstate parent after backout'),
        ),
        (
            b'',
            b'commit',
            None,
            _(b'commit if no conflicts were encountered (DEPRECATED)'),
        ),
        (b'', b'no-commit', None, _(b'do not commit')),
        (
            b'',
            b'parent',
            b'',
            _(b'parent to choose when backing out merge (DEPRECATED)'),
            _(b'REV'),
        ),
        (b'r', b'rev', b'', _(b'revision to backout'), _(b'REV')),
        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
    ]
    + mergetoolopts
    + walkopts
    + commitopts
    + commitopts2,
    _(b'[OPTION]... [-r] REV'),
    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
)
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Prepare a new changeset with the effect of REV undone in the
    current working directory. If no conflicts were encountered,
    it will be committed immediately.

    If REV is the parent of the working directory, then this new changeset
    is committed automatically (unless --no-commit is specified).

    .. note::

       :hg:`backout` cannot be used to fix either an unwanted or
       incorrect merge.

    .. container:: verbose

      Examples:

      - Reverse the effect of the parent of the working directory.
        This backout will be committed immediately::

          hg backout -r .

      - Reverse the effect of previous bad revision 23::

          hg backout -r 23

      - Reverse the effect of previous bad revision 23 and
        leave changes uncommitted::

          hg backout -r 23 --no-commit
          hg commit -m "Backout revision 23"

      By default, the pending changeset will have one parent,
      maintaining a linear history. With --merge, the pending
      changeset will instead have two parents: the old parent of the
      working directory and a new child of REV that simply undoes REV.

      Before version 1.7, the behavior without --merge was equivalent
      to specifying --merge followed by :hg:`update --clean .` to
      cancel the merge and leave the child of REV as a head to be
      merged separately.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revert` for a way to restore files to the state
    of another revision.

    Returns 0 on success, 1 if nothing to backout or there are unresolved
    files.
    '''
    with repo.wlock(), repo.lock():
        return _dobackout(ui, repo, node, rev, **opts)


def _dobackout(ui, repo, node=None, rev=None, **opts):
    cmdutil.check_incompatible_arguments(opts, 'no_commit', ['commit', 'merge'])
    opts = pycompat.byteskwargs(opts)

    if rev and node:
        raise error.Abort(_(b"please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise error.Abort(_(b"please specify a revision to backout"))

    date = opts.get(b'date')
    if date:
        opts[b'date'] = dateutil.parsedate(date)

    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)
    ctx = scmutil.revsingle(repo, rev)
    node = ctx.node()

    op1, op2 = repo.dirstate.parents()
    if not repo.changelog.isancestor(node, op1):
        raise error.Abort(_(b'cannot backout change that is not an ancestor'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise error.Abort(_(b'cannot backout a change with no parents'))
    if p2 != nullid:
        if not opts.get(b'parent'):
            raise error.Abort(_(b'cannot backout a merge changeset'))
        p = repo.lookup(opts[b'parent'])
        if p not in (p1, p2):
            raise error.Abort(
                _(b'%s is not a parent of %s') % (short(p), short(node))
            )
        parent = p
    else:
        if opts.get(b'parent'):
            raise error.Abort(_(b'cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    branch = repo.dirstate.branch()
    bheads = repo.branchheads(branch)
    rctx = scmutil.revsingle(repo, hex(parent))
    if not opts.get(b'merge') and op1 != node:
        with dirstateguard.dirstateguard(repo, b'backout'):
            overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
            with ui.configoverride(overrides, b'backout'):
                stats = mergemod.back_out(ctx, parent=repo[parent])
            repo.setparents(op1, op2)
        hg._showstats(repo, stats)
        if stats.unresolvedcount:
            repo.ui.status(
                _(b"use 'hg resolve' to retry unresolved file merges\n")
            )
            return 1
    else:
        hg.clean(repo, node, show_stats=False)
        repo.dirstate.setbranch(branch)
        cmdutil.revert(ui, repo, rctx)

    if opts.get(b'no_commit'):
        msg = _(b"changeset %s backed out, don't forget to commit.\n")
        ui.status(msg % short(node))
        return 0

    def commitfunc(ui, repo, message, match, opts):
        editform = b'backout'
        e = cmdutil.getcommiteditor(
            editform=editform, **pycompat.strkwargs(opts)
        )
        if not message:
            # we don't translate commit messages
            message = b"Backed out changeset %s" % short(node)
            e = cmdutil.getcommiteditor(edit=True, editform=editform)
        return repo.commit(
            message, opts.get(b'user'), opts.get(b'date'), match, editor=e
        )

    newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
    if not newnode:
        ui.status(_(b"nothing changed\n"))
        return 1
    cmdutil.commitstatus(repo, newnode, branch, bheads)

    def nice(node):
        return b'%d:%s' % (repo.changelog.rev(node), short(node))

    ui.status(
        _(b'changeset %s backs out changeset %s\n')
        % (nice(repo.changelog.tip()), nice(node))
    )
    if opts.get(b'merge') and op1 != node:
        hg.clean(repo, op1, show_stats=False)
        ui.status(
            _(b'merging with changeset %s\n') % nice(repo.changelog.tip())
        )
        overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
        with ui.configoverride(overrides, b'backout'):
            return hg.merge(repo[b'tip'])
    return 0


@command(
    b'bisect',
    [
        (b'r', b'reset', False, _(b'reset bisect state')),
        (b'g', b'good', False, _(b'mark changeset good')),
        (b'b', b'bad', False, _(b'mark changeset bad')),
        (b's', b'skip', False, _(b'skip testing changeset')),
        (b'e', b'extend', False, _(b'extend the bisect range')),
        (
            b'c',
            b'command',
            b'',
            _(b'use command to check changeset state'),
            _(b'CMD'),
        ),
        (b'U', b'noupdate', False, _(b'do not update to target')),
    ],
    _(b"[-gbsr] [-U] [-c CMD] [REV]"),
    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
)
def bisect(
    ui,
    repo,
    rev=None,
    extra=None,
    command=None,
    reset=None,
    good=None,
    bad=None,
    skip=None,
    extend=None,
    noupdate=None,
):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems. To
    use, mark the earliest changeset you know exhibits the problem as
    bad, then mark the latest changeset which is free from the problem
    as good. Bisect will update your working directory to a revision
    for testing (unless the -U/--noupdate option is specified). Once
    you have performed tests, mark the working directory as good or
    bad, and bisect will either update to another candidate changeset
    or announce that it has found the bad revision.

    As a shortcut, you can also use the revision argument to mark a
    revision as good or bad without checking it out first.

    If you supply a command, it will be used for automatic bisection.
    The environment variable HG_NODE will contain the ID of the
    changeset being tested. The exit status of the command will be
    used to mark revisions as good or bad: status 0 means good, 125
    means to skip the revision, 127 (command not found) will abort the
    bisection, and any other non-zero exit status means the revision
    is bad.

    .. container:: verbose

      Some examples:

      - start a bisection with known bad revision 34, and good revision 12::

          hg bisect --bad 34
          hg bisect --good 12

      - advance the current bisection by marking current revision as good or
        bad::

          hg bisect --good
          hg bisect --bad

      - mark the current revision, or a known revision, to be skipped (e.g. if
        that revision is not usable because of another issue)::

          hg bisect --skip
          hg bisect --skip 23

      - skip all revisions that do not touch directories ``foo`` or ``bar``::

          hg bisect --skip "!( file('path:foo') & file('path:bar') )"

      - forget the current bisection::

          hg bisect --reset

      - use 'make && make tests' to automatically find the first broken
        revision::

          hg bisect --reset
          hg bisect --bad 34
          hg bisect --good 12
          hg bisect --command "make && make tests"

      - see all changesets whose states are already known in the current
        bisection::

          hg log -r "bisect(pruned)"

      - see the changeset currently being bisected (especially useful
        if running with -U/--noupdate)::

          hg log -r "bisect(current)"

      - see all changesets that took part in the current bisection::

          hg log -r "bisect(range)"

      - you can even get a nice graph::

          hg log --graph -r "bisect(range)"

      See :hg:`help revisions.bisect` for more about the `bisect()` predicate.

    Returns 0 on success.
    """
    # backward compatibility
    if rev in b"good bad reset init".split():
        ui.warn(_(b"(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == b"good":
            good = True
        elif cmd == b"bad":
            bad = True
        else:
            reset = True
    elif extra:
        raise error.Abort(_(b'incompatible arguments'))

    incompatibles = {
        b'--bad': bad,
        b'--command': bool(command),
        b'--extend': extend,
        b'--good': good,
        b'--reset': reset,
        b'--skip': skip,
    }

    enabled = [x for x in incompatibles if incompatibles[x]]

    if len(enabled) > 1:
        raise error.Abort(
            _(b'%s and %s are incompatible') % tuple(sorted(enabled)[0:2])
        )

    if reset:
        hbisect.resetstate(repo)
        return

    state = hbisect.load_state(repo)

    # update state
    if good or bad or skip:
        if rev:
            nodes = [repo[i].node() for i in scmutil.revrange(repo, [rev])]
        else:
            nodes = [repo.lookup(b'.')]
        if good:
            state[b'good'] += nodes
        elif bad:
            state[b'bad'] += nodes
        elif skip:
            state[b'skip'] += nodes
        hbisect.save_state(repo, state)
        if not (state[b'good'] and state[b'bad']):
            return

    def mayupdate(repo, node, show_stats=True):
        """commonly used update sequence"""
        if noupdate:
            return
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        return hg.clean(repo, node, show_stats=show_stats)

    displayer = logcmdutil.changesetdisplayer(ui, repo, {})

    if command:
        changesets = 1
        if noupdate:
            try:
                node = state[b'current'][0]
            except LookupError:
                raise error.Abort(
                    _(
                        b'current bisect revision is unknown - '
                        b'start a new bisect to fix'
                    )
                )
        else:
            node, p2 = repo.dirstate.parents()
            if p2 != nullid:
                raise error.Abort(_(b'current bisect revision is a merge'))
        if rev:
            node = repo[scmutil.revsingle(repo, rev, node)].node()
        with hbisect.restore_state(repo, state, node):
            while changesets:
                # update state
                state[b'current'] = [node]
                hbisect.save_state(repo, state)
                status = ui.system(
                    command,
                    environ={b'HG_NODE': hex(node)},
                    blockedtag=b'bisect_check',
                )
                if status == 125:
                    transition = b"skip"
                elif status == 0:
                    transition = b"good"
                # status < 0 means process was killed
                elif status == 127:
                    raise error.Abort(_(b"failed to execute %s") % command)
                elif status < 0:
                    raise error.Abort(_(b"%s killed") % command)
                else:
                    transition = b"bad"
                state[transition].append(node)
                ctx = repo[node]
                ui.status(
                    _(b'changeset %d:%s: %s\n') % (ctx.rev(), ctx, transition)
                )
                hbisect.checkstate(state)
                # bisect
                nodes, changesets, bgood = hbisect.bisect(repo, state)
                # update to next check
                node = nodes[0]
                mayupdate(repo, node, show_stats=False)
        hbisect.printresult(ui, repo, state, displayer, nodes, bgood)
        return

    hbisect.checkstate(state)

    # actually bisect
    nodes, changesets, good = hbisect.bisect(repo, state)
    if extend:
        if not changesets:
            extendnode = hbisect.extendrange(repo, state, nodes, good)
            if extendnode is not None:
                ui.write(
                    _(b"Extending search to changeset %d:%s\n")
                    % (extendnode.rev(), extendnode)
                )
                state[b'current'] = [extendnode.node()]
                hbisect.save_state(repo, state)
                return mayupdate(repo, extendnode.node())
        raise error.Abort(_(b"nothing to extend"))

    if changesets == 0:
        hbisect.printresult(ui, repo, state, displayer, nodes, good)
    else:
        assert len(nodes) == 1  # only a single node can be tested next
        node = nodes[0]
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(
            _(
                b"Testing changeset %d:%s "
                b"(%d changesets remaining, ~%d tests)\n"
            )
            % (rev, short(node), changesets, tests)
        )
        state[b'current'] = [node]
        hbisect.save_state(repo, state)
        return mayupdate(repo, node)


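# Illustrative sketch only (not part of commands.py): a check script suitable
# for `hg bisect --command`, following the exit-status convention documented
# in the bisect docstring above: 0 = good, 125 = skip, 127 aborts the
# bisection, and any other non-zero status = bad. The `make` / `make tests`
# invocations are placeholders for whatever actually builds and tests the
# project under bisection.
def _example_bisect_check():
    import os
    import subprocess
    import sys

    node = os.environ.get("HG_NODE", "")  # changeset currently being tested
    if subprocess.call(["make"]) != 0:
        sys.exit(125)  # does not even build: skip this changeset
    sys.exit(0 if subprocess.call(["make", "tests"]) == 0 else 1)

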
@command(
    b'bookmarks|bookmark',
    [
        (b'f', b'force', False, _(b'force')),
        (b'r', b'rev', b'', _(b'revision for bookmark action'), _(b'REV')),
        (b'd', b'delete', False, _(b'delete a given bookmark')),
        (b'm', b'rename', b'', _(b'rename a given bookmark'), _(b'OLD')),
        (b'i', b'inactive', False, _(b'mark a bookmark inactive')),
        (b'l', b'list', False, _(b'list existing bookmarks')),
    ]
    + formatteropts,
    _(b'hg bookmarks [OPTIONS]... [NAME]...'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def bookmark(ui, repo, *names, **opts):
    '''create a new bookmark or list existing bookmarks

    Bookmarks are labels on changesets to help track lines of development.
    Bookmarks are unversioned and can be moved, renamed and deleted.
    Deleting or moving a bookmark has no effect on the associated changesets.

    Creating or updating to a bookmark causes it to be marked as 'active'.
    The active bookmark is indicated with a '*'.
    When a commit is made, the active bookmark will advance to the new commit.
    A plain :hg:`update` will also advance an active bookmark, if possible.
    Updating away from a bookmark will cause it to be deactivated.

    Bookmarks can be pushed and pulled between repositories (see
    :hg:`help push` and :hg:`help pull`). If a shared bookmark has
    diverged, a new 'divergent bookmark' of the form 'name@path' will
    be created. Using :hg:`merge` will resolve the divergence.

    Specifying bookmark as '.' to -m/-d/-l options is equivalent to specifying
    the active bookmark's name.

    A bookmark named '@' has the special property that :hg:`clone` will
    check it out by default if it exists.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions such as ``{bookmark}``. See also
      :hg:`help templates`.

      :active:  Boolean. True if the bookmark is active.

      Examples:

      - create an active bookmark for a new line of development::

          hg book new-feature

      - create an inactive bookmark as a place marker::

          hg book -i reviewed

      - create an inactive bookmark on another changeset::

          hg book -r .^ tested

      - rename bookmark turkey to dinner::

          hg book -m turkey dinner

      - move the '@' bookmark from another branch::

          hg book -f @

      - print only the active bookmark name::

          hg book -ql .
    '''
    opts = pycompat.byteskwargs(opts)
    force = opts.get(b'force')
    rev = opts.get(b'rev')
    inactive = opts.get(b'inactive')  # meaning add/rename to inactive bookmark

    action = cmdutil.check_at_most_one_arg(opts, b'delete', b'rename', b'list')
    if action:
        cmdutil.check_incompatible_arguments(opts, action, [b'rev'])
    elif names or rev:
        action = b'add'
    elif inactive:
        action = b'inactive'  # meaning deactivate
    else:
        action = b'list'

    cmdutil.check_incompatible_arguments(
        opts, b'inactive', [b'delete', b'list']
    )
    if not names and action in {b'add', b'delete'}:
        raise error.Abort(_(b"bookmark name required"))

    if action in {b'add', b'delete', b'rename', b'inactive'}:
        with repo.wlock(), repo.lock(), repo.transaction(b'bookmark') as tr:
            if action == b'delete':
                names = pycompat.maplist(repo._bookmarks.expandname, names)
                bookmarks.delete(repo, tr, names)
            elif action == b'rename':
                if not names:
                    raise error.Abort(_(b"new bookmark name required"))
                elif len(names) > 1:
                    raise error.Abort(_(b"only one new bookmark name allowed"))
                oldname = repo._bookmarks.expandname(opts[b'rename'])
                bookmarks.rename(repo, tr, oldname, names[0], force, inactive)
            elif action == b'add':
                bookmarks.addbookmarks(repo, tr, names, rev, force, inactive)
            elif action == b'inactive':
                if len(repo._bookmarks) == 0:
                    ui.status(_(b"no bookmarks set\n"))
                elif not repo._activebookmark:
                    ui.status(_(b"no active bookmark\n"))
                else:
                    bookmarks.deactivate(repo)
    elif action == b'list':
        names = pycompat.maplist(repo._bookmarks.expandname, names)
        with ui.formatter(b'bookmarks', opts) as fm:
            bookmarks.printbookmarks(ui, repo, fm, names)
    else:
        raise error.ProgrammingError(b'invalid action: %s' % action)


@command(
    b'branch',
    [
        (
            b'f',
            b'force',
            None,
            _(b'set branch name even if it shadows an existing branch'),
        ),
        (b'C', b'clean', None, _(b'reset branch name to parent branch name')),
        (
            b'r',
            b'rev',
            [],
            _(b'change branches of the given revs (EXPERIMENTAL)'),
        ),
    ],
    _(b'[-fC] [NAME]'),
    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
)
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    .. note::

       Branch names are permanent and global. Use :hg:`bookmark` to create a
       light-weight bookmark instead. See :hg:`help glossary` for more
       information about named branches and bookmarks.

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch will not exist
    in the repository until the next commit). Standard practice
    recommends that primary development take place on the 'default'
    branch.

    Unless -f/--force is specified, branch will not let you set a
    branch name that already exists.

    Use -C/--clean to reset the working directory branch to that of
    the parent of the working directory, negating a previous branch
    change.

    Use the command :hg:`update` to switch to an existing branch. Use
    :hg:`commit --close-branch` to mark this branch head as closed.
    When all heads of a branch are closed, the branch will be
    considered closed.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    revs = opts.get(b'rev')
    if label:
        label = label.strip()

    if not opts.get(b'clean') and not label:
        if revs:
            raise error.Abort(_(b"no branch name specified for the revisions"))
1325 ui.write(b"%s\n" % repo.dirstate.branch())
1326 ui.write(b"%s\n" % repo.dirstate.branch())
1326 return
1327 return
1327
1328
1328 with repo.wlock():
1329 with repo.wlock():
1329 if opts.get(b'clean'):
1330 if opts.get(b'clean'):
1330 label = repo[b'.'].branch()
1331 label = repo[b'.'].branch()
1331 repo.dirstate.setbranch(label)
1332 repo.dirstate.setbranch(label)
1332 ui.status(_(b'reset working directory to branch %s\n') % label)
1333 ui.status(_(b'reset working directory to branch %s\n') % label)
1333 elif label:
1334 elif label:
1334
1335
1335 scmutil.checknewlabel(repo, label, b'branch')
1336 scmutil.checknewlabel(repo, label, b'branch')
1336 if revs:
1337 if revs:
1337 return cmdutil.changebranch(ui, repo, revs, label, opts)
1338 return cmdutil.changebranch(ui, repo, revs, label, opts)
1338
1339
1339 if not opts.get(b'force') and label in repo.branchmap():
1340 if not opts.get(b'force') and label in repo.branchmap():
1340 if label not in [p.branch() for p in repo[None].parents()]:
1341 if label not in [p.branch() for p in repo[None].parents()]:
1341 raise error.Abort(
1342 raise error.Abort(
1342 _(b'a branch of the same name already exists'),
1343 _(b'a branch of the same name already exists'),
1343 # i18n: "it" refers to an existing branch
1344 # i18n: "it" refers to an existing branch
1344 hint=_(b"use 'hg update' to switch to it"),
1345 hint=_(b"use 'hg update' to switch to it"),
1345 )
1346 )
1346
1347
1347 repo.dirstate.setbranch(label)
1348 repo.dirstate.setbranch(label)
1348 ui.status(_(b'marked working directory as branch %s\n') % label)
1349 ui.status(_(b'marked working directory as branch %s\n') % label)
1349
1350
1350 # find any open named branches aside from default
1351 # find any open named branches aside from default
1351 for n, h, t, c in repo.branchmap().iterbranches():
1352 for n, h, t, c in repo.branchmap().iterbranches():
1352 if n != b"default" and not c:
1353 if n != b"default" and not c:
1353 return 0
1354 return 0
1354 ui.status(
1355 ui.status(
1355 _(
1356 _(
1356 b'(branches are permanent and global, '
1357 b'(branches are permanent and global, '
1357 b'did you want a bookmark?)\n'
1358 b'did you want a bookmark?)\n'
1358 )
1359 )
1359 )
1360 )
1360
1361
1361
1362
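Setting a branch name that already exists is refused above unless --force is given or the name is already carried by a parent of the working directory. A minimal standalone sketch of that guard, assuming plain sets in place of repo.branchmap() and repo[None].parents()::

    def may_set_branch(label, existing_branches, parent_branches, force=False):
        # shadowing an existing branch needs --force, unless the working
        # directory is simply continuing the branch it is already on
        if force or label not in existing_branches:
            return True
        return label in parent_branches

    assert may_set_branch('stable', {'default', 'stable'}, {'default'}) is False
    assert may_set_branch('stable', {'default', 'stable'}, {'stable'}) is True
    assert may_set_branch('topic', {'default', 'stable'}, {'default'}) is True
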
1362 @command(
1363 @command(
1363 b'branches',
1364 b'branches',
1364 [
1365 [
1365 (
1366 (
1366 b'a',
1367 b'a',
1367 b'active',
1368 b'active',
1368 False,
1369 False,
1369 _(b'show only branches that have unmerged heads (DEPRECATED)'),
1370 _(b'show only branches that have unmerged heads (DEPRECATED)'),
1370 ),
1371 ),
1371 (b'c', b'closed', False, _(b'show normal and closed branches')),
1372 (b'c', b'closed', False, _(b'show normal and closed branches')),
1372 (b'r', b'rev', [], _(b'show branch name(s) of the given rev')),
1373 (b'r', b'rev', [], _(b'show branch name(s) of the given rev')),
1373 ]
1374 ]
1374 + formatteropts,
1375 + formatteropts,
1375 _(b'[-c]'),
1376 _(b'[-c]'),
1376 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
1377 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
1377 intents={INTENT_READONLY},
1378 intents={INTENT_READONLY},
1378 )
1379 )
1379 def branches(ui, repo, active=False, closed=False, **opts):
1380 def branches(ui, repo, active=False, closed=False, **opts):
1380 """list repository named branches
1381 """list repository named branches
1381
1382
1382 List the repository's named branches, indicating which ones are
1383 List the repository's named branches, indicating which ones are
1383 inactive. If -c/--closed is specified, also list branches which have
1384 inactive. If -c/--closed is specified, also list branches which have
1384 been marked closed (see :hg:`commit --close-branch`).
1385 been marked closed (see :hg:`commit --close-branch`).
1385
1386
1386 Use the command :hg:`update` to switch to an existing branch.
1387 Use the command :hg:`update` to switch to an existing branch.
1387
1388
1388 .. container:: verbose
1389 .. container:: verbose
1389
1390
1390 Template:
1391 Template:
1391
1392
1392 The following keywords are supported in addition to the common template
1393 The following keywords are supported in addition to the common template
1393 keywords and functions such as ``{branch}``. See also
1394 keywords and functions such as ``{branch}``. See also
1394 :hg:`help templates`.
1395 :hg:`help templates`.
1395
1396
1396 :active: Boolean. True if the branch is active.
1397 :active: Boolean. True if the branch is active.
1397 :closed: Boolean. True if the branch is closed.
1398 :closed: Boolean. True if the branch is closed.
1398 :current: Boolean. True if it is the current branch.
1399 :current: Boolean. True if it is the current branch.
1399
1400
1400 Returns 0.
1401 Returns 0.
1401 """
1402 """
1402
1403
1403 opts = pycompat.byteskwargs(opts)
1404 opts = pycompat.byteskwargs(opts)
1404 revs = opts.get(b'rev')
1405 revs = opts.get(b'rev')
1405 selectedbranches = None
1406 selectedbranches = None
1406 if revs:
1407 if revs:
1407 revs = scmutil.revrange(repo, revs)
1408 revs = scmutil.revrange(repo, revs)
1408 getbi = repo.revbranchcache().branchinfo
1409 getbi = repo.revbranchcache().branchinfo
1409 selectedbranches = {getbi(r)[0] for r in revs}
1410 selectedbranches = {getbi(r)[0] for r in revs}
1410
1411
1411 ui.pager(b'branches')
1412 ui.pager(b'branches')
1412 fm = ui.formatter(b'branches', opts)
1413 fm = ui.formatter(b'branches', opts)
1413 hexfunc = fm.hexfunc
1414 hexfunc = fm.hexfunc
1414
1415
1415 allheads = set(repo.heads())
1416 allheads = set(repo.heads())
1416 branches = []
1417 branches = []
1417 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1418 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1418 if selectedbranches is not None and tag not in selectedbranches:
1419 if selectedbranches is not None and tag not in selectedbranches:
1419 continue
1420 continue
1420 isactive = False
1421 isactive = False
1421 if not isclosed:
1422 if not isclosed:
1422 openheads = set(repo.branchmap().iteropen(heads))
1423 openheads = set(repo.branchmap().iteropen(heads))
1423 isactive = bool(openheads & allheads)
1424 isactive = bool(openheads & allheads)
1424 branches.append((tag, repo[tip], isactive, not isclosed))
1425 branches.append((tag, repo[tip], isactive, not isclosed))
1425 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]), reverse=True)
1426 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]), reverse=True)
1426
1427
1427 for tag, ctx, isactive, isopen in branches:
1428 for tag, ctx, isactive, isopen in branches:
1428 if active and not isactive:
1429 if active and not isactive:
1429 continue
1430 continue
1430 if isactive:
1431 if isactive:
1431 label = b'branches.active'
1432 label = b'branches.active'
1432 notice = b''
1433 notice = b''
1433 elif not isopen:
1434 elif not isopen:
1434 if not closed:
1435 if not closed:
1435 continue
1436 continue
1436 label = b'branches.closed'
1437 label = b'branches.closed'
1437 notice = _(b' (closed)')
1438 notice = _(b' (closed)')
1438 else:
1439 else:
1439 label = b'branches.inactive'
1440 label = b'branches.inactive'
1440 notice = _(b' (inactive)')
1441 notice = _(b' (inactive)')
1441 current = tag == repo.dirstate.branch()
1442 current = tag == repo.dirstate.branch()
1442 if current:
1443 if current:
1443 label = b'branches.current'
1444 label = b'branches.current'
1444
1445
1445 fm.startitem()
1446 fm.startitem()
1446 fm.write(b'branch', b'%s', tag, label=label)
1447 fm.write(b'branch', b'%s', tag, label=label)
1447 rev = ctx.rev()
1448 rev = ctx.rev()
1448 padsize = max(31 - len(b"%d" % rev) - encoding.colwidth(tag), 0)
1449 padsize = max(31 - len(b"%d" % rev) - encoding.colwidth(tag), 0)
1449 fmt = b' ' * padsize + b' %d:%s'
1450 fmt = b' ' * padsize + b' %d:%s'
1450 fm.condwrite(
1451 fm.condwrite(
1451 not ui.quiet,
1452 not ui.quiet,
1452 b'rev node',
1453 b'rev node',
1453 fmt,
1454 fmt,
1454 rev,
1455 rev,
1455 hexfunc(ctx.node()),
1456 hexfunc(ctx.node()),
1456 label=b'log.changeset changeset.%s' % ctx.phasestr(),
1457 label=b'log.changeset changeset.%s' % ctx.phasestr(),
1457 )
1458 )
1458 fm.context(ctx=ctx)
1459 fm.context(ctx=ctx)
1459 fm.data(active=isactive, closed=not isopen, current=current)
1460 fm.data(active=isactive, closed=not isopen, current=current)
1460 if not ui.quiet:
1461 if not ui.quiet:
1461 fm.plain(notice)
1462 fm.plain(notice)
1462 fm.plain(b'\n')
1463 fm.plain(b'\n')
1463 fm.end()
1464 fm.end()
1464
1465
1465
1466
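In the listing loop above, each row is padded so that the rev:node column lines up. A rough standalone approximation, using len() in place of encoding.colwidth() (so it only holds for ASCII branch names) and the width constant 31 from the code::

    def format_branch_line(name, rev, shortnode, width=31):
        padsize = max(width - len('%d' % rev) - len(name), 0)
        return name + ' ' * padsize + ' %d:%s' % (rev, shortnode)

    print(format_branch_line('default', 42, '1234567890ab'))
    print(format_branch_line('stable', 7, 'abcdef123456'))
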
1466 @command(
1467 @command(
1467 b'bundle',
1468 b'bundle',
1468 [
1469 [
1469 (
1470 (
1470 b'f',
1471 b'f',
1471 b'force',
1472 b'force',
1472 None,
1473 None,
1473 _(b'run even when the destination is unrelated'),
1474 _(b'run even when the destination is unrelated'),
1474 ),
1475 ),
1475 (
1476 (
1476 b'r',
1477 b'r',
1477 b'rev',
1478 b'rev',
1478 [],
1479 [],
1479 _(b'a changeset intended to be added to the destination'),
1480 _(b'a changeset intended to be added to the destination'),
1480 _(b'REV'),
1481 _(b'REV'),
1481 ),
1482 ),
1482 (
1483 (
1483 b'b',
1484 b'b',
1484 b'branch',
1485 b'branch',
1485 [],
1486 [],
1486 _(b'a specific branch you would like to bundle'),
1487 _(b'a specific branch you would like to bundle'),
1487 _(b'BRANCH'),
1488 _(b'BRANCH'),
1488 ),
1489 ),
1489 (
1490 (
1490 b'',
1491 b'',
1491 b'base',
1492 b'base',
1492 [],
1493 [],
1493 _(b'a base changeset assumed to be available at the destination'),
1494 _(b'a base changeset assumed to be available at the destination'),
1494 _(b'REV'),
1495 _(b'REV'),
1495 ),
1496 ),
1496 (b'a', b'all', None, _(b'bundle all changesets in the repository')),
1497 (b'a', b'all', None, _(b'bundle all changesets in the repository')),
1497 (
1498 (
1498 b't',
1499 b't',
1499 b'type',
1500 b'type',
1500 b'bzip2',
1501 b'bzip2',
1501 _(b'bundle compression type to use'),
1502 _(b'bundle compression type to use'),
1502 _(b'TYPE'),
1503 _(b'TYPE'),
1503 ),
1504 ),
1504 ]
1505 ]
1505 + remoteopts,
1506 + remoteopts,
1506 _(b'[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'),
1507 _(b'[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'),
1507 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1508 helpcategory=command.CATEGORY_IMPORT_EXPORT,
1508 )
1509 )
1509 def bundle(ui, repo, fname, dest=None, **opts):
1510 def bundle(ui, repo, fname, dest=None, **opts):
1510 """create a bundle file
1511 """create a bundle file
1511
1512
1512 Generate a bundle file containing data to be transferred to another
1513 Generate a bundle file containing data to be transferred to another
1513 repository.
1514 repository.
1514
1515
1515 To create a bundle containing all changesets, use -a/--all
1516 To create a bundle containing all changesets, use -a/--all
1516 (or --base null). If --base is given, hg assumes the destination
1517 (or --base null). If --base is given, hg assumes the destination
1517 already has those base changesets. Otherwise, hg determines what to
1518 already has those base changesets. Otherwise, hg determines what to
1518 bundle by comparing against the destination repository: the one given
1519 bundle by comparing against the destination repository: the one given
1519 by the DEST option, or default-push/default if no destination is
1520 by the DEST option, or default-push/default if no destination is
1520 specified.
1521 specified.
1521
1522
1522 You can change bundle format with the -t/--type option. See
1523 You can change bundle format with the -t/--type option. See
1523 :hg:`help bundlespec` for documentation on this format. By default,
1524 :hg:`help bundlespec` for documentation on this format. By default,
1524 the most appropriate format is used and compression defaults to
1525 the most appropriate format is used and compression defaults to
1525 bzip2.
1526 bzip2.
1526
1527
1527 The bundle file can then be transferred using conventional means
1528 The bundle file can then be transferred using conventional means
1528 and applied to another repository with the unbundle or pull
1529 and applied to another repository with the unbundle or pull
1529 command. This is useful when direct push and pull are not
1530 command. This is useful when direct push and pull are not
1530 available or when exporting an entire repository is undesirable.
1531 available or when exporting an entire repository is undesirable.
1531
1532
1532 Applying bundles preserves all changeset contents including
1533 Applying bundles preserves all changeset contents including
1533 permissions, copy/rename information, and revision history.
1534 permissions, copy/rename information, and revision history.
1534
1535
1535 Returns 0 on success, 1 if no changes found.
1536 Returns 0 on success, 1 if no changes found.
1536 """
1537 """
1537 opts = pycompat.byteskwargs(opts)
1538 opts = pycompat.byteskwargs(opts)
1538 revs = None
1539 revs = None
1539 if b'rev' in opts:
1540 if b'rev' in opts:
1540 revstrings = opts[b'rev']
1541 revstrings = opts[b'rev']
1541 revs = scmutil.revrange(repo, revstrings)
1542 revs = scmutil.revrange(repo, revstrings)
1542 if revstrings and not revs:
1543 if revstrings and not revs:
1543 raise error.Abort(_(b'no commits to bundle'))
1544 raise error.Abort(_(b'no commits to bundle'))
1544
1545
1545 bundletype = opts.get(b'type', b'bzip2').lower()
1546 bundletype = opts.get(b'type', b'bzip2').lower()
1546 try:
1547 try:
1547 bundlespec = exchange.parsebundlespec(repo, bundletype, strict=False)
1548 bundlespec = bundlecaches.parsebundlespec(
1549 repo, bundletype, strict=False
1550 )
1548 except error.UnsupportedBundleSpecification as e:
1551 except error.UnsupportedBundleSpecification as e:
1549 raise error.Abort(
1552 raise error.Abort(
1550 pycompat.bytestr(e),
1553 pycompat.bytestr(e),
1551 hint=_(b"see 'hg help bundlespec' for supported values for --type"),
1554 hint=_(b"see 'hg help bundlespec' for supported values for --type"),
1552 )
1555 )
1553 cgversion = bundlespec.contentopts[b"cg.version"]
1556 cgversion = bundlespec.contentopts[b"cg.version"]
1554
1557
1555 # Packed bundles are a pseudo bundle format for now.
1558 # Packed bundles are a pseudo bundle format for now.
1556 if cgversion == b's1':
1559 if cgversion == b's1':
1557 raise error.Abort(
1560 raise error.Abort(
1558 _(b'packed bundles cannot be produced by "hg bundle"'),
1561 _(b'packed bundles cannot be produced by "hg bundle"'),
1559 hint=_(b"use 'hg debugcreatestreamclonebundle'"),
1562 hint=_(b"use 'hg debugcreatestreamclonebundle'"),
1560 )
1563 )
1561
1564
1562 if opts.get(b'all'):
1565 if opts.get(b'all'):
1563 if dest:
1566 if dest:
1564 raise error.Abort(
1567 raise error.Abort(
1565 _(b"--all is incompatible with specifying a destination")
1568 _(b"--all is incompatible with specifying a destination")
1566 )
1569 )
1567 if opts.get(b'base'):
1570 if opts.get(b'base'):
1568 ui.warn(_(b"ignoring --base because --all was specified\n"))
1571 ui.warn(_(b"ignoring --base because --all was specified\n"))
1569 base = [nullrev]
1572 base = [nullrev]
1570 else:
1573 else:
1571 base = scmutil.revrange(repo, opts.get(b'base'))
1574 base = scmutil.revrange(repo, opts.get(b'base'))
1572 if cgversion not in changegroup.supportedoutgoingversions(repo):
1575 if cgversion not in changegroup.supportedoutgoingversions(repo):
1573 raise error.Abort(
1576 raise error.Abort(
1574 _(b"repository does not support bundle version %s") % cgversion
1577 _(b"repository does not support bundle version %s") % cgversion
1575 )
1578 )
1576
1579
1577 if base:
1580 if base:
1578 if dest:
1581 if dest:
1579 raise error.Abort(
1582 raise error.Abort(
1580 _(b"--base is incompatible with specifying a destination")
1583 _(b"--base is incompatible with specifying a destination")
1581 )
1584 )
1582 common = [repo[rev].node() for rev in base]
1585 common = [repo[rev].node() for rev in base]
1583 heads = [repo[r].node() for r in revs] if revs else None
1586 heads = [repo[r].node() for r in revs] if revs else None
1584 outgoing = discovery.outgoing(repo, common, heads)
1587 outgoing = discovery.outgoing(repo, common, heads)
1585 else:
1588 else:
1586 dest = ui.expandpath(dest or b'default-push', dest or b'default')
1589 dest = ui.expandpath(dest or b'default-push', dest or b'default')
1587 dest, branches = hg.parseurl(dest, opts.get(b'branch'))
1590 dest, branches = hg.parseurl(dest, opts.get(b'branch'))
1588 other = hg.peer(repo, opts, dest)
1591 other = hg.peer(repo, opts, dest)
1589 revs = [repo[r].hex() for r in revs]
1592 revs = [repo[r].hex() for r in revs]
1590 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1593 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1591 heads = revs and pycompat.maplist(repo.lookup, revs) or revs
1594 heads = revs and pycompat.maplist(repo.lookup, revs) or revs
1592 outgoing = discovery.findcommonoutgoing(
1595 outgoing = discovery.findcommonoutgoing(
1593 repo,
1596 repo,
1594 other,
1597 other,
1595 onlyheads=heads,
1598 onlyheads=heads,
1596 force=opts.get(b'force'),
1599 force=opts.get(b'force'),
1597 portable=True,
1600 portable=True,
1598 )
1601 )
1599
1602
1600 if not outgoing.missing:
1603 if not outgoing.missing:
1601 scmutil.nochangesfound(ui, repo, not base and outgoing.excluded)
1604 scmutil.nochangesfound(ui, repo, not base and outgoing.excluded)
1602 return 1
1605 return 1
1603
1606
1604 if cgversion == b'01': # bundle1
1607 if cgversion == b'01': # bundle1
1605 bversion = b'HG10' + bundlespec.wirecompression
1608 bversion = b'HG10' + bundlespec.wirecompression
1606 bcompression = None
1609 bcompression = None
1607 elif cgversion in (b'02', b'03'):
1610 elif cgversion in (b'02', b'03'):
1608 bversion = b'HG20'
1611 bversion = b'HG20'
1609 bcompression = bundlespec.wirecompression
1612 bcompression = bundlespec.wirecompression
1610 else:
1613 else:
1611 raise error.ProgrammingError(
1614 raise error.ProgrammingError(
1612 b'bundle: unexpected changegroup version %s' % cgversion
1615 b'bundle: unexpected changegroup version %s' % cgversion
1613 )
1616 )
1614
1617
1615 # TODO compression options should be derived from bundlespec parsing.
1618 # TODO compression options should be derived from bundlespec parsing.
1616 # This is a temporary hack to allow adjusting bundle compression
1619 # This is a temporary hack to allow adjusting bundle compression
1617 # level without a) formalizing the bundlespec changes to declare it
1620 # level without a) formalizing the bundlespec changes to declare it
1618 # b) introducing a command flag.
1621 # b) introducing a command flag.
1619 compopts = {}
1622 compopts = {}
1620 complevel = ui.configint(
1623 complevel = ui.configint(
1621 b'experimental', b'bundlecomplevel.' + bundlespec.compression
1624 b'experimental', b'bundlecomplevel.' + bundlespec.compression
1622 )
1625 )
1623 if complevel is None:
1626 if complevel is None:
1624 complevel = ui.configint(b'experimental', b'bundlecomplevel')
1627 complevel = ui.configint(b'experimental', b'bundlecomplevel')
1625 if complevel is not None:
1628 if complevel is not None:
1626 compopts[b'level'] = complevel
1629 compopts[b'level'] = complevel
1627
1630
1628 # Allow overriding the bundling of obsmarker in phases through
1631 # Allow overriding the bundling of obsmarker in phases through
1629 # configuration while we don't have a bundle version that include them
1632 # configuration while we don't have a bundle version that include them
1630 if repo.ui.configbool(b'experimental', b'evolution.bundle-obsmarker'):
1633 if repo.ui.configbool(b'experimental', b'evolution.bundle-obsmarker'):
1631 bundlespec.contentopts[b'obsolescence'] = True
1634 bundlespec.contentopts[b'obsolescence'] = True
1632 if repo.ui.configbool(b'experimental', b'bundle-phases'):
1635 if repo.ui.configbool(b'experimental', b'bundle-phases'):
1633 bundlespec.contentopts[b'phases'] = True
1636 bundlespec.contentopts[b'phases'] = True
1634
1637
1635 bundle2.writenewbundle(
1638 bundle2.writenewbundle(
1636 ui,
1639 ui,
1637 repo,
1640 repo,
1638 b'bundle',
1641 b'bundle',
1639 fname,
1642 fname,
1640 bversion,
1643 bversion,
1641 outgoing,
1644 outgoing,
1642 bundlespec.contentopts,
1645 bundlespec.contentopts,
1643 compression=bcompression,
1646 compression=bcompression,
1644 compopts=compopts,
1647 compopts=compopts,
1645 )
1648 )
1646
1649
1647
1650
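The --type argument is parsed into a bundlespec (now through bundlecaches.parsebundlespec rather than exchange.parsebundlespec, which is the call-site change visible in the hunk above), and the resulting changegroup version selects the on-the-wire header. A hedged sketch of that mapping, using the HG10/HG20 header strings from the code and assuming bzip2's wire name is 'BZ'::

    def wire_format(cgversion, wirecompression):
        # bundle1 encodes the compression in the header itself; bundle2 (HG20)
        # passes it separately to the bundle writer
        if cgversion == '01':
            return 'HG10' + wirecompression, None
        if cgversion in ('02', '03'):
            return 'HG20', wirecompression
        raise ValueError('unexpected changegroup version %s' % cgversion)

    assert wire_format('01', 'BZ') == ('HG10BZ', None)
    assert wire_format('02', 'BZ') == ('HG20', 'BZ')
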
1648 @command(
1651 @command(
1649 b'cat',
1652 b'cat',
1650 [
1653 [
1651 (
1654 (
1652 b'o',
1655 b'o',
1653 b'output',
1656 b'output',
1654 b'',
1657 b'',
1655 _(b'print output to file with formatted name'),
1658 _(b'print output to file with formatted name'),
1656 _(b'FORMAT'),
1659 _(b'FORMAT'),
1657 ),
1660 ),
1658 (b'r', b'rev', b'', _(b'print the given revision'), _(b'REV')),
1661 (b'r', b'rev', b'', _(b'print the given revision'), _(b'REV')),
1659 (b'', b'decode', None, _(b'apply any matching decode filter')),
1662 (b'', b'decode', None, _(b'apply any matching decode filter')),
1660 ]
1663 ]
1661 + walkopts
1664 + walkopts
1662 + formatteropts,
1665 + formatteropts,
1663 _(b'[OPTION]... FILE...'),
1666 _(b'[OPTION]... FILE...'),
1664 helpcategory=command.CATEGORY_FILE_CONTENTS,
1667 helpcategory=command.CATEGORY_FILE_CONTENTS,
1665 inferrepo=True,
1668 inferrepo=True,
1666 intents={INTENT_READONLY},
1669 intents={INTENT_READONLY},
1667 )
1670 )
1668 def cat(ui, repo, file1, *pats, **opts):
1671 def cat(ui, repo, file1, *pats, **opts):
1669 """output the current or given revision of files
1672 """output the current or given revision of files
1670
1673
1671 Print the specified files as they were at the given revision. If
1674 Print the specified files as they were at the given revision. If
1672 no revision is given, the parent of the working directory is used.
1675 no revision is given, the parent of the working directory is used.
1673
1676
1674 Output may be to a file, in which case the name of the file is
1677 Output may be to a file, in which case the name of the file is
1675 given using a template string. See :hg:`help templates`. In addition
1678 given using a template string. See :hg:`help templates`. In addition
1676 to the common template keywords, the following formatting rules are
1679 to the common template keywords, the following formatting rules are
1677 supported:
1680 supported:
1678
1681
1679 :``%%``: literal "%" character
1682 :``%%``: literal "%" character
1680 :``%s``: basename of file being printed
1683 :``%s``: basename of file being printed
1681 :``%d``: dirname of file being printed, or '.' if in repository root
1684 :``%d``: dirname of file being printed, or '.' if in repository root
1682 :``%p``: root-relative path name of file being printed
1685 :``%p``: root-relative path name of file being printed
1683 :``%H``: changeset hash (40 hexadecimal digits)
1686 :``%H``: changeset hash (40 hexadecimal digits)
1684 :``%R``: changeset revision number
1687 :``%R``: changeset revision number
1685 :``%h``: short-form changeset hash (12 hexadecimal digits)
1688 :``%h``: short-form changeset hash (12 hexadecimal digits)
1686 :``%r``: zero-padded changeset revision number
1689 :``%r``: zero-padded changeset revision number
1687 :``%b``: basename of the exporting repository
1690 :``%b``: basename of the exporting repository
1688 :``\\``: literal "\\" character
1691 :``\\``: literal "\\" character
1689
1692
1690 .. container:: verbose
1693 .. container:: verbose
1691
1694
1692 Template:
1695 Template:
1693
1696
1694 The following keywords are supported in addition to the common template
1697 The following keywords are supported in addition to the common template
1695 keywords and functions. See also :hg:`help templates`.
1698 keywords and functions. See also :hg:`help templates`.
1696
1699
1697 :data: String. File content.
1700 :data: String. File content.
1698 :path: String. Repository-absolute path of the file.
1701 :path: String. Repository-absolute path of the file.
1699
1702
1700 Returns 0 on success.
1703 Returns 0 on success.
1701 """
1704 """
1702 opts = pycompat.byteskwargs(opts)
1705 opts = pycompat.byteskwargs(opts)
1703 rev = opts.get(b'rev')
1706 rev = opts.get(b'rev')
1704 if rev:
1707 if rev:
1705 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
1708 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
1706 ctx = scmutil.revsingle(repo, rev)
1709 ctx = scmutil.revsingle(repo, rev)
1707 m = scmutil.match(ctx, (file1,) + pats, opts)
1710 m = scmutil.match(ctx, (file1,) + pats, opts)
1708 fntemplate = opts.pop(b'output', b'')
1711 fntemplate = opts.pop(b'output', b'')
1709 if cmdutil.isstdiofilename(fntemplate):
1712 if cmdutil.isstdiofilename(fntemplate):
1710 fntemplate = b''
1713 fntemplate = b''
1711
1714
1712 if fntemplate:
1715 if fntemplate:
1713 fm = formatter.nullformatter(ui, b'cat', opts)
1716 fm = formatter.nullformatter(ui, b'cat', opts)
1714 else:
1717 else:
1715 ui.pager(b'cat')
1718 ui.pager(b'cat')
1716 fm = ui.formatter(b'cat', opts)
1719 fm = ui.formatter(b'cat', opts)
1717 with fm:
1720 with fm:
1718 return cmdutil.cat(
1721 return cmdutil.cat(
1719 ui, repo, ctx, m, fm, fntemplate, b'', **pycompat.strkwargs(opts)
1722 ui, repo, ctx, m, fm, fntemplate, b'', **pycompat.strkwargs(opts)
1720 )
1723 )
1721
1724
1722
1725
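A simplified standalone illustration of how the -o/--output format codes listed above could expand for a single file. This is a hypothetical helper, not Mercurial's real formatter; it ignores %% escaping and the zero-padded %r form::

    import os

    def expand_output_name(tmpl, path, node, rev):
        return (tmpl.replace('%s', os.path.basename(path))
                    .replace('%d', os.path.dirname(path) or '.')
                    .replace('%p', path)
                    .replace('%H', node)
                    .replace('%h', node[:12])
                    .replace('%R', '%d' % rev))

    node = '0123456789abcdef0123456789abcdef01234567'
    print(expand_output_name('%d/%s@%h.txt', 'docs/readme.txt', node, 42))
    # -> docs/readme.txt@0123456789ab.txt
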
1723 @command(
1726 @command(
1724 b'clone',
1727 b'clone',
1725 [
1728 [
1726 (
1729 (
1727 b'U',
1730 b'U',
1728 b'noupdate',
1731 b'noupdate',
1729 None,
1732 None,
1730 _(
1733 _(
1731 b'the clone will include an empty working '
1734 b'the clone will include an empty working '
1732 b'directory (only a repository)'
1735 b'directory (only a repository)'
1733 ),
1736 ),
1734 ),
1737 ),
1735 (
1738 (
1736 b'u',
1739 b'u',
1737 b'updaterev',
1740 b'updaterev',
1738 b'',
1741 b'',
1739 _(b'revision, tag, or branch to check out'),
1742 _(b'revision, tag, or branch to check out'),
1740 _(b'REV'),
1743 _(b'REV'),
1741 ),
1744 ),
1742 (
1745 (
1743 b'r',
1746 b'r',
1744 b'rev',
1747 b'rev',
1745 [],
1748 [],
1746 _(
1749 _(
1747 b'do not clone everything, but include this changeset'
1750 b'do not clone everything, but include this changeset'
1748 b' and its ancestors'
1751 b' and its ancestors'
1749 ),
1752 ),
1750 _(b'REV'),
1753 _(b'REV'),
1751 ),
1754 ),
1752 (
1755 (
1753 b'b',
1756 b'b',
1754 b'branch',
1757 b'branch',
1755 [],
1758 [],
1756 _(
1759 _(
1757 b'do not clone everything, but include this branch\'s'
1760 b'do not clone everything, but include this branch\'s'
1758 b' changesets and their ancestors'
1761 b' changesets and their ancestors'
1759 ),
1762 ),
1760 _(b'BRANCH'),
1763 _(b'BRANCH'),
1761 ),
1764 ),
1762 (b'', b'pull', None, _(b'use pull protocol to copy metadata')),
1765 (b'', b'pull', None, _(b'use pull protocol to copy metadata')),
1763 (b'', b'uncompressed', None, _(b'an alias to --stream (DEPRECATED)')),
1766 (b'', b'uncompressed', None, _(b'an alias to --stream (DEPRECATED)')),
1764 (b'', b'stream', None, _(b'clone with minimal data processing')),
1767 (b'', b'stream', None, _(b'clone with minimal data processing')),
1765 ]
1768 ]
1766 + remoteopts,
1769 + remoteopts,
1767 _(b'[OPTION]... SOURCE [DEST]'),
1770 _(b'[OPTION]... SOURCE [DEST]'),
1768 helpcategory=command.CATEGORY_REPO_CREATION,
1771 helpcategory=command.CATEGORY_REPO_CREATION,
1769 helpbasic=True,
1772 helpbasic=True,
1770 norepo=True,
1773 norepo=True,
1771 )
1774 )
1772 def clone(ui, source, dest=None, **opts):
1775 def clone(ui, source, dest=None, **opts):
1773 """make a copy of an existing repository
1776 """make a copy of an existing repository
1774
1777
1775 Create a copy of an existing repository in a new directory.
1778 Create a copy of an existing repository in a new directory.
1776
1779
1777 If no destination directory name is specified, it defaults to the
1780 If no destination directory name is specified, it defaults to the
1778 basename of the source.
1781 basename of the source.
1779
1782
1780 The location of the source is added to the new repository's
1783 The location of the source is added to the new repository's
1781 ``.hg/hgrc`` file, as the default to be used for future pulls.
1784 ``.hg/hgrc`` file, as the default to be used for future pulls.
1782
1785
1783 Only local paths and ``ssh://`` URLs are supported as
1786 Only local paths and ``ssh://`` URLs are supported as
1784 destinations. For ``ssh://`` destinations, no working directory or
1787 destinations. For ``ssh://`` destinations, no working directory or
1785 ``.hg/hgrc`` will be created on the remote side.
1788 ``.hg/hgrc`` will be created on the remote side.
1786
1789
1787 If the source repository has a bookmark called '@' set, that
1790 If the source repository has a bookmark called '@' set, that
1788 revision will be checked out in the new repository by default.
1791 revision will be checked out in the new repository by default.
1789
1792
1790 To check out a particular version, use -u/--update, or
1793 To check out a particular version, use -u/--update, or
1791 -U/--noupdate to create a clone with no working directory.
1794 -U/--noupdate to create a clone with no working directory.
1792
1795
1793 To pull only a subset of changesets, specify one or more revisions
1796 To pull only a subset of changesets, specify one or more revisions
1794 identifiers with -r/--rev or branches with -b/--branch. The
1797 identifiers with -r/--rev or branches with -b/--branch. The
1795 resulting clone will contain only the specified changesets and
1798 resulting clone will contain only the specified changesets and
1796 their ancestors. These options (or 'clone src#rev dest') imply
1799 their ancestors. These options (or 'clone src#rev dest') imply
1797 --pull, even for local source repositories.
1800 --pull, even for local source repositories.
1798
1801
1799 In normal clone mode, the remote normalizes repository data into a common
1802 In normal clone mode, the remote normalizes repository data into a common
1800 exchange format and the receiving end translates this data into its local
1803 exchange format and the receiving end translates this data into its local
1801 storage format. --stream activates a different clone mode that essentially
1804 storage format. --stream activates a different clone mode that essentially
1802 copies repository files from the remote with minimal data processing. This
1805 copies repository files from the remote with minimal data processing. This
1803 significantly reduces the CPU cost of a clone both remotely and locally.
1806 significantly reduces the CPU cost of a clone both remotely and locally.
1804 However, it often increases the transferred data size by 30-40%. This can
1807 However, it often increases the transferred data size by 30-40%. This can
1805 result in substantially faster clones where I/O throughput is plentiful,
1808 result in substantially faster clones where I/O throughput is plentiful,
1806 especially for larger repositories. A side-effect of --stream clones is
1809 especially for larger repositories. A side-effect of --stream clones is
1807 that storage settings and requirements on the remote are applied locally:
1810 that storage settings and requirements on the remote are applied locally:
1808 a modern client may inherit legacy or inefficient storage used by the
1811 a modern client may inherit legacy or inefficient storage used by the
1809 remote or a legacy Mercurial client may not be able to clone from a
1812 remote or a legacy Mercurial client may not be able to clone from a
1810 modern Mercurial remote.
1813 modern Mercurial remote.
1811
1814
1812 .. note::
1815 .. note::
1813
1816
1814 Specifying a tag will include the tagged changeset but not the
1817 Specifying a tag will include the tagged changeset but not the
1815 changeset containing the tag.
1818 changeset containing the tag.
1816
1819
1817 .. container:: verbose
1820 .. container:: verbose
1818
1821
1819 For efficiency, hardlinks are used for cloning whenever the
1822 For efficiency, hardlinks are used for cloning whenever the
1820 source and destination are on the same filesystem (note this
1823 source and destination are on the same filesystem (note this
1821 applies only to the repository data, not to the working
1824 applies only to the repository data, not to the working
1822 directory). Some filesystems, such as AFS, implement hardlinking
1825 directory). Some filesystems, such as AFS, implement hardlinking
1823 incorrectly, but do not report errors. In these cases, use the
1826 incorrectly, but do not report errors. In these cases, use the
1824 --pull option to avoid hardlinking.
1827 --pull option to avoid hardlinking.
1825
1828
1826 Mercurial will update the working directory to the first applicable
1829 Mercurial will update the working directory to the first applicable
1827 revision from this list:
1830 revision from this list:
1828
1831
1829 a) null if -U or the source repository has no changesets
1832 a) null if -U or the source repository has no changesets
1830 b) if -u . and the source repository is local, the first parent of
1833 b) if -u . and the source repository is local, the first parent of
1831 the source repository's working directory
1834 the source repository's working directory
1832 c) the changeset specified with -u (if a branch name, this means the
1835 c) the changeset specified with -u (if a branch name, this means the
1833 latest head of that branch)
1836 latest head of that branch)
1834 d) the changeset specified with -r
1837 d) the changeset specified with -r
1835 e) the tipmost head specified with -b
1838 e) the tipmost head specified with -b
1836 f) the tipmost head specified with the url#branch source syntax
1839 f) the tipmost head specified with the url#branch source syntax
1837 g) the revision marked with the '@' bookmark, if present
1840 g) the revision marked with the '@' bookmark, if present
1838 h) the tipmost head of the default branch
1841 h) the tipmost head of the default branch
1839 i) tip
1842 i) tip
1840
1843
1841 When cloning from servers that support it, Mercurial may fetch
1844 When cloning from servers that support it, Mercurial may fetch
1842 pre-generated data from a server-advertised URL or inline from the
1845 pre-generated data from a server-advertised URL or inline from the
1843 same stream. When this is done, hooks operating on incoming changesets
1846 same stream. When this is done, hooks operating on incoming changesets
1844 and changegroups may fire more than once: once for each pre-generated
1847 and changegroups may fire more than once: once for each pre-generated
1845 bundle and once for any additional remaining data. In addition,
1848 bundle and once for any additional remaining data. In addition,
1846 if an error occurs, the repository may be rolled back to a partial
1849 if an error occurs, the repository may be rolled back to a partial
1847 clone. This behavior may change in future releases.
1850 clone. This behavior may change in future releases.
1848 See :hg:`help -e clonebundles` for more.
1851 See :hg:`help -e clonebundles` for more.
1849
1852
1850 Examples:
1853 Examples:
1851
1854
1852 - clone a remote repository to a new directory named hg/::
1855 - clone a remote repository to a new directory named hg/::
1853
1856
1854 hg clone https://www.mercurial-scm.org/repo/hg/
1857 hg clone https://www.mercurial-scm.org/repo/hg/
1855
1858
1856 - create a lightweight local clone::
1859 - create a lightweight local clone::
1857
1860
1858 hg clone project/ project-feature/
1861 hg clone project/ project-feature/
1859
1862
1860 - clone from an absolute path on an ssh server (note double-slash)::
1863 - clone from an absolute path on an ssh server (note double-slash)::
1861
1864
1862 hg clone ssh://user@server//home/projects/alpha/
1865 hg clone ssh://user@server//home/projects/alpha/
1863
1866
1864 - do a streaming clone while checking out a specified version::
1867 - do a streaming clone while checking out a specified version::
1865
1868
1866 hg clone --stream http://server/repo -u 1.5
1869 hg clone --stream http://server/repo -u 1.5
1867
1870
1868 - create a repository without changesets after a particular revision::
1871 - create a repository without changesets after a particular revision::
1869
1872
1870 hg clone -r 04e544 experimental/ good/
1873 hg clone -r 04e544 experimental/ good/
1871
1874
1872 - clone (and track) a particular named branch::
1875 - clone (and track) a particular named branch::
1873
1876
1874 hg clone https://www.mercurial-scm.org/repo/hg/#stable
1877 hg clone https://www.mercurial-scm.org/repo/hg/#stable
1875
1878
1876 See :hg:`help urls` for details on specifying URLs.
1879 See :hg:`help urls` for details on specifying URLs.
1877
1880
1878 Returns 0 on success.
1881 Returns 0 on success.
1879 """
1882 """
1880 opts = pycompat.byteskwargs(opts)
1883 opts = pycompat.byteskwargs(opts)
1881 cmdutil.check_at_most_one_arg(opts, b'noupdate', b'updaterev')
1884 cmdutil.check_at_most_one_arg(opts, b'noupdate', b'updaterev')
1882
1885
1883 # --include/--exclude can come from narrow or sparse.
1886 # --include/--exclude can come from narrow or sparse.
1884 includepats, excludepats = None, None
1887 includepats, excludepats = None, None
1885
1888
1886 # hg.clone() differentiates between None and an empty set. So make sure
1889 # hg.clone() differentiates between None and an empty set. So make sure
1887 # patterns are sets if narrow is requested without patterns.
1890 # patterns are sets if narrow is requested without patterns.
1888 if opts.get(b'narrow'):
1891 if opts.get(b'narrow'):
1889 includepats = set()
1892 includepats = set()
1890 excludepats = set()
1893 excludepats = set()
1891
1894
1892 if opts.get(b'include'):
1895 if opts.get(b'include'):
1893 includepats = narrowspec.parsepatterns(opts.get(b'include'))
1896 includepats = narrowspec.parsepatterns(opts.get(b'include'))
1894 if opts.get(b'exclude'):
1897 if opts.get(b'exclude'):
1895 excludepats = narrowspec.parsepatterns(opts.get(b'exclude'))
1898 excludepats = narrowspec.parsepatterns(opts.get(b'exclude'))
1896
1899
1897 r = hg.clone(
1900 r = hg.clone(
1898 ui,
1901 ui,
1899 opts,
1902 opts,
1900 source,
1903 source,
1901 dest,
1904 dest,
1902 pull=opts.get(b'pull'),
1905 pull=opts.get(b'pull'),
1903 stream=opts.get(b'stream') or opts.get(b'uncompressed'),
1906 stream=opts.get(b'stream') or opts.get(b'uncompressed'),
1904 revs=opts.get(b'rev'),
1907 revs=opts.get(b'rev'),
1905 update=opts.get(b'updaterev') or not opts.get(b'noupdate'),
1908 update=opts.get(b'updaterev') or not opts.get(b'noupdate'),
1906 branch=opts.get(b'branch'),
1909 branch=opts.get(b'branch'),
1907 shareopts=opts.get(b'shareopts'),
1910 shareopts=opts.get(b'shareopts'),
1908 storeincludepats=includepats,
1911 storeincludepats=includepats,
1909 storeexcludepats=excludepats,
1912 storeexcludepats=excludepats,
1910 depth=opts.get(b'depth') or None,
1913 depth=opts.get(b'depth') or None,
1911 )
1914 )
1912
1915
1913 return r is None
1916 return r is None
1914
1917
1915
1918
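A small standalone sketch of the include/exclude handling above: empty sets (requested by --narrow) are deliberately distinct from None, which hg.clone() treats as "no narrowing at all". Plain sets stand in for narrowspec.parsepatterns()::

    def narrow_patterns(opts):
        includepats = excludepats = None
        if opts.get('narrow'):
            includepats, excludepats = set(), set()
        if opts.get('include'):
            includepats = set(opts['include'])
        if opts.get('exclude'):
            excludepats = set(opts['exclude'])
        return includepats, excludepats

    assert narrow_patterns({}) == (None, None)
    assert narrow_patterns({'narrow': True}) == (set(), set())
    assert narrow_patterns({'include': ['path:src']}) == ({'path:src'}, None)
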
1916 @command(
1919 @command(
1917 b'commit|ci',
1920 b'commit|ci',
1918 [
1921 [
1919 (
1922 (
1920 b'A',
1923 b'A',
1921 b'addremove',
1924 b'addremove',
1922 None,
1925 None,
1923 _(b'mark new/missing files as added/removed before committing'),
1926 _(b'mark new/missing files as added/removed before committing'),
1924 ),
1927 ),
1925 (b'', b'close-branch', None, _(b'mark a branch head as closed')),
1928 (b'', b'close-branch', None, _(b'mark a branch head as closed')),
1926 (b'', b'amend', None, _(b'amend the parent of the working directory')),
1929 (b'', b'amend', None, _(b'amend the parent of the working directory')),
1927 (b's', b'secret', None, _(b'use the secret phase for committing')),
1930 (b's', b'secret', None, _(b'use the secret phase for committing')),
1928 (b'e', b'edit', None, _(b'invoke editor on commit messages')),
1931 (b'e', b'edit', None, _(b'invoke editor on commit messages')),
1929 (
1932 (
1930 b'',
1933 b'',
1931 b'force-close-branch',
1934 b'force-close-branch',
1932 None,
1935 None,
1933 _(b'forcibly close branch from a non-head changeset (ADVANCED)'),
1936 _(b'forcibly close branch from a non-head changeset (ADVANCED)'),
1934 ),
1937 ),
1935 (b'i', b'interactive', None, _(b'use interactive mode')),
1938 (b'i', b'interactive', None, _(b'use interactive mode')),
1936 ]
1939 ]
1937 + walkopts
1940 + walkopts
1938 + commitopts
1941 + commitopts
1939 + commitopts2
1942 + commitopts2
1940 + subrepoopts,
1943 + subrepoopts,
1941 _(b'[OPTION]... [FILE]...'),
1944 _(b'[OPTION]... [FILE]...'),
1942 helpcategory=command.CATEGORY_COMMITTING,
1945 helpcategory=command.CATEGORY_COMMITTING,
1943 helpbasic=True,
1946 helpbasic=True,
1944 inferrepo=True,
1947 inferrepo=True,
1945 )
1948 )
1946 def commit(ui, repo, *pats, **opts):
1949 def commit(ui, repo, *pats, **opts):
1947 """commit the specified files or all outstanding changes
1950 """commit the specified files or all outstanding changes
1948
1951
1949 Commit changes to the given files into the repository. Unlike a
1952 Commit changes to the given files into the repository. Unlike a
1950 centralized SCM, this operation is a local operation. See
1953 centralized SCM, this operation is a local operation. See
1951 :hg:`push` for a way to actively distribute your changes.
1954 :hg:`push` for a way to actively distribute your changes.
1952
1955
1953 If a list of files is omitted, all changes reported by :hg:`status`
1956 If a list of files is omitted, all changes reported by :hg:`status`
1954 will be committed.
1957 will be committed.
1955
1958
1956 If you are committing the result of a merge, do not provide any
1959 If you are committing the result of a merge, do not provide any
1957 filenames or -I/-X filters.
1960 filenames or -I/-X filters.
1958
1961
1959 If no commit message is specified, Mercurial starts your
1962 If no commit message is specified, Mercurial starts your
1960 configured editor where you can enter a message. In case your
1963 configured editor where you can enter a message. In case your
1961 commit fails, you will find a backup of your message in
1964 commit fails, you will find a backup of your message in
1962 ``.hg/last-message.txt``.
1965 ``.hg/last-message.txt``.
1963
1966
1964 The --close-branch flag can be used to mark the current branch
1967 The --close-branch flag can be used to mark the current branch
1965 head closed. When all heads of a branch are closed, the branch
1968 head closed. When all heads of a branch are closed, the branch
1966 will be considered closed and no longer listed.
1969 will be considered closed and no longer listed.
1967
1970
1968 The --amend flag can be used to amend the parent of the
1971 The --amend flag can be used to amend the parent of the
1969 working directory with a new commit that contains the changes
1972 working directory with a new commit that contains the changes
1970 in the parent in addition to those currently reported by :hg:`status`,
1973 in the parent in addition to those currently reported by :hg:`status`,
1971 if there are any. The old commit is stored in a backup bundle in
1974 if there are any. The old commit is stored in a backup bundle in
1972 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1975 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1973 on how to restore it).
1976 on how to restore it).
1974
1977
1975 Message, user and date are taken from the amended commit unless
1978 Message, user and date are taken from the amended commit unless
1976 specified. When a message isn't specified on the command line,
1979 specified. When a message isn't specified on the command line,
1977 the editor will open with the message of the amended commit.
1980 the editor will open with the message of the amended commit.
1978
1981
1979 It is not possible to amend public changesets (see :hg:`help phases`)
1982 It is not possible to amend public changesets (see :hg:`help phases`)
1980 or changesets that have children.
1983 or changesets that have children.
1981
1984
1982 See :hg:`help dates` for a list of formats valid for -d/--date.
1985 See :hg:`help dates` for a list of formats valid for -d/--date.
1983
1986
1984 Returns 0 on success, 1 if nothing changed.
1987 Returns 0 on success, 1 if nothing changed.
1985
1988
1986 .. container:: verbose
1989 .. container:: verbose
1987
1990
1988 Examples:
1991 Examples:
1989
1992
1990 - commit all files ending in .py::
1993 - commit all files ending in .py::
1991
1994
1992 hg commit --include "set:**.py"
1995 hg commit --include "set:**.py"
1993
1996
1994 - commit all non-binary files::
1997 - commit all non-binary files::
1995
1998
1996 hg commit --exclude "set:binary()"
1999 hg commit --exclude "set:binary()"
1997
2000
1998 - amend the current commit and set the date to now::
2001 - amend the current commit and set the date to now::
1999
2002
2000 hg commit --amend --date now
2003 hg commit --amend --date now
2001 """
2004 """
2002 with repo.wlock(), repo.lock():
2005 with repo.wlock(), repo.lock():
2003 return _docommit(ui, repo, *pats, **opts)
2006 return _docommit(ui, repo, *pats, **opts)
2004
2007
2005
2008
2006 def _docommit(ui, repo, *pats, **opts):
2009 def _docommit(ui, repo, *pats, **opts):
2007 if opts.get('interactive'):
2010 if opts.get('interactive'):
2008 opts.pop('interactive')
2011 opts.pop('interactive')
2009 ret = cmdutil.dorecord(
2012 ret = cmdutil.dorecord(
2010 ui, repo, commit, None, False, cmdutil.recordfilter, *pats, **opts
2013 ui, repo, commit, None, False, cmdutil.recordfilter, *pats, **opts
2011 )
2014 )
2012 # ret can be 0 (no changes to record) or the value returned by
2015 # ret can be 0 (no changes to record) or the value returned by
2013 # commit(), 1 if nothing changed or None on success.
2016 # commit(), 1 if nothing changed or None on success.
2014 return 1 if ret == 0 else ret
2017 return 1 if ret == 0 else ret
2015
2018
2016 opts = pycompat.byteskwargs(opts)
2019 opts = pycompat.byteskwargs(opts)
2017 if opts.get(b'subrepos'):
2020 if opts.get(b'subrepos'):
2018 cmdutil.check_incompatible_arguments(opts, b'subrepos', [b'amend'])
2021 cmdutil.check_incompatible_arguments(opts, b'subrepos', [b'amend'])
2019 # Let --subrepos on the command line override config setting.
2022 # Let --subrepos on the command line override config setting.
2020 ui.setconfig(b'ui', b'commitsubrepos', True, b'commit')
2023 ui.setconfig(b'ui', b'commitsubrepos', True, b'commit')
2021
2024
2022 cmdutil.checkunfinished(repo, commit=True)
2025 cmdutil.checkunfinished(repo, commit=True)
2023
2026
2024 branch = repo[None].branch()
2027 branch = repo[None].branch()
2025 bheads = repo.branchheads(branch)
2028 bheads = repo.branchheads(branch)
2026
2029
2027 extra = {}
2030 extra = {}
2028 if opts.get(b'close_branch') or opts.get(b'force_close_branch'):
2031 if opts.get(b'close_branch') or opts.get(b'force_close_branch'):
2029 extra[b'close'] = b'1'
2032 extra[b'close'] = b'1'
2030
2033
2031 if repo[b'.'].closesbranch():
2034 if repo[b'.'].closesbranch():
2032 raise error.Abort(
2035 raise error.Abort(
2033 _(b'current revision is already a branch closing head')
2036 _(b'current revision is already a branch closing head')
2034 )
2037 )
2035 elif not bheads:
2038 elif not bheads:
2036 raise error.Abort(_(b'branch "%s" has no heads to close') % branch)
2039 raise error.Abort(_(b'branch "%s" has no heads to close') % branch)
2037 elif (
2040 elif (
2038 branch == repo[b'.'].branch()
2041 branch == repo[b'.'].branch()
2039 and repo[b'.'].node() not in bheads
2042 and repo[b'.'].node() not in bheads
2040 and not opts.get(b'force_close_branch')
2043 and not opts.get(b'force_close_branch')
2041 ):
2044 ):
2042 hint = _(
2045 hint = _(
2043 b'use --force-close-branch to close branch from a non-head'
2046 b'use --force-close-branch to close branch from a non-head'
2044 b' changeset'
2047 b' changeset'
2045 )
2048 )
2046 raise error.Abort(_(b'can only close branch heads'), hint=hint)
2049 raise error.Abort(_(b'can only close branch heads'), hint=hint)
2047 elif opts.get(b'amend'):
2050 elif opts.get(b'amend'):
2048 if (
2051 if (
2049 repo[b'.'].p1().branch() != branch
2052 repo[b'.'].p1().branch() != branch
2050 and repo[b'.'].p2().branch() != branch
2053 and repo[b'.'].p2().branch() != branch
2051 ):
2054 ):
2052 raise error.Abort(_(b'can only close branch heads'))
2055 raise error.Abort(_(b'can only close branch heads'))
2053
2056
2054 if opts.get(b'amend'):
2057 if opts.get(b'amend'):
2055 if ui.configbool(b'ui', b'commitsubrepos'):
2058 if ui.configbool(b'ui', b'commitsubrepos'):
2056 raise error.Abort(_(b'cannot amend with ui.commitsubrepos enabled'))
2059 raise error.Abort(_(b'cannot amend with ui.commitsubrepos enabled'))
2057
2060
2058 old = repo[b'.']
2061 old = repo[b'.']
2059 rewriteutil.precheck(repo, [old.rev()], b'amend')
2062 rewriteutil.precheck(repo, [old.rev()], b'amend')
2060
2063
2061 # Currently histedit gets confused if an amend happens while histedit
2064 # Currently histedit gets confused if an amend happens while histedit
2062 # is in progress. Since we have a checkunfinished command, we are
2065 # is in progress. Since we have a checkunfinished command, we are
2063 # temporarily honoring it.
2066 # temporarily honoring it.
2064 #
2067 #
2065 # Note: eventually this guard will be removed. Please do not expect
2068 # Note: eventually this guard will be removed. Please do not expect
2066 # this behavior to remain.
2069 # this behavior to remain.
2067 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
2070 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
2068 cmdutil.checkunfinished(repo)
2071 cmdutil.checkunfinished(repo)
2069
2072
2070 node = cmdutil.amend(ui, repo, old, extra, pats, opts)
2073 node = cmdutil.amend(ui, repo, old, extra, pats, opts)
2071 if node == old.node():
2074 if node == old.node():
2072 ui.status(_(b"nothing changed\n"))
2075 ui.status(_(b"nothing changed\n"))
2073 return 1
2076 return 1
2074 else:
2077 else:
2075
2078
2076 def commitfunc(ui, repo, message, match, opts):
2079 def commitfunc(ui, repo, message, match, opts):
2077 overrides = {}
2080 overrides = {}
2078 if opts.get(b'secret'):
2081 if opts.get(b'secret'):
2079 overrides[(b'phases', b'new-commit')] = b'secret'
2082 overrides[(b'phases', b'new-commit')] = b'secret'
2080
2083
2081 baseui = repo.baseui
2084 baseui = repo.baseui
2082 with baseui.configoverride(overrides, b'commit'):
2085 with baseui.configoverride(overrides, b'commit'):
2083 with ui.configoverride(overrides, b'commit'):
2086 with ui.configoverride(overrides, b'commit'):
2084 editform = cmdutil.mergeeditform(
2087 editform = cmdutil.mergeeditform(
2085 repo[None], b'commit.normal'
2088 repo[None], b'commit.normal'
2086 )
2089 )
2087 editor = cmdutil.getcommiteditor(
2090 editor = cmdutil.getcommiteditor(
2088 editform=editform, **pycompat.strkwargs(opts)
2091 editform=editform, **pycompat.strkwargs(opts)
2089 )
2092 )
2090 return repo.commit(
2093 return repo.commit(
2091 message,
2094 message,
2092 opts.get(b'user'),
2095 opts.get(b'user'),
2093 opts.get(b'date'),
2096 opts.get(b'date'),
2094 match,
2097 match,
2095 editor=editor,
2098 editor=editor,
2096 extra=extra,
2099 extra=extra,
2097 )
2100 )
2098
2101
2099 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
2102 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
2100
2103
2101 if not node:
2104 if not node:
2102 stat = cmdutil.postcommitstatus(repo, pats, opts)
2105 stat = cmdutil.postcommitstatus(repo, pats, opts)
2103 if stat.deleted:
2106 if stat.deleted:
2104 ui.status(
2107 ui.status(
2105 _(
2108 _(
2106 b"nothing changed (%d missing files, see "
2109 b"nothing changed (%d missing files, see "
2107 b"'hg status')\n"
2110 b"'hg status')\n"
2108 )
2111 )
2109 % len(stat.deleted)
2112 % len(stat.deleted)
2110 )
2113 )
2111 else:
2114 else:
2112 ui.status(_(b"nothing changed\n"))
2115 ui.status(_(b"nothing changed\n"))
2113 return 1
2116 return 1
2114
2117
2115 cmdutil.commitstatus(repo, node, branch, bheads, opts)
2118 cmdutil.commitstatus(repo, node, branch, bheads, opts)
2116
2119
2117 if not ui.quiet and ui.configbool(b'commands', b'commit.post-status'):
2120 if not ui.quiet and ui.configbool(b'commands', b'commit.post-status'):
2118 status(
2121 status(
2119 ui,
2122 ui,
2120 repo,
2123 repo,
2121 modified=True,
2124 modified=True,
2122 added=True,
2125 added=True,
2123 removed=True,
2126 removed=True,
2124 deleted=True,
2127 deleted=True,
2125 unknown=True,
2128 unknown=True,
2126 subrepos=opts.get(b'subrepos'),
2129 subrepos=opts.get(b'subrepos'),
2127 )
2130 )
2128
2131
2129
2132
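In the interactive path near the top of _docommit above, the value returned by cmdutil.dorecord() is folded into the usual commit exit codes. A tiny standalone restatement of that normalization::

    def normalize_interactive_ret(ret):
        # dorecord() returns 0 when there was nothing to record; commit()
        # itself returns None on success and 1 when nothing changed
        return 1 if ret == 0 else ret

    assert normalize_interactive_ret(0) == 1        # nothing to record
    assert normalize_interactive_ret(None) is None  # successful commit
    assert normalize_interactive_ret(1) == 1        # nothing changed
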
2130 @command(
2133 @command(
2131 b'config|showconfig|debugconfig',
2134 b'config|showconfig|debugconfig',
2132 [
2135 [
2133 (b'u', b'untrusted', None, _(b'show untrusted configuration options')),
2136 (b'u', b'untrusted', None, _(b'show untrusted configuration options')),
2134 (b'e', b'edit', None, _(b'edit user config')),
2137 (b'e', b'edit', None, _(b'edit user config')),
2135 (b'l', b'local', None, _(b'edit repository config')),
2138 (b'l', b'local', None, _(b'edit repository config')),
2136 (
2139 (
2137 b'',
2140 b'',
2138 b'shared',
2141 b'shared',
2139 None,
2142 None,
2140 _(b'edit shared source repository config (EXPERIMENTAL)'),
2143 _(b'edit shared source repository config (EXPERIMENTAL)'),
2141 ),
2144 ),
2142 (b'', b'non-shared', None, _(b'edit non-shared config (EXPERIMENTAL)')),
2145 (b'', b'non-shared', None, _(b'edit non-shared config (EXPERIMENTAL)')),
2143 (b'g', b'global', None, _(b'edit global config')),
2146 (b'g', b'global', None, _(b'edit global config')),
2144 ]
2147 ]
2145 + formatteropts,
2148 + formatteropts,
2146 _(b'[-u] [NAME]...'),
2149 _(b'[-u] [NAME]...'),
2147 helpcategory=command.CATEGORY_HELP,
2150 helpcategory=command.CATEGORY_HELP,
2148 optionalrepo=True,
2151 optionalrepo=True,
2149 intents={INTENT_READONLY},
2152 intents={INTENT_READONLY},
2150 )
2153 )
2151 def config(ui, repo, *values, **opts):
2154 def config(ui, repo, *values, **opts):
2152 """show combined config settings from all hgrc files
2155 """show combined config settings from all hgrc files
2153
2156
2154 With no arguments, print names and values of all config items.
2157 With no arguments, print names and values of all config items.
2155
2158
2156 With one argument of the form section.name, print just the value
2159 With one argument of the form section.name, print just the value
2157 of that config item.
2160 of that config item.
2158
2161
2159 With multiple arguments, print names and values of all config
2162 With multiple arguments, print names and values of all config
2160 items with matching section names or section.names.
2163 items with matching section names or section.names.
2161
2164
2162 With --edit, start an editor on the user-level config file. With
2165 With --edit, start an editor on the user-level config file. With
2163 --global, edit the system-wide config file. With --local, edit the
2166 --global, edit the system-wide config file. With --local, edit the
2164 repository-level config file.
2167 repository-level config file.
2165
2168
2166 With --debug, the source (filename and line number) is printed
2169 With --debug, the source (filename and line number) is printed
2167 for each config item.
2170 for each config item.
2168
2171
2169 See :hg:`help config` for more information about config files.
2172 See :hg:`help config` for more information about config files.
2170
2173
2171 .. container:: verbose
2174 .. container:: verbose
2172
2175
2173 The --non-shared flag is used to edit the `.hg/hgrc-not-shared` config file.
2176 The --non-shared flag is used to edit the `.hg/hgrc-not-shared` config file.
2174 This file is not shared across shares when in share-safe mode.
2177 This file is not shared across shares when in share-safe mode.
2175
2178
2176 Template:
2179 Template:
2177
2180
2178 The following keywords are supported. See also :hg:`help templates`.
2181 The following keywords are supported. See also :hg:`help templates`.
2179
2182
2180 :name: String. Config name.
2183 :name: String. Config name.
2181 :source: String. Filename and line number where the item is defined.
2184 :source: String. Filename and line number where the item is defined.
2182 :value: String. Config value.
2185 :value: String. Config value.
2183
2186
2184 The --shared flag can be used to edit the config file of the shared
2187 The --shared flag can be used to edit the config file of the shared
2185 source repository. It only works when the repository has been shared
2188 source repository. It only works when the repository has been shared
2186 using the experimental share-safe feature.
2189 using the experimental share-safe feature.
2187
2190
2188 Returns 0 on success, 1 if NAME does not exist.
2191 Returns 0 on success, 1 if NAME does not exist.
2189
2192
2190 """
2193 """
2191
2194
2192 opts = pycompat.byteskwargs(opts)
2195 opts = pycompat.byteskwargs(opts)
2193 editopts = (b'edit', b'local', b'global', b'shared', b'non_shared')
2196 editopts = (b'edit', b'local', b'global', b'shared', b'non_shared')
2194 if any(opts.get(o) for o in editopts):
2197 if any(opts.get(o) for o in editopts):
2195 cmdutil.check_at_most_one_arg(opts, *editopts[1:])
2198 cmdutil.check_at_most_one_arg(opts, *editopts[1:])
2196 if opts.get(b'local'):
2199 if opts.get(b'local'):
2197 if not repo:
2200 if not repo:
2198 raise error.Abort(_(b"can't use --local outside a repository"))
2201 raise error.Abort(_(b"can't use --local outside a repository"))
2199 paths = [repo.vfs.join(b'hgrc')]
2202 paths = [repo.vfs.join(b'hgrc')]
2200 elif opts.get(b'global'):
2203 elif opts.get(b'global'):
2201 paths = rcutil.systemrcpath()
2204 paths = rcutil.systemrcpath()
2202 elif opts.get(b'shared'):
2205 elif opts.get(b'shared'):
2203 if not repo.shared():
2206 if not repo.shared():
2204 raise error.Abort(
2207 raise error.Abort(
2205 _(b"repository is not shared; can't use --shared")
2208 _(b"repository is not shared; can't use --shared")
2206 )
2209 )
2207 if requirements.SHARESAFE_REQUIREMENT not in repo.requirements:
2210 if requirements.SHARESAFE_REQUIREMENT not in repo.requirements:
2208 raise error.Abort(
2211 raise error.Abort(
2209 _(
2212 _(
2210 b"share safe feature not enabled; "
2213 b"share safe feature not enabled; "
2211 b"unable to edit shared source repository config"
2214 b"unable to edit shared source repository config"
2212 )
2215 )
2213 )
2216 )
2214 paths = [vfsmod.vfs(repo.sharedpath).join(b'hgrc')]
2217 paths = [vfsmod.vfs(repo.sharedpath).join(b'hgrc')]
2215 elif opts.get(b'non_shared'):
2218 elif opts.get(b'non_shared'):
2216 paths = [repo.vfs.join(b'hgrc-not-shared')]
2219 paths = [repo.vfs.join(b'hgrc-not-shared')]
2217 else:
2220 else:
2218 paths = rcutil.userrcpath()
2221 paths = rcutil.userrcpath()
2219
2222
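# Editor's note: the for/else below uses Python's loop-else clause; the else
# branch runs only when none of the candidate config files exist yet, in which
# case a sample hgrc for the requested scope is written to the first candidate
# path before the editor is launched.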
2220 for f in paths:
2223 for f in paths:
2221 if os.path.exists(f):
2224 if os.path.exists(f):
2222 break
2225 break
2223 else:
2226 else:
2224 if opts.get(b'global'):
2227 if opts.get(b'global'):
2225 samplehgrc = uimod.samplehgrcs[b'global']
2228 samplehgrc = uimod.samplehgrcs[b'global']
2226 elif opts.get(b'local'):
2229 elif opts.get(b'local'):
2227 samplehgrc = uimod.samplehgrcs[b'local']
2230 samplehgrc = uimod.samplehgrcs[b'local']
2228 else:
2231 else:
2229 samplehgrc = uimod.samplehgrcs[b'user']
2232 samplehgrc = uimod.samplehgrcs[b'user']
2230
2233
2231 f = paths[0]
2234 f = paths[0]
2232 fp = open(f, b"wb")
2235 fp = open(f, b"wb")
2233 fp.write(util.tonativeeol(samplehgrc))
2236 fp.write(util.tonativeeol(samplehgrc))
2234 fp.close()
2237 fp.close()
2235
2238
2236 editor = ui.geteditor()
2239 editor = ui.geteditor()
2237 ui.system(
2240 ui.system(
2238 b"%s \"%s\"" % (editor, f),
2241 b"%s \"%s\"" % (editor, f),
2239 onerr=error.Abort,
2242 onerr=error.Abort,
2240 errprefix=_(b"edit failed"),
2243 errprefix=_(b"edit failed"),
2241 blockedtag=b'config_edit',
2244 blockedtag=b'config_edit',
2242 )
2245 )
2243 return
2246 return
2244 ui.pager(b'config')
2247 ui.pager(b'config')
2245 fm = ui.formatter(b'config', opts)
2248 fm = ui.formatter(b'config', opts)
2246 for t, f in rcutil.rccomponents():
2249 for t, f in rcutil.rccomponents():
2247 if t == b'path':
2250 if t == b'path':
2248 ui.debug(b'read config from: %s\n' % f)
2251 ui.debug(b'read config from: %s\n' % f)
2249 elif t == b'resource':
2252 elif t == b'resource':
2250 ui.debug(b'read config from: resource:%s.%s\n' % (f[0], f[1]))
2253 ui.debug(b'read config from: resource:%s.%s\n' % (f[0], f[1]))
2251 elif t == b'items':
2254 elif t == b'items':
2252 # Don't print anything for 'items'.
2255 # Don't print anything for 'items'.
2253 pass
2256 pass
2254 else:
2257 else:
2255 raise error.ProgrammingError(b'unknown rctype: %s' % t)
2258 raise error.ProgrammingError(b'unknown rctype: %s' % t)
2256 untrusted = bool(opts.get(b'untrusted'))
2259 untrusted = bool(opts.get(b'untrusted'))
2257
2260
2258 selsections = selentries = []
2261 selsections = selentries = []
2259 if values:
2262 if values:
2260 selsections = [v for v in values if b'.' not in v]
2263 selsections = [v for v in values if b'.' not in v]
2261 selentries = [v for v in values if b'.' in v]
2264 selentries = [v for v in values if b'.' in v]
2262 uniquesel = len(selentries) == 1 and not selsections
2265 uniquesel = len(selentries) == 1 and not selsections
2263 selsections = set(selsections)
2266 selsections = set(selsections)
2264 selentries = set(selentries)
2267 selentries = set(selentries)
2265
2268
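# Editor's note: `selsections` keeps bare section names and `selentries` keeps
# dotted section.name arguments; `uniquesel` is true only for a single dotted
# argument, which makes the loop below print the bare value instead of
# name=value.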
2266 matched = False
2269 matched = False
2267 for section, name, value in ui.walkconfig(untrusted=untrusted):
2270 for section, name, value in ui.walkconfig(untrusted=untrusted):
2268 source = ui.configsource(section, name, untrusted)
2271 source = ui.configsource(section, name, untrusted)
2269 value = pycompat.bytestr(value)
2272 value = pycompat.bytestr(value)
2270 defaultvalue = ui.configdefault(section, name)
2273 defaultvalue = ui.configdefault(section, name)
2271 if fm.isplain():
2274 if fm.isplain():
2272 source = source or b'none'
2275 source = source or b'none'
2273 value = value.replace(b'\n', b'\\n')
2276 value = value.replace(b'\n', b'\\n')
2274 entryname = section + b'.' + name
2277 entryname = section + b'.' + name
2275 if values and not (section in selsections or entryname in selentries):
2278 if values and not (section in selsections or entryname in selentries):
2276 continue
2279 continue
2277 fm.startitem()
2280 fm.startitem()
2278 fm.condwrite(ui.debugflag, b'source', b'%s: ', source)
2281 fm.condwrite(ui.debugflag, b'source', b'%s: ', source)
2279 if uniquesel:
2282 if uniquesel:
2280 fm.data(name=entryname)
2283 fm.data(name=entryname)
2281 fm.write(b'value', b'%s\n', value)
2284 fm.write(b'value', b'%s\n', value)
2282 else:
2285 else:
2283 fm.write(b'name value', b'%s=%s\n', entryname, value)
2286 fm.write(b'name value', b'%s=%s\n', entryname, value)
2284 if formatter.isprintable(defaultvalue):
2287 if formatter.isprintable(defaultvalue):
2285 fm.data(defaultvalue=defaultvalue)
2288 fm.data(defaultvalue=defaultvalue)
2286 elif isinstance(defaultvalue, list) and all(
2289 elif isinstance(defaultvalue, list) and all(
2287 formatter.isprintable(e) for e in defaultvalue
2290 formatter.isprintable(e) for e in defaultvalue
2288 ):
2291 ):
2289 fm.data(defaultvalue=fm.formatlist(defaultvalue, name=b'value'))
2292 fm.data(defaultvalue=fm.formatlist(defaultvalue, name=b'value'))
2290 # TODO: no idea how to process unsupported defaultvalue types
2293 # TODO: no idea how to process unsupported defaultvalue types
2291 matched = True
2294 matched = True
2292 fm.end()
2295 fm.end()
2293 if matched:
2296 if matched:
2294 return 0
2297 return 0
2295 return 1
2298 return 1
2296
2299
2297
2300
2298 @command(
2301 @command(
2299 b'continue',
2302 b'continue',
2300 dryrunopts,
2303 dryrunopts,
2301 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
2304 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
2302 helpbasic=True,
2305 helpbasic=True,
2303 )
2306 )
2304 def continuecmd(ui, repo, **opts):
2307 def continuecmd(ui, repo, **opts):
2305 """resumes an interrupted operation (EXPERIMENTAL)
2308 """resumes an interrupted operation (EXPERIMENTAL)
2306
2309
2307 Finishes a multistep operation like graft, histedit, rebase, merge,
2310 Finishes a multistep operation like graft, histedit, rebase, merge,
2308 and unshelve if they are in an interrupted state.
2311 and unshelve if they are in an interrupted state.
2309
2312
2310 Use --dry-run/-n to perform a dry run of the command.
2313 Use --dry-run/-n to perform a dry run of the command.
2311 """
2314 """
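# Editor's note: an illustrative session (not taken from the source) showing
# how `hg continue` defers to whatever operation left an interrupted state:
#
#   $ hg rebase -s feature -d default   # stops on merge conflicts
#   $ hg resolve --mark conflicted.txt
#   $ hg continue --dry-run             # prints 'rebase in progress, will be resumed'
#   $ hg continue                       # resumes the rebase via its continuefunc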
2312 dryrun = opts.get('dry_run')
2315 dryrun = opts.get('dry_run')
2313 contstate = cmdutil.getunfinishedstate(repo)
2316 contstate = cmdutil.getunfinishedstate(repo)
2314 if not contstate:
2317 if not contstate:
2315 raise error.Abort(_(b'no operation in progress'))
2318 raise error.Abort(_(b'no operation in progress'))
2316 if not contstate.continuefunc:
2319 if not contstate.continuefunc:
2317 raise error.Abort(
2320 raise error.Abort(
2318 (
2321 (
2319 _(b"%s in progress but does not support 'hg continue'")
2322 _(b"%s in progress but does not support 'hg continue'")
2320 % (contstate._opname)
2323 % (contstate._opname)
2321 ),
2324 ),
2322 hint=contstate.continuemsg(),
2325 hint=contstate.continuemsg(),
2323 )
2326 )
2324 if dryrun:
2327 if dryrun:
2325 ui.status(_(b'%s in progress, will be resumed\n') % (contstate._opname))
2328 ui.status(_(b'%s in progress, will be resumed\n') % (contstate._opname))
2326 return
2329 return
2327 return contstate.continuefunc(ui, repo)
2330 return contstate.continuefunc(ui, repo)
2328
2331
2329
2332
2330 @command(
2333 @command(
2331 b'copy|cp',
2334 b'copy|cp',
2332 [
2335 [
2333 (b'', b'forget', None, _(b'unmark a destination file as copied')),
2336 (b'', b'forget', None, _(b'unmark a destination file as copied')),
2334 (b'A', b'after', None, _(b'record a copy that has already occurred')),
2337 (b'A', b'after', None, _(b'record a copy that has already occurred')),
2335 (
2338 (
2336 b'',
2339 b'',
2337 b'at-rev',
2340 b'at-rev',
2338 b'',
2341 b'',
2339 _(b'(un)mark copies in the given revision (EXPERIMENTAL)'),
2342 _(b'(un)mark copies in the given revision (EXPERIMENTAL)'),
2340 _(b'REV'),
2343 _(b'REV'),
2341 ),
2344 ),
2342 (
2345 (
2343 b'f',
2346 b'f',
2344 b'force',
2347 b'force',
2345 None,
2348 None,
2346 _(b'forcibly copy over an existing managed file'),
2349 _(b'forcibly copy over an existing managed file'),
2347 ),
2350 ),
2348 ]
2351 ]
2349 + walkopts
2352 + walkopts
2350 + dryrunopts,
2353 + dryrunopts,
2351 _(b'[OPTION]... SOURCE... DEST'),
2354 _(b'[OPTION]... SOURCE... DEST'),
2352 helpcategory=command.CATEGORY_FILE_CONTENTS,
2355 helpcategory=command.CATEGORY_FILE_CONTENTS,
2353 )
2356 )
2354 def copy(ui, repo, *pats, **opts):
2357 def copy(ui, repo, *pats, **opts):
2355 """mark files as copied for the next commit
2358 """mark files as copied for the next commit
2356
2359
2357 Mark dest as having copies of source files. If dest is a
2360 Mark dest as having copies of source files. If dest is a
2358 directory, copies are put in that directory. If dest is a file,
2361 directory, copies are put in that directory. If dest is a file,
2359 the source must be a single file.
2362 the source must be a single file.
2360
2363
2361 By default, this command copies the contents of files as they
2364 By default, this command copies the contents of files as they
2362 exist in the working directory. If invoked with -A/--after, the
2365 exist in the working directory. If invoked with -A/--after, the
2363 operation is recorded, but no copying is performed.
2366 operation is recorded, but no copying is performed.
2364
2367
2365 To undo marking a destination file as copied, use --forget. With that
2368 To undo marking a destination file as copied, use --forget. With that
2366 option, all given (positional) arguments are unmarked as copies. The
2369 option, all given (positional) arguments are unmarked as copies. The
2367 destination file(s) will be left in place (still tracked).
2370 destination file(s) will be left in place (still tracked).
2368
2371
2369 This command takes effect with the next commit by default.
2372 This command takes effect with the next commit by default.
2370
2373
2371 Returns 0 on success, 1 if errors are encountered.
2374 Returns 0 on success, 1 if errors are encountered.
2372 """
2375 """
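# Editor's note: illustrative invocations (not from the source):
#   hg copy foo.c bar.c           # copy foo.c to bar.c and mark it for commit
#   hg copy --after foo.c bar.c   # record a copy that was already made on disk
#   hg copy --forget bar.c        # unmark bar.c as a copy; it stays tracked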
2373 opts = pycompat.byteskwargs(opts)
2376 opts = pycompat.byteskwargs(opts)
2374 with repo.wlock():
2377 with repo.wlock():
2375 return cmdutil.copy(ui, repo, pats, opts)
2378 return cmdutil.copy(ui, repo, pats, opts)
2376
2379
2377
2380
2378 @command(
2381 @command(
2379 b'debugcommands',
2382 b'debugcommands',
2380 [],
2383 [],
2381 _(b'[COMMAND]'),
2384 _(b'[COMMAND]'),
2382 helpcategory=command.CATEGORY_HELP,
2385 helpcategory=command.CATEGORY_HELP,
2383 norepo=True,
2386 norepo=True,
2384 )
2387 )
2385 def debugcommands(ui, cmd=b'', *args):
2388 def debugcommands(ui, cmd=b'', *args):
2386 """list all available commands and options"""
2389 """list all available commands and options"""
2387 for cmd, vals in sorted(pycompat.iteritems(table)):
2390 for cmd, vals in sorted(pycompat.iteritems(table)):
2388 cmd = cmd.split(b'|')[0]
2391 cmd = cmd.split(b'|')[0]
2389 opts = b', '.join([i[1] for i in vals[1]])
2392 opts = b', '.join([i[1] for i in vals[1]])
2390 ui.write(b'%s: %s\n' % (cmd, opts))
2393 ui.write(b'%s: %s\n' % (cmd, opts))
2391
2394
2392
2395
2393 @command(
2396 @command(
2394 b'debugcomplete',
2397 b'debugcomplete',
2395 [(b'o', b'options', None, _(b'show the command options'))],
2398 [(b'o', b'options', None, _(b'show the command options'))],
2396 _(b'[-o] CMD'),
2399 _(b'[-o] CMD'),
2397 helpcategory=command.CATEGORY_HELP,
2400 helpcategory=command.CATEGORY_HELP,
2398 norepo=True,
2401 norepo=True,
2399 )
2402 )
2400 def debugcomplete(ui, cmd=b'', **opts):
2403 def debugcomplete(ui, cmd=b'', **opts):
2401 """returns the completion list associated with the given command"""
2404 """returns the completion list associated with the given command"""
2402
2405
2403 if opts.get('options'):
2406 if opts.get('options'):
2404 options = []
2407 options = []
2405 otables = [globalopts]
2408 otables = [globalopts]
2406 if cmd:
2409 if cmd:
2407 aliases, entry = cmdutil.findcmd(cmd, table, False)
2410 aliases, entry = cmdutil.findcmd(cmd, table, False)
2408 otables.append(entry[1])
2411 otables.append(entry[1])
2409 for t in otables:
2412 for t in otables:
2410 for o in t:
2413 for o in t:
2411 if b"(DEPRECATED)" in o[3]:
2414 if b"(DEPRECATED)" in o[3]:
2412 continue
2415 continue
2413 if o[0]:
2416 if o[0]:
2414 options.append(b'-%s' % o[0])
2417 options.append(b'-%s' % o[0])
2415 options.append(b'--%s' % o[1])
2418 options.append(b'--%s' % o[1])
2416 ui.write(b"%s\n" % b"\n".join(options))
2419 ui.write(b"%s\n" % b"\n".join(options))
2417 return
2420 return
2418
2421
2419 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
2422 cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
2420 if ui.verbose:
2423 if ui.verbose:
2421 cmdlist = [b' '.join(c[0]) for c in cmdlist.values()]
2424 cmdlist = [b' '.join(c[0]) for c in cmdlist.values()]
2422 ui.write(b"%s\n" % b"\n".join(sorted(cmdlist)))
2425 ui.write(b"%s\n" % b"\n".join(sorted(cmdlist)))
2423
2426
2424
2427
2425 @command(
2428 @command(
2426 b'diff',
2429 b'diff',
2427 [
2430 [
2428 (b'r', b'rev', [], _(b'revision'), _(b'REV')),
2431 (b'r', b'rev', [], _(b'revision'), _(b'REV')),
2429 (b'c', b'change', b'', _(b'change made by revision'), _(b'REV')),
2432 (b'c', b'change', b'', _(b'change made by revision'), _(b'REV')),
2430 ]
2433 ]
2431 + diffopts
2434 + diffopts
2432 + diffopts2
2435 + diffopts2
2433 + walkopts
2436 + walkopts
2434 + subrepoopts,
2437 + subrepoopts,
2435 _(b'[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
2438 _(b'[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
2436 helpcategory=command.CATEGORY_FILE_CONTENTS,
2439 helpcategory=command.CATEGORY_FILE_CONTENTS,
2437 helpbasic=True,
2440 helpbasic=True,
2438 inferrepo=True,
2441 inferrepo=True,
2439 intents={INTENT_READONLY},
2442 intents={INTENT_READONLY},
2440 )
2443 )
2441 def diff(ui, repo, *pats, **opts):
2444 def diff(ui, repo, *pats, **opts):
2442 """diff repository (or selected files)
2445 """diff repository (or selected files)
2443
2446
2444 Show differences between revisions for the specified files.
2447 Show differences between revisions for the specified files.
2445
2448
2446 Differences between files are shown using the unified diff format.
2449 Differences between files are shown using the unified diff format.
2447
2450
2448 .. note::
2451 .. note::
2449
2452
2450 :hg:`diff` may generate unexpected results for merges, as it will
2453 :hg:`diff` may generate unexpected results for merges, as it will
2451 default to comparing against the working directory's first
2454 default to comparing against the working directory's first
2452 parent changeset if no revisions are specified.
2455 parent changeset if no revisions are specified.
2453
2456
2454 When two revision arguments are given, then changes are shown
2457 When two revision arguments are given, then changes are shown
2455 between those revisions. If only one revision is specified then
2458 between those revisions. If only one revision is specified then
2456 that revision is compared to the working directory, and, when no
2459 that revision is compared to the working directory, and, when no
2457 revisions are specified, the working directory files are compared
2460 revisions are specified, the working directory files are compared
2458 to its first parent.
2461 to its first parent.
2459
2462
2460 Alternatively you can specify -c/--change with a revision to see
2463 Alternatively you can specify -c/--change with a revision to see
2461 the changes in that changeset relative to its first parent.
2464 the changes in that changeset relative to its first parent.
2462
2465
2463 Without the -a/--text option, diff will avoid generating diffs of
2466 Without the -a/--text option, diff will avoid generating diffs of
2464 files it detects as binary. With -a, diff will generate a diff
2467 files it detects as binary. With -a, diff will generate a diff
2465 anyway, probably with undesirable results.
2468 anyway, probably with undesirable results.
2466
2469
2467 Use the -g/--git option to generate diffs in the git extended diff
2470 Use the -g/--git option to generate diffs in the git extended diff
2468 format. For more information, read :hg:`help diffs`.
2471 format. For more information, read :hg:`help diffs`.
2469
2472
2470 .. container:: verbose
2473 .. container:: verbose
2471
2474
2472 Examples:
2475 Examples:
2473
2476
2474 - compare a file in the current working directory to its parent::
2477 - compare a file in the current working directory to its parent::
2475
2478
2476 hg diff foo.c
2479 hg diff foo.c
2477
2480
2478 - compare two historical versions of a directory, with rename info::
2481 - compare two historical versions of a directory, with rename info::
2479
2482
2480 hg diff --git -r 1.0:1.2 lib/
2483 hg diff --git -r 1.0:1.2 lib/
2481
2484
2482 - get change stats relative to the last change on some date::
2485 - get change stats relative to the last change on some date::
2483
2486
2484 hg diff --stat -r "date('may 2')"
2487 hg diff --stat -r "date('may 2')"
2485
2488
2486 - diff all newly-added files that contain a keyword::
2489 - diff all newly-added files that contain a keyword::
2487
2490
2488 hg diff "set:added() and grep(GNU)"
2491 hg diff "set:added() and grep(GNU)"
2489
2492
2490 - compare a revision and its parents::
2493 - compare a revision and its parents::
2491
2494
2492 hg diff -c 9353 # compare against first parent
2495 hg diff -c 9353 # compare against first parent
2493 hg diff -r 9353^:9353 # same using revset syntax
2496 hg diff -r 9353^:9353 # same using revset syntax
2494 hg diff -r 9353^2:9353 # compare against the second parent
2497 hg diff -r 9353^2:9353 # compare against the second parent
2495
2498
2496 Returns 0 on success.
2499 Returns 0 on success.
2497 """
2500 """
2498
2501
2499 cmdutil.check_at_most_one_arg(opts, 'rev', 'change')
2502 cmdutil.check_at_most_one_arg(opts, 'rev', 'change')
2500 opts = pycompat.byteskwargs(opts)
2503 opts = pycompat.byteskwargs(opts)
2501 revs = opts.get(b'rev')
2504 revs = opts.get(b'rev')
2502 change = opts.get(b'change')
2505 change = opts.get(b'change')
2503 stat = opts.get(b'stat')
2506 stat = opts.get(b'stat')
2504 reverse = opts.get(b'reverse')
2507 reverse = opts.get(b'reverse')
2505
2508
2506 if change:
2509 if change:
2507 repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn')
2510 repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn')
2508 ctx2 = scmutil.revsingle(repo, change, None)
2511 ctx2 = scmutil.revsingle(repo, change, None)
2509 ctx1 = ctx2.p1()
2512 ctx1 = ctx2.p1()
2510 else:
2513 else:
2511 repo = scmutil.unhidehashlikerevs(repo, revs, b'nowarn')
2514 repo = scmutil.unhidehashlikerevs(repo, revs, b'nowarn')
2512 ctx1, ctx2 = scmutil.revpair(repo, revs)
2515 ctx1, ctx2 = scmutil.revpair(repo, revs)
2513
2516
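# Editor's note (restating the docstring): with --change the pair is
# (REV's first parent, REV); otherwise revpair() maps no -r arguments to
# (working directory parent, working directory) and a single -r to
# (REV, working directory). --reverse merely swaps which side is 'left'.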
2514 if reverse:
2517 if reverse:
2515 ctxleft = ctx2
2518 ctxleft = ctx2
2516 ctxright = ctx1
2519 ctxright = ctx1
2517 else:
2520 else:
2518 ctxleft = ctx1
2521 ctxleft = ctx1
2519 ctxright = ctx2
2522 ctxright = ctx2
2520
2523
2521 diffopts = patch.diffallopts(ui, opts)
2524 diffopts = patch.diffallopts(ui, opts)
2522 m = scmutil.match(ctx2, pats, opts)
2525 m = scmutil.match(ctx2, pats, opts)
2523 m = repo.narrowmatch(m)
2526 m = repo.narrowmatch(m)
2524 ui.pager(b'diff')
2527 ui.pager(b'diff')
2525 logcmdutil.diffordiffstat(
2528 logcmdutil.diffordiffstat(
2526 ui,
2529 ui,
2527 repo,
2530 repo,
2528 diffopts,
2531 diffopts,
2529 ctxleft,
2532 ctxleft,
2530 ctxright,
2533 ctxright,
2531 m,
2534 m,
2532 stat=stat,
2535 stat=stat,
2533 listsubrepos=opts.get(b'subrepos'),
2536 listsubrepos=opts.get(b'subrepos'),
2534 root=opts.get(b'root'),
2537 root=opts.get(b'root'),
2535 )
2538 )
2536
2539
2537
2540
2538 @command(
2541 @command(
2539 b'export',
2542 b'export',
2540 [
2543 [
2541 (
2544 (
2542 b'B',
2545 b'B',
2543 b'bookmark',
2546 b'bookmark',
2544 b'',
2547 b'',
2545 _(b'export changes only reachable by given bookmark'),
2548 _(b'export changes only reachable by given bookmark'),
2546 _(b'BOOKMARK'),
2549 _(b'BOOKMARK'),
2547 ),
2550 ),
2548 (
2551 (
2549 b'o',
2552 b'o',
2550 b'output',
2553 b'output',
2551 b'',
2554 b'',
2552 _(b'print output to file with formatted name'),
2555 _(b'print output to file with formatted name'),
2553 _(b'FORMAT'),
2556 _(b'FORMAT'),
2554 ),
2557 ),
2555 (b'', b'switch-parent', None, _(b'diff against the second parent')),
2558 (b'', b'switch-parent', None, _(b'diff against the second parent')),
2556 (b'r', b'rev', [], _(b'revisions to export'), _(b'REV')),
2559 (b'r', b'rev', [], _(b'revisions to export'), _(b'REV')),
2557 ]
2560 ]
2558 + diffopts
2561 + diffopts
2559 + formatteropts,
2562 + formatteropts,
2560 _(b'[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'),
2563 _(b'[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'),
2561 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2564 helpcategory=command.CATEGORY_IMPORT_EXPORT,
2562 helpbasic=True,
2565 helpbasic=True,
2563 intents={INTENT_READONLY},
2566 intents={INTENT_READONLY},
2564 )
2567 )
2565 def export(ui, repo, *changesets, **opts):
2568 def export(ui, repo, *changesets, **opts):
2566 """dump the header and diffs for one or more changesets
2569 """dump the header and diffs for one or more changesets
2567
2570
2568 Print the changeset header and diffs for one or more revisions.
2571 Print the changeset header and diffs for one or more revisions.
2569 If no revision is given, the parent of the working directory is used.
2572 If no revision is given, the parent of the working directory is used.
2570
2573
2571 The information shown in the changeset header is: author, date,
2574 The information shown in the changeset header is: author, date,
2572 branch name (if non-default), changeset hash, parent(s) and commit
2575 branch name (if non-default), changeset hash, parent(s) and commit
2573 comment.
2576 comment.
2574
2577
2575 .. note::
2578 .. note::
2576
2579
2577 :hg:`export` may generate unexpected diff output for merge
2580 :hg:`export` may generate unexpected diff output for merge
2578 changesets, as it will compare the merge changeset against its
2581 changesets, as it will compare the merge changeset against its
2579 first parent only.
2582 first parent only.
2580
2583
2581 Output may be to a file, in which case the name of the file is
2584 Output may be to a file, in which case the name of the file is
2582 given using a template string. See :hg:`help templates`. In addition
2585 given using a template string. See :hg:`help templates`. In addition
2583 to the common template keywords, the following formatting rules are
2586 to the common template keywords, the following formatting rules are
2584 supported:
2587 supported:
2585
2588
2586 :``%%``: literal "%" character
2589 :``%%``: literal "%" character
2587 :``%H``: changeset hash (40 hexadecimal digits)
2590 :``%H``: changeset hash (40 hexadecimal digits)
2588 :``%N``: number of patches being generated
2591 :``%N``: number of patches being generated
2589 :``%R``: changeset revision number
2592 :``%R``: changeset revision number
2590 :``%b``: basename of the exporting repository
2593 :``%b``: basename of the exporting repository
2591 :``%h``: short-form changeset hash (12 hexadecimal digits)
2594 :``%h``: short-form changeset hash (12 hexadecimal digits)
2592 :``%m``: first line of the commit message (only alphanumeric characters)
2595 :``%m``: first line of the commit message (only alphanumeric characters)
2593 :``%n``: zero-padded sequence number, starting at 1
2596 :``%n``: zero-padded sequence number, starting at 1
2594 :``%r``: zero-padded changeset revision number
2597 :``%r``: zero-padded changeset revision number
2595 :``\\``: literal "\\" character
2598 :``\\``: literal "\\" character
2596
2599
2597 Without the -a/--text option, export will avoid generating diffs
2600 Without the -a/--text option, export will avoid generating diffs
2598 of files it detects as binary. With -a, export will generate a
2601 of files it detects as binary. With -a, export will generate a
2599 diff anyway, probably with undesirable results.
2602 diff anyway, probably with undesirable results.
2600
2603
2601 With -B/--bookmark, changesets reachable by the given bookmark are
2604 With -B/--bookmark, changesets reachable by the given bookmark are
2602 selected.
2605 selected.
2603
2606
2604 Use the -g/--git option to generate diffs in the git extended diff
2607 Use the -g/--git option to generate diffs in the git extended diff
2605 format. See :hg:`help diffs` for more information.
2608 format. See :hg:`help diffs` for more information.
2606
2609
2607 With the --switch-parent option, the diff will be against the
2610 With the --switch-parent option, the diff will be against the
2608 second parent. This can be useful for reviewing a merge.
2611 second parent. This can be useful for reviewing a merge.
2609
2612
2610 .. container:: verbose
2613 .. container:: verbose
2611
2614
2612 Template:
2615 Template:
2613
2616
2614 The following keywords are supported in addition to the common template
2617 The following keywords are supported in addition to the common template
2615 keywords and functions. See also :hg:`help templates`.
2618 keywords and functions. See also :hg:`help templates`.
2616
2619
2617 :diff: String. Diff content.
2620 :diff: String. Diff content.
2618 :parents: List of strings. Parent nodes of the changeset.
2621 :parents: List of strings. Parent nodes of the changeset.
2619
2622
2620 Examples:
2623 Examples:
2621
2624
2622 - use export and import to transplant a bugfix to the current
2625 - use export and import to transplant a bugfix to the current
2623 branch::
2626 branch::
2624
2627
2625 hg export -r 9353 | hg import -
2628 hg export -r 9353 | hg import -
2626
2629
2627 - export all the changesets between two revisions to a file with
2630 - export all the changesets between two revisions to a file with
2628 rename information::
2631 rename information::
2629
2632
2630 hg export --git -r 123:150 > changes.txt
2633 hg export --git -r 123:150 > changes.txt
2631
2634
2632 - split outgoing changes into a series of patches with
2635 - split outgoing changes into a series of patches with
2633 descriptive names::
2636 descriptive names::
2634
2637
2635 hg export -r "outgoing()" -o "%n-%m.patch"
2638 hg export -r "outgoing()" -o "%n-%m.patch"
2636
2639
2637 Returns 0 on success.
2640 Returns 0 on success.
2638 """
2641 """
2639 opts = pycompat.byteskwargs(opts)
2642 opts = pycompat.byteskwargs(opts)
2640 bookmark = opts.get(b'bookmark')
2643 bookmark = opts.get(b'bookmark')
2641 changesets += tuple(opts.get(b'rev', []))
2644 changesets += tuple(opts.get(b'rev', []))
2642
2645
2643 cmdutil.check_at_most_one_arg(opts, b'rev', b'bookmark')
2646 cmdutil.check_at_most_one_arg(opts, b'rev', b'bookmark')
2644
2647
2645 if bookmark:
2648 if bookmark:
2646 if bookmark not in repo._bookmarks:
2649 if bookmark not in repo._bookmarks:
2647 raise error.Abort(_(b"bookmark '%s' not found") % bookmark)
2650 raise error.Abort(_(b"bookmark '%s' not found") % bookmark)
2648
2651
2649 revs = scmutil.bookmarkrevs(repo, bookmark)
2652 revs = scmutil.bookmarkrevs(repo, bookmark)
2650 else:
2653 else:
2651 if not changesets:
2654 if not changesets:
2652 changesets = [b'.']
2655 changesets = [b'.']
2653
2656
2654 repo = scmutil.unhidehashlikerevs(repo, changesets, b'nowarn')
2657 repo = scmutil.unhidehashlikerevs(repo, changesets, b'nowarn')
2655 revs = scmutil.revrange(repo, changesets)
2658 revs = scmutil.revrange(repo, changesets)
2656
2659
2657 if not revs:
2660 if not revs:
2658 raise error.Abort(_(b"export requires at least one changeset"))
2661 raise error.Abort(_(b"export requires at least one changeset"))
2659 if len(revs) > 1:
2662 if len(revs) > 1:
2660 ui.note(_(b'exporting patches:\n'))
2663 ui.note(_(b'exporting patches:\n'))
2661 else:
2664 else:
2662 ui.note(_(b'exporting patch:\n'))
2665 ui.note(_(b'exporting patch:\n'))
2663
2666
2664 fntemplate = opts.get(b'output')
2667 fntemplate = opts.get(b'output')
2665 if cmdutil.isstdiofilename(fntemplate):
2668 if cmdutil.isstdiofilename(fntemplate):
2666 fntemplate = b''
2669 fntemplate = b''
2667
2670
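# Editor's note: an --output value of '-' (stdout) was normalised to an empty
# template above, so such exports flow through the formatter and pager rather
# than into per-changeset files named from %n, %m, %h and friends.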
2668 if fntemplate:
2671 if fntemplate:
2669 fm = formatter.nullformatter(ui, b'export', opts)
2672 fm = formatter.nullformatter(ui, b'export', opts)
2670 else:
2673 else:
2671 ui.pager(b'export')
2674 ui.pager(b'export')
2672 fm = ui.formatter(b'export', opts)
2675 fm = ui.formatter(b'export', opts)
2673 with fm:
2676 with fm:
2674 cmdutil.export(
2677 cmdutil.export(
2675 repo,
2678 repo,
2676 revs,
2679 revs,
2677 fm,
2680 fm,
2678 fntemplate=fntemplate,
2681 fntemplate=fntemplate,
2679 switch_parent=opts.get(b'switch_parent'),
2682 switch_parent=opts.get(b'switch_parent'),
2680 opts=patch.diffallopts(ui, opts),
2683 opts=patch.diffallopts(ui, opts),
2681 )
2684 )
2682
2685
2683
2686
2684 @command(
2687 @command(
2685 b'files',
2688 b'files',
2686 [
2689 [
2687 (
2690 (
2688 b'r',
2691 b'r',
2689 b'rev',
2692 b'rev',
2690 b'',
2693 b'',
2691 _(b'search the repository as it is in REV'),
2694 _(b'search the repository as it is in REV'),
2692 _(b'REV'),
2695 _(b'REV'),
2693 ),
2696 ),
2694 (
2697 (
2695 b'0',
2698 b'0',
2696 b'print0',
2699 b'print0',
2697 None,
2700 None,
2698 _(b'end filenames with NUL, for use with xargs'),
2701 _(b'end filenames with NUL, for use with xargs'),
2699 ),
2702 ),
2700 ]
2703 ]
2701 + walkopts
2704 + walkopts
2702 + formatteropts
2705 + formatteropts
2703 + subrepoopts,
2706 + subrepoopts,
2704 _(b'[OPTION]... [FILE]...'),
2707 _(b'[OPTION]... [FILE]...'),
2705 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
2708 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
2706 intents={INTENT_READONLY},
2709 intents={INTENT_READONLY},
2707 )
2710 )
2708 def files(ui, repo, *pats, **opts):
2711 def files(ui, repo, *pats, **opts):
2709 """list tracked files
2712 """list tracked files
2710
2713
2711 Print files under Mercurial control in the working directory or
2714 Print files under Mercurial control in the working directory or
2712 specified revision for given files (excluding removed files).
2715 specified revision for given files (excluding removed files).
2713 Files can be specified as filenames or filesets.
2716 Files can be specified as filenames or filesets.
2714
2717
2715 If no files are given to match, this command prints the names
2718 If no files are given to match, this command prints the names
2716 of all files under Mercurial control.
2719 of all files under Mercurial control.
2717
2720
2718 .. container:: verbose
2721 .. container:: verbose
2719
2722
2720 Template:
2723 Template:
2721
2724
2722 The following keywords are supported in addition to the common template
2725 The following keywords are supported in addition to the common template
2723 keywords and functions. See also :hg:`help templates`.
2726 keywords and functions. See also :hg:`help templates`.
2724
2727
2725 :flags: String. Character denoting file's symlink and executable bits.
2728 :flags: String. Character denoting file's symlink and executable bits.
2726 :path: String. Repository-absolute path of the file.
2729 :path: String. Repository-absolute path of the file.
2727 :size: Integer. Size of the file in bytes.
2730 :size: Integer. Size of the file in bytes.
2728
2731
2729 Examples:
2732 Examples:
2730
2733
2731 - list all files under the current directory::
2734 - list all files under the current directory::
2732
2735
2733 hg files .
2736 hg files .
2734
2737
2735 - show sizes and flags for the current revision::
2738 - show sizes and flags for the current revision::
2736
2739
2737 hg files -vr .
2740 hg files -vr .
2738
2741
2739 - list all files named README::
2742 - list all files named README::
2740
2743
2741 hg files -I "**/README"
2744 hg files -I "**/README"
2742
2745
2743 - list all binary files::
2746 - list all binary files::
2744
2747
2745 hg files "set:binary()"
2748 hg files "set:binary()"
2746
2749
2747 - find files containing a regular expression::
2750 - find files containing a regular expression::
2748
2751
2749 hg files "set:grep('bob')"
2752 hg files "set:grep('bob')"
2750
2753
2751 - search tracked file contents with xargs and grep::
2754 - search tracked file contents with xargs and grep::
2752
2755
2753 hg files -0 | xargs -0 grep foo
2756 hg files -0 | xargs -0 grep foo
2754
2757
2755 See :hg:`help patterns` and :hg:`help filesets` for more information
2758 See :hg:`help patterns` and :hg:`help filesets` for more information
2756 on specifying file patterns.
2759 on specifying file patterns.
2757
2760
2758 Returns 0 if a match is found, 1 otherwise.
2761 Returns 0 if a match is found, 1 otherwise.
2759
2762
2760 """
2763 """
2761
2764
2762 opts = pycompat.byteskwargs(opts)
2765 opts = pycompat.byteskwargs(opts)
2763 rev = opts.get(b'rev')
2766 rev = opts.get(b'rev')
2764 if rev:
2767 if rev:
2765 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
2768 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
2766 ctx = scmutil.revsingle(repo, rev, None)
2769 ctx = scmutil.revsingle(repo, rev, None)
2767
2770
2768 end = b'\n'
2771 end = b'\n'
2769 if opts.get(b'print0'):
2772 if opts.get(b'print0'):
2770 end = b'\0'
2773 end = b'\0'
2771 fmt = b'%s' + end
2774 fmt = b'%s' + end
2772
2775
2773 m = scmutil.match(ctx, pats, opts)
2776 m = scmutil.match(ctx, pats, opts)
2774 ui.pager(b'files')
2777 ui.pager(b'files')
2775 uipathfn = scmutil.getuipathfn(ctx.repo(), legacyrelativevalue=True)
2778 uipathfn = scmutil.getuipathfn(ctx.repo(), legacyrelativevalue=True)
2776 with ui.formatter(b'files', opts) as fm:
2779 with ui.formatter(b'files', opts) as fm:
2777 return cmdutil.files(
2780 return cmdutil.files(
2778 ui, ctx, m, uipathfn, fm, fmt, opts.get(b'subrepos')
2781 ui, ctx, m, uipathfn, fm, fmt, opts.get(b'subrepos')
2779 )
2782 )
2780
2783
2781
2784
2782 @command(
2785 @command(
2783 b'forget',
2786 b'forget',
2784 [(b'i', b'interactive', None, _(b'use interactive mode')),]
2787 [(b'i', b'interactive', None, _(b'use interactive mode')),]
2785 + walkopts
2788 + walkopts
2786 + dryrunopts,
2789 + dryrunopts,
2787 _(b'[OPTION]... FILE...'),
2790 _(b'[OPTION]... FILE...'),
2788 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
2791 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
2789 helpbasic=True,
2792 helpbasic=True,
2790 inferrepo=True,
2793 inferrepo=True,
2791 )
2794 )
2792 def forget(ui, repo, *pats, **opts):
2795 def forget(ui, repo, *pats, **opts):
2793 """forget the specified files on the next commit
2796 """forget the specified files on the next commit
2794
2797
2795 Mark the specified files so they will no longer be tracked
2798 Mark the specified files so they will no longer be tracked
2796 after the next commit.
2799 after the next commit.
2797
2800
2798 This only removes files from the current branch, not from the
2801 This only removes files from the current branch, not from the
2799 entire project history, and it does not delete them from the
2802 entire project history, and it does not delete them from the
2800 working directory.
2803 working directory.
2801
2804
2802 To delete the file from the working directory, see :hg:`remove`.
2805 To delete the file from the working directory, see :hg:`remove`.
2803
2806
2804 To undo a forget before the next commit, see :hg:`add`.
2807 To undo a forget before the next commit, see :hg:`add`.
2805
2808
2806 .. container:: verbose
2809 .. container:: verbose
2807
2810
2808 Examples:
2811 Examples:
2809
2812
2810 - forget newly-added binary files::
2813 - forget newly-added binary files::
2811
2814
2812 hg forget "set:added() and binary()"
2815 hg forget "set:added() and binary()"
2813
2816
2814 - forget files that would be excluded by .hgignore::
2817 - forget files that would be excluded by .hgignore::
2815
2818
2816 hg forget "set:hgignore()"
2819 hg forget "set:hgignore()"
2817
2820
2818 Returns 0 on success.
2821 Returns 0 on success.
2819 """
2822 """
2820
2823
2821 opts = pycompat.byteskwargs(opts)
2824 opts = pycompat.byteskwargs(opts)
2822 if not pats:
2825 if not pats:
2823 raise error.Abort(_(b'no files specified'))
2826 raise error.Abort(_(b'no files specified'))
2824
2827
2825 m = scmutil.match(repo[None], pats, opts)
2828 m = scmutil.match(repo[None], pats, opts)
2826 dryrun, interactive = opts.get(b'dry_run'), opts.get(b'interactive')
2829 dryrun, interactive = opts.get(b'dry_run'), opts.get(b'interactive')
2827 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
2830 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
2828 rejected = cmdutil.forget(
2831 rejected = cmdutil.forget(
2829 ui,
2832 ui,
2830 repo,
2833 repo,
2831 m,
2834 m,
2832 prefix=b"",
2835 prefix=b"",
2833 uipathfn=uipathfn,
2836 uipathfn=uipathfn,
2834 explicitonly=False,
2837 explicitonly=False,
2835 dryrun=dryrun,
2838 dryrun=dryrun,
2836 interactive=interactive,
2839 interactive=interactive,
2837 )[0]
2840 )[0]
2838 return 1 if rejected else 0
2841 return 1 if rejected else 0
2839
2842
2840
2843
2841 @command(
2844 @command(
2842 b'graft',
2845 b'graft',
2843 [
2846 [
2844 (b'r', b'rev', [], _(b'revisions to graft'), _(b'REV')),
2847 (b'r', b'rev', [], _(b'revisions to graft'), _(b'REV')),
2845 (
2848 (
2846 b'',
2849 b'',
2847 b'base',
2850 b'base',
2848 b'',
2851 b'',
2849 _(b'base revision when doing the graft merge (ADVANCED)'),
2852 _(b'base revision when doing the graft merge (ADVANCED)'),
2850 _(b'REV'),
2853 _(b'REV'),
2851 ),
2854 ),
2852 (b'c', b'continue', False, _(b'resume interrupted graft')),
2855 (b'c', b'continue', False, _(b'resume interrupted graft')),
2853 (b'', b'stop', False, _(b'stop interrupted graft')),
2856 (b'', b'stop', False, _(b'stop interrupted graft')),
2854 (b'', b'abort', False, _(b'abort interrupted graft')),
2857 (b'', b'abort', False, _(b'abort interrupted graft')),
2855 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
2858 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
2856 (b'', b'log', None, _(b'append graft info to log message')),
2859 (b'', b'log', None, _(b'append graft info to log message')),
2857 (
2860 (
2858 b'',
2861 b'',
2859 b'no-commit',
2862 b'no-commit',
2860 None,
2863 None,
2861 _(b"don't commit, just apply the changes in working directory"),
2864 _(b"don't commit, just apply the changes in working directory"),
2862 ),
2865 ),
2863 (b'f', b'force', False, _(b'force graft')),
2866 (b'f', b'force', False, _(b'force graft')),
2864 (
2867 (
2865 b'D',
2868 b'D',
2866 b'currentdate',
2869 b'currentdate',
2867 False,
2870 False,
2868 _(b'record the current date as commit date'),
2871 _(b'record the current date as commit date'),
2869 ),
2872 ),
2870 (
2873 (
2871 b'U',
2874 b'U',
2872 b'currentuser',
2875 b'currentuser',
2873 False,
2876 False,
2874 _(b'record the current user as committer'),
2877 _(b'record the current user as committer'),
2875 ),
2878 ),
2876 ]
2879 ]
2877 + commitopts2
2880 + commitopts2
2878 + mergetoolopts
2881 + mergetoolopts
2879 + dryrunopts,
2882 + dryrunopts,
2880 _(b'[OPTION]... [-r REV]... REV...'),
2883 _(b'[OPTION]... [-r REV]... REV...'),
2881 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
2884 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
2882 )
2885 )
2883 def graft(ui, repo, *revs, **opts):
2886 def graft(ui, repo, *revs, **opts):
2884 '''copy changes from other branches onto the current branch
2887 '''copy changes from other branches onto the current branch
2885
2888
2886 This command uses Mercurial's merge logic to copy individual
2889 This command uses Mercurial's merge logic to copy individual
2887 changes from other branches without merging branches in the
2890 changes from other branches without merging branches in the
2888 history graph. This is sometimes known as 'backporting' or
2891 history graph. This is sometimes known as 'backporting' or
2889 'cherry-picking'. By default, graft will copy user, date, and
2892 'cherry-picking'. By default, graft will copy user, date, and
2890 description from the source changesets.
2893 description from the source changesets.
2891
2894
2892 Changesets that are ancestors of the current revision, that have
2895 Changesets that are ancestors of the current revision, that have
2893 already been grafted, or that are merges will be skipped.
2896 already been grafted, or that are merges will be skipped.
2894
2897
2895 If --log is specified, log messages will have a comment appended
2898 If --log is specified, log messages will have a comment appended
2896 of the form::
2899 of the form::
2897
2900
2898 (grafted from CHANGESETHASH)
2901 (grafted from CHANGESETHASH)
2899
2902
2900 If --force is specified, revisions will be grafted even if they
2903 If --force is specified, revisions will be grafted even if they
2901 are already ancestors of, or have been grafted to, the destination.
2904 are already ancestors of, or have been grafted to, the destination.
2902 This is useful when the revisions have since been backed out.
2905 This is useful when the revisions have since been backed out.
2903
2906
2904 If a graft merge results in conflicts, the graft process is
2907 If a graft merge results in conflicts, the graft process is
2905 interrupted so that the current merge can be manually resolved.
2908 interrupted so that the current merge can be manually resolved.
2906 Once all conflicts are addressed, the graft process can be
2909 Once all conflicts are addressed, the graft process can be
2907 continued with the -c/--continue option.
2910 continued with the -c/--continue option.
2908
2911
2909 The -c/--continue option reapplies all the earlier options.
2912 The -c/--continue option reapplies all the earlier options.
2910
2913
2911 .. container:: verbose
2914 .. container:: verbose
2912
2915
2913 The --base option exposes more of how graft internally uses merge with a
2916 The --base option exposes more of how graft internally uses merge with a
2914 custom base revision. --base can be used to specify an ancestor other
2917 custom base revision. --base can be used to specify an ancestor other
2915 than the first and only parent.
2918 than the first and only parent.
2916
2919
2917 The command::
2920 The command::
2918
2921
2919 hg graft -r 345 --base 234
2922 hg graft -r 345 --base 234
2920
2923
2921 is thus pretty much the same as::
2924 is thus pretty much the same as::
2922
2925
2923 hg diff -r 234 -r 345 | hg import
2926 hg diff -r 234 -r 345 | hg import
2924
2927
2925 but using merge to resolve conflicts and track moved files.
2928 but using merge to resolve conflicts and track moved files.
2926
2929
2927 The result of a merge can thus be backported as a single commit by
2930 The result of a merge can thus be backported as a single commit by
2928 specifying one of the merge parents as base, and thus effectively
2931 specifying one of the merge parents as base, and thus effectively
2929 grafting the changes from the other side.
2932 grafting the changes from the other side.
2930
2933
2931 It is also possible to collapse multiple changesets and clean up history
2934 It is also possible to collapse multiple changesets and clean up history
2932 by specifying another ancestor as base, much like rebase --collapse
2935 by specifying another ancestor as base, much like rebase --collapse
2933 --keep.
2936 --keep.
2934
2937
2935 The commit message can be tweaked after the fact using commit --amend.
2938 The commit message can be tweaked after the fact using commit --amend.
2936
2939
2937 To use a non-ancestor as the base for backing out changes, see the
2940 To use a non-ancestor as the base for backing out changes, see the
2938 backout command and its hidden --parent option.
2941 backout command and its hidden --parent option.
2939
2942
2940 .. container:: verbose
2943 .. container:: verbose
2941
2944
2942 Examples:
2945 Examples:
2943
2946
2944 - copy a single change to the stable branch and edit its description::
2947 - copy a single change to the stable branch and edit its description::
2945
2948
2946 hg update stable
2949 hg update stable
2947 hg graft --edit 9393
2950 hg graft --edit 9393
2948
2951
2949 - graft a range of changesets with one exception, updating dates::
2952 - graft a range of changesets with one exception, updating dates::
2950
2953
2951 hg graft -D "2085::2093 and not 2091"
2954 hg graft -D "2085::2093 and not 2091"
2952
2955
2953 - continue a graft after resolving conflicts::
2956 - continue a graft after resolving conflicts::
2954
2957
2955 hg graft -c
2958 hg graft -c
2956
2959
2957 - show the source of a grafted changeset::
2960 - show the source of a grafted changeset::
2958
2961
2959 hg log --debug -r .
2962 hg log --debug -r .
2960
2963
2961 - show revisions sorted by date::
2964 - show revisions sorted by date::
2962
2965
2963 hg log -r "sort(all(), date)"
2966 hg log -r "sort(all(), date)"
2964
2967
2965 - backport the result of a merge as a single commit::
2968 - backport the result of a merge as a single commit::
2966
2969
2967 hg graft -r 123 --base 123^
2970 hg graft -r 123 --base 123^
2968
2971
2969 - land a feature branch as one changeset::
2972 - land a feature branch as one changeset::
2970
2973
2971 hg up -cr default
2974 hg up -cr default
2972 hg graft -r featureX --base "ancestor('featureX', 'default')"
2975 hg graft -r featureX --base "ancestor('featureX', 'default')"
2973
2976
2974 See :hg:`help revisions` for more about specifying revisions.
2977 See :hg:`help revisions` for more about specifying revisions.
2975
2978
2976 Returns 0 on successful completion, 1 if there are unresolved files.
2979 Returns 0 on successful completion, 1 if there are unresolved files.
2977 '''
2980 '''
2978 with repo.wlock():
2981 with repo.wlock():
2979 return _dograft(ui, repo, *revs, **opts)
2982 return _dograft(ui, repo, *revs, **opts)
2980
2983
2981
2984
2982 def _dograft(ui, repo, *revs, **opts):
2985 def _dograft(ui, repo, *revs, **opts):
2983 opts = pycompat.byteskwargs(opts)
2986 opts = pycompat.byteskwargs(opts)
2984 if revs and opts.get(b'rev'):
2987 if revs and opts.get(b'rev'):
2985 ui.warn(
2988 ui.warn(
2986 _(
2989 _(
2987 b'warning: inconsistent use of --rev might give unexpected '
2990 b'warning: inconsistent use of --rev might give unexpected '
2988 b'revision ordering!\n'
2991 b'revision ordering!\n'
2989 )
2992 )
2990 )
2993 )
2991
2994
2992 revs = list(revs)
2995 revs = list(revs)
2993 revs.extend(opts.get(b'rev'))
2996 revs.extend(opts.get(b'rev'))
2994 # a dict of data to be stored in state file
2997 # a dict of data to be stored in state file
2995 statedata = {}
2998 statedata = {}
2996 # list of new nodes created by ongoing graft
2999 # list of new nodes created by ongoing graft
2997 statedata[b'newnodes'] = []
3000 statedata[b'newnodes'] = []
2998
3001
2999 cmdutil.resolvecommitoptions(ui, opts)
3002 cmdutil.resolvecommitoptions(ui, opts)
3000
3003
3001 editor = cmdutil.getcommiteditor(
3004 editor = cmdutil.getcommiteditor(
3002 editform=b'graft', **pycompat.strkwargs(opts)
3005 editform=b'graft', **pycompat.strkwargs(opts)
3003 )
3006 )
3004
3007
3005 cmdutil.check_at_most_one_arg(opts, b'abort', b'stop', b'continue')
3008 cmdutil.check_at_most_one_arg(opts, b'abort', b'stop', b'continue')
3006
3009
3007 cont = False
3010 cont = False
3008 if opts.get(b'no_commit'):
3011 if opts.get(b'no_commit'):
3009 cmdutil.check_incompatible_arguments(
3012 cmdutil.check_incompatible_arguments(
3010 opts,
3013 opts,
3011 b'no_commit',
3014 b'no_commit',
3012 [b'edit', b'currentuser', b'currentdate', b'log'],
3015 [b'edit', b'currentuser', b'currentdate', b'log'],
3013 )
3016 )
3014
3017
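# Editor's note: the graftstate file captures the options that matter for
# resuming (user, date, log, no_commit, base, remaining nodes), which is what
# the docstring means by --continue 'reapplies all the earlier options'.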
3015 graftstate = statemod.cmdstate(repo, b'graftstate')
3018 graftstate = statemod.cmdstate(repo, b'graftstate')
3016
3019
3017 if opts.get(b'stop'):
3020 if opts.get(b'stop'):
3018 cmdutil.check_incompatible_arguments(
3021 cmdutil.check_incompatible_arguments(
3019 opts,
3022 opts,
3020 b'stop',
3023 b'stop',
3021 [
3024 [
3022 b'edit',
3025 b'edit',
3023 b'log',
3026 b'log',
3024 b'user',
3027 b'user',
3025 b'date',
3028 b'date',
3026 b'currentdate',
3029 b'currentdate',
3027 b'currentuser',
3030 b'currentuser',
3028 b'rev',
3031 b'rev',
3029 ],
3032 ],
3030 )
3033 )
3031 return _stopgraft(ui, repo, graftstate)
3034 return _stopgraft(ui, repo, graftstate)
3032 elif opts.get(b'abort'):
3035 elif opts.get(b'abort'):
3033 cmdutil.check_incompatible_arguments(
3036 cmdutil.check_incompatible_arguments(
3034 opts,
3037 opts,
3035 b'abort',
3038 b'abort',
3036 [
3039 [
3037 b'edit',
3040 b'edit',
3038 b'log',
3041 b'log',
3039 b'user',
3042 b'user',
3040 b'date',
3043 b'date',
3041 b'currentdate',
3044 b'currentdate',
3042 b'currentuser',
3045 b'currentuser',
3043 b'rev',
3046 b'rev',
3044 ],
3047 ],
3045 )
3048 )
3046 return cmdutil.abortgraft(ui, repo, graftstate)
3049 return cmdutil.abortgraft(ui, repo, graftstate)
3047 elif opts.get(b'continue'):
3050 elif opts.get(b'continue'):
3048 cont = True
3051 cont = True
3049 if revs:
3052 if revs:
3050 raise error.Abort(_(b"can't specify --continue and revisions"))
3053 raise error.Abort(_(b"can't specify --continue and revisions"))
3051 # read in unfinished revisions
3054 # read in unfinished revisions
3052 if graftstate.exists():
3055 if graftstate.exists():
3053 statedata = cmdutil.readgraftstate(repo, graftstate)
3056 statedata = cmdutil.readgraftstate(repo, graftstate)
3054 if statedata.get(b'date'):
3057 if statedata.get(b'date'):
3055 opts[b'date'] = statedata[b'date']
3058 opts[b'date'] = statedata[b'date']
3056 if statedata.get(b'user'):
3059 if statedata.get(b'user'):
3057 opts[b'user'] = statedata[b'user']
3060 opts[b'user'] = statedata[b'user']
3058 if statedata.get(b'log'):
3061 if statedata.get(b'log'):
3059 opts[b'log'] = True
3062 opts[b'log'] = True
3060 if statedata.get(b'no_commit'):
3063 if statedata.get(b'no_commit'):
3061 opts[b'no_commit'] = statedata.get(b'no_commit')
3064 opts[b'no_commit'] = statedata.get(b'no_commit')
3062 if statedata.get(b'base'):
3065 if statedata.get(b'base'):
3063 opts[b'base'] = statedata.get(b'base')
3066 opts[b'base'] = statedata.get(b'base')
3064 nodes = statedata[b'nodes']
3067 nodes = statedata[b'nodes']
3065 revs = [repo[node].rev() for node in nodes]
3068 revs = [repo[node].rev() for node in nodes]
3066 else:
3069 else:
3067 cmdutil.wrongtooltocontinue(repo, _(b'graft'))
3070 cmdutil.wrongtooltocontinue(repo, _(b'graft'))
3068 else:
3071 else:
3069 if not revs:
3072 if not revs:
3070 raise error.Abort(_(b'no revisions specified'))
3073 raise error.Abort(_(b'no revisions specified'))
3071 cmdutil.checkunfinished(repo)
3074 cmdutil.checkunfinished(repo)
3072 cmdutil.bailifchanged(repo)
3075 cmdutil.bailifchanged(repo)
3073 revs = scmutil.revrange(repo, revs)
3076 revs = scmutil.revrange(repo, revs)
3074
3077
3075 skipped = set()
3078 skipped = set()
3076 basectx = None
3079 basectx = None
3077 if opts.get(b'base'):
3080 if opts.get(b'base'):
3078 basectx = scmutil.revsingle(repo, opts[b'base'], None)
3081 basectx = scmutil.revsingle(repo, opts[b'base'], None)
3079 if basectx is None:
3082 if basectx is None:
3080 # check for merges
3083 # check for merges
3081 for rev in repo.revs(b'%ld and merge()', revs):
3084 for rev in repo.revs(b'%ld and merge()', revs):
3082 ui.warn(_(b'skipping ungraftable merge revision %d\n') % rev)
3085 ui.warn(_(b'skipping ungraftable merge revision %d\n') % rev)
3083 skipped.add(rev)
3086 skipped.add(rev)
3084 revs = [r for r in revs if r not in skipped]
3087 revs = [r for r in revs if r not in skipped]
3085 if not revs:
3088 if not revs:
3086 return -1
3089 return -1
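# Editor's note: merge revisions are only rejected when no --base was given;
# with an explicit base the merge is flattened against that ancestor, hence
# the single-revision restriction enforced just below.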
3087 if basectx is not None and len(revs) != 1:
3090 if basectx is not None and len(revs) != 1:
3088 raise error.Abort(_(b'only one revision allowed with --base'))
3091 raise error.Abort(_(b'only one revision allowed with --base'))
3089
3092
3090 # Don't check in the --continue case, in effect retaining --force across
3093 # Don't check in the --continue case, in effect retaining --force across
3091 # --continues. That's because without --force, any revisions we decided to
3094 # --continues. That's because without --force, any revisions we decided to
3092 # skip would have been filtered out here, so they wouldn't have made their
3095 # skip would have been filtered out here, so they wouldn't have made their
3093 # way to the graftstate. With --force, any revisions we would have otherwise
3096 # way to the graftstate. With --force, any revisions we would have otherwise
3094 # skipped would not have been filtered out, and if they hadn't been applied
3097 # skipped would not have been filtered out, and if they hadn't been applied
3095 # already, they'd have been in the graftstate.
3098 # already, they'd have been in the graftstate.
3096 if not (cont or opts.get(b'force')) and basectx is None:
3099 if not (cont or opts.get(b'force')) and basectx is None:
3097 # check for ancestors of dest branch
3100 # check for ancestors of dest branch
3098 ancestors = repo.revs(b'%ld & (::.)', revs)
3101 ancestors = repo.revs(b'%ld & (::.)', revs)
3099 for rev in ancestors:
3102 for rev in ancestors:
3100 ui.warn(_(b'skipping ancestor revision %d:%s\n') % (rev, repo[rev]))
3103 ui.warn(_(b'skipping ancestor revision %d:%s\n') % (rev, repo[rev]))
3101
3104
3102 revs = [r for r in revs if r not in ancestors]
3105 revs = [r for r in revs if r not in ancestors]
3103
3106
3104 if not revs:
3107 if not revs:
3105 return -1
3108 return -1
3106
3109
3107 # analyze revs for earlier grafts
3110 # analyze revs for earlier grafts
3108 ids = {}
3111 ids = {}
3109 for ctx in repo.set(b"%ld", revs):
3112 for ctx in repo.set(b"%ld", revs):
3110 ids[ctx.hex()] = ctx.rev()
3113 ids[ctx.hex()] = ctx.rev()
3111 n = ctx.extra().get(b'source')
3114 n = ctx.extra().get(b'source')
3112 if n:
3115 if n:
3113 ids[n] = ctx.rev()
3116 ids[n] = ctx.rev()
3114
3117
3115 # check ancestors for earlier grafts
3118 # check ancestors for earlier grafts
3116 ui.debug(b'scanning for duplicate grafts\n')
3119 ui.debug(b'scanning for duplicate grafts\n')
3117
3120
3118 # The only changesets we can be sure don't contain grafts of any
3121 # The only changesets we can be sure don't contain grafts of any
3119 # revs are the ones that are common ancestors of *all* revs:
3122 # revs are the ones that are common ancestors of *all* revs:
3120 for rev in repo.revs(b'only(%d,ancestor(%ld))', repo[b'.'].rev(), revs):
3123 for rev in repo.revs(b'only(%d,ancestor(%ld))', repo[b'.'].rev(), revs):
3121 ctx = repo[rev]
3124 ctx = repo[rev]
3122 n = ctx.extra().get(b'source')
3125 n = ctx.extra().get(b'source')
3123 if n in ids:
3126 if n in ids:
3124 try:
3127 try:
3125 r = repo[n].rev()
3128 r = repo[n].rev()
3126 except error.RepoLookupError:
3129 except error.RepoLookupError:
3127 r = None
3130 r = None
3128 if r in revs:
3131 if r in revs:
3129 ui.warn(
3132 ui.warn(
3130 _(
3133 _(
3131 b'skipping revision %d:%s '
3134 b'skipping revision %d:%s '
3132 b'(already grafted to %d:%s)\n'
3135 b'(already grafted to %d:%s)\n'
3133 )
3136 )
3134 % (r, repo[r], rev, ctx)
3137 % (r, repo[r], rev, ctx)
3135 )
3138 )
3136 revs.remove(r)
3139 revs.remove(r)
3137 elif ids[n] in revs:
3140 elif ids[n] in revs:
3138 if r is None:
3141 if r is None:
3139 ui.warn(
3142 ui.warn(
3140 _(
3143 _(
3141 b'skipping already grafted revision %d:%s '
3144 b'skipping already grafted revision %d:%s '
3142 b'(%d:%s also has unknown origin %s)\n'
3145 b'(%d:%s also has unknown origin %s)\n'
3143 )
3146 )
3144 % (ids[n], repo[ids[n]], rev, ctx, n[:12])
3147 % (ids[n], repo[ids[n]], rev, ctx, n[:12])
3145 )
3148 )
3146 else:
3149 else:
3147 ui.warn(
3150 ui.warn(
3148 _(
3151 _(
3149 b'skipping already grafted revision %d:%s '
3152 b'skipping already grafted revision %d:%s '
3150 b'(%d:%s also has origin %d:%s)\n'
3153 b'(%d:%s also has origin %d:%s)\n'
3151 )
3154 )
3152 % (ids[n], repo[ids[n]], rev, ctx, r, n[:12])
3155 % (ids[n], repo[ids[n]], rev, ctx, r, n[:12])
3153 )
3156 )
3154 revs.remove(ids[n])
3157 revs.remove(ids[n])
3155 elif ctx.hex() in ids:
3158 elif ctx.hex() in ids:
3156 r = ids[ctx.hex()]
3159 r = ids[ctx.hex()]
3157 if r in revs:
3160 if r in revs:
3158 ui.warn(
3161 ui.warn(
3159 _(
3162 _(
3160 b'skipping already grafted revision %d:%s '
3163 b'skipping already grafted revision %d:%s '
3161 b'(was grafted from %d:%s)\n'
3164 b'(was grafted from %d:%s)\n'
3162 )
3165 )
3163 % (r, repo[r], rev, ctx)
3166 % (r, repo[r], rev, ctx)
3164 )
3167 )
3165 revs.remove(r)
3168 revs.remove(r)
3166 if not revs:
3169 if not revs:
3167 return -1
3170 return -1
3168
3171
3169 if opts.get(b'no_commit'):
3172 if opts.get(b'no_commit'):
3170 statedata[b'no_commit'] = True
3173 statedata[b'no_commit'] = True
3171 if opts.get(b'base'):
3174 if opts.get(b'base'):
3172 statedata[b'base'] = opts[b'base']
3175 statedata[b'base'] = opts[b'base']
3173 for pos, ctx in enumerate(repo.set(b"%ld", revs)):
3176 for pos, ctx in enumerate(repo.set(b"%ld", revs)):
3174 desc = b'%d:%s "%s"' % (
3177 desc = b'%d:%s "%s"' % (
3175 ctx.rev(),
3178 ctx.rev(),
3176 ctx,
3179 ctx,
3177 ctx.description().split(b'\n', 1)[0],
3180 ctx.description().split(b'\n', 1)[0],
3178 )
3181 )
3179 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
3182 names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
3180 if names:
3183 if names:
3181 desc += b' (%s)' % b' '.join(names)
3184 desc += b' (%s)' % b' '.join(names)
3182 ui.status(_(b'grafting %s\n') % desc)
3185 ui.status(_(b'grafting %s\n') % desc)
3183 if opts.get(b'dry_run'):
3186 if opts.get(b'dry_run'):
3184 continue
3187 continue
3185
3188
3186 source = ctx.extra().get(b'source')
3189 source = ctx.extra().get(b'source')
3187 extra = {}
3190 extra = {}
3188 if source:
3191 if source:
3189 extra[b'source'] = source
3192 extra[b'source'] = source
3190 extra[b'intermediate-source'] = ctx.hex()
3193 extra[b'intermediate-source'] = ctx.hex()
3191 else:
3194 else:
3192 extra[b'source'] = ctx.hex()
3195 extra[b'source'] = ctx.hex()
3193 user = ctx.user()
3196 user = ctx.user()
3194 if opts.get(b'user'):
3197 if opts.get(b'user'):
3195 user = opts[b'user']
3198 user = opts[b'user']
3196 statedata[b'user'] = user
3199 statedata[b'user'] = user
3197 date = ctx.date()
3200 date = ctx.date()
3198 if opts.get(b'date'):
3201 if opts.get(b'date'):
3199 date = opts[b'date']
3202 date = opts[b'date']
3200 statedata[b'date'] = date
3203 statedata[b'date'] = date
3201 message = ctx.description()
3204 message = ctx.description()
3202 if opts.get(b'log'):
3205 if opts.get(b'log'):
3203 message += b'\n(grafted from %s)' % ctx.hex()
3206 message += b'\n(grafted from %s)' % ctx.hex()
3204 statedata[b'log'] = True
3207 statedata[b'log'] = True
3205
3208
3206 # we don't merge the first commit when continuing
3209 # we don't merge the first commit when continuing
3207 if not cont:
3210 if not cont:
3208 # perform the graft merge with p1(rev) as 'ancestor'
3211 # perform the graft merge with p1(rev) as 'ancestor'
3209 overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
3212 overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
3210 base = ctx.p1() if basectx is None else basectx
3213 base = ctx.p1() if basectx is None else basectx
3211 with ui.configoverride(overrides, b'graft'):
3214 with ui.configoverride(overrides, b'graft'):
3212 stats = mergemod.graft(repo, ctx, base, [b'local', b'graft'])
3215 stats = mergemod.graft(repo, ctx, base, [b'local', b'graft'])
3213 # report any conflicts
3216 # report any conflicts
3214 if stats.unresolvedcount > 0:
3217 if stats.unresolvedcount > 0:
3215 # write out state for --continue
3218 # write out state for --continue
3216 nodes = [repo[rev].hex() for rev in revs[pos:]]
3219 nodes = [repo[rev].hex() for rev in revs[pos:]]
3217 statedata[b'nodes'] = nodes
3220 statedata[b'nodes'] = nodes
3218 stateversion = 1
3221 stateversion = 1
3219 graftstate.save(stateversion, statedata)
3222 graftstate.save(stateversion, statedata)
3220 ui.error(_(b"abort: unresolved conflicts, can't continue\n"))
3223 ui.error(_(b"abort: unresolved conflicts, can't continue\n"))
3221 ui.error(_(b"(use 'hg resolve' and 'hg graft --continue')\n"))
3224 ui.error(_(b"(use 'hg resolve' and 'hg graft --continue')\n"))
3222 return 1
3225 return 1
3223 else:
3226 else:
3224 cont = False
3227 cont = False
3225
3228
3226 # commit if --no-commit is false
3229 # commit if --no-commit is false
3227 if not opts.get(b'no_commit'):
3230 if not opts.get(b'no_commit'):
3228 node = repo.commit(
3231 node = repo.commit(
3229 text=message, user=user, date=date, extra=extra, editor=editor
3232 text=message, user=user, date=date, extra=extra, editor=editor
3230 )
3233 )
3231 if node is None:
3234 if node is None:
3232 ui.warn(
3235 ui.warn(
3233 _(b'note: graft of %d:%s created no changes to commit\n')
3236 _(b'note: graft of %d:%s created no changes to commit\n')
3234 % (ctx.rev(), ctx)
3237 % (ctx.rev(), ctx)
3235 )
3238 )
3236 # checking that newnodes exists because old state files won't have it
3239 # checking that newnodes exists because old state files won't have it
3237 elif statedata.get(b'newnodes') is not None:
3240 elif statedata.get(b'newnodes') is not None:
3238 statedata[b'newnodes'].append(node)
3241 statedata[b'newnodes'].append(node)
3239
3242
3240 # remove state when we complete successfully
3243 # remove state when we complete successfully
3241 if not opts.get(b'dry_run'):
3244 if not opts.get(b'dry_run'):
3242 graftstate.delete()
3245 graftstate.delete()
3243
3246
3244 return 0
3247 return 0
3245
3248
3246
3249
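
# A hypothetical helper, illustrative only: it reads back the graftstate
# written by graft() above, using the same cmdutil.readgraftstate() call and
# the b'nodes' key, so the pending work of an interrupted graft can be listed.
def _pendinggraftnodes(repo, graftstate):
    """Return the nodes a later `hg graft --continue` would still process."""
    if not graftstate.exists():
        return []
    statedata = cmdutil.readgraftstate(repo, graftstate)
    return statedata.get(b'nodes', [])

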
3247 def _stopgraft(ui, repo, graftstate):
3250 def _stopgraft(ui, repo, graftstate):
3248 """stop the interrupted graft"""
3251 """stop the interrupted graft"""
3249 if not graftstate.exists():
3252 if not graftstate.exists():
3250 raise error.Abort(_(b"no interrupted graft found"))
3253 raise error.Abort(_(b"no interrupted graft found"))
3251 pctx = repo[b'.']
3254 pctx = repo[b'.']
3252 mergemod.clean_update(pctx)
3255 mergemod.clean_update(pctx)
3253 graftstate.delete()
3256 graftstate.delete()
3254 ui.status(_(b"stopped the interrupted graft\n"))
3257 ui.status(_(b"stopped the interrupted graft\n"))
3255 ui.status(_(b"working directory is now at %s\n") % pctx.hex()[:12])
3258 ui.status(_(b"working directory is now at %s\n") % pctx.hex()[:12])
3256 return 0
3259 return 0
3257
3260
3258
3261
3259 statemod.addunfinished(
3262 statemod.addunfinished(
3260 b'graft',
3263 b'graft',
3261 fname=b'graftstate',
3264 fname=b'graftstate',
3262 clearable=True,
3265 clearable=True,
3263 stopflag=True,
3266 stopflag=True,
3264 continueflag=True,
3267 continueflag=True,
3265 abortfunc=cmdutil.hgabortgraft,
3268 abortfunc=cmdutil.hgabortgraft,
3266 cmdhint=_(b"use 'hg graft --continue' or 'hg graft --stop' to stop"),
3269 cmdhint=_(b"use 'hg graft --continue' or 'hg graft --stop' to stop"),
3267 )
3270 )
3268
3271
3269
3272
3270 @command(
3273 @command(
3271 b'grep',
3274 b'grep',
3272 [
3275 [
3273 (b'0', b'print0', None, _(b'end fields with NUL')),
3276 (b'0', b'print0', None, _(b'end fields with NUL')),
3274 (b'', b'all', None, _(b'an alias to --diff (DEPRECATED)')),
3277 (b'', b'all', None, _(b'an alias to --diff (DEPRECATED)')),
3275 (
3278 (
3276 b'',
3279 b'',
3277 b'diff',
3280 b'diff',
3278 None,
3281 None,
3279 _(
3282 _(
3280 b'search revision differences for when the pattern was added '
3283 b'search revision differences for when the pattern was added '
3281 b'or removed'
3284 b'or removed'
3282 ),
3285 ),
3283 ),
3286 ),
3284 (b'a', b'text', None, _(b'treat all files as text')),
3287 (b'a', b'text', None, _(b'treat all files as text')),
3285 (
3288 (
3286 b'f',
3289 b'f',
3287 b'follow',
3290 b'follow',
3288 None,
3291 None,
3289 _(
3292 _(
3290 b'follow changeset history,'
3293 b'follow changeset history,'
3291 b' or file history across copies and renames'
3294 b' or file history across copies and renames'
3292 ),
3295 ),
3293 ),
3296 ),
3294 (b'i', b'ignore-case', None, _(b'ignore case when matching')),
3297 (b'i', b'ignore-case', None, _(b'ignore case when matching')),
3295 (
3298 (
3296 b'l',
3299 b'l',
3297 b'files-with-matches',
3300 b'files-with-matches',
3298 None,
3301 None,
3299 _(b'print only filenames and revisions that match'),
3302 _(b'print only filenames and revisions that match'),
3300 ),
3303 ),
3301 (b'n', b'line-number', None, _(b'print matching line numbers')),
3304 (b'n', b'line-number', None, _(b'print matching line numbers')),
3302 (
3305 (
3303 b'r',
3306 b'r',
3304 b'rev',
3307 b'rev',
3305 [],
3308 [],
3306 _(b'search files changed within revision range'),
3309 _(b'search files changed within revision range'),
3307 _(b'REV'),
3310 _(b'REV'),
3308 ),
3311 ),
3309 (
3312 (
3310 b'',
3313 b'',
3311 b'all-files',
3314 b'all-files',
3312 None,
3315 None,
3313 _(
3316 _(
3314 b'include all files in the changeset while grepping (DEPRECATED)'
3317 b'include all files in the changeset while grepping (DEPRECATED)'
3315 ),
3318 ),
3316 ),
3319 ),
3317 (b'u', b'user', None, _(b'list the author (long with -v)')),
3320 (b'u', b'user', None, _(b'list the author (long with -v)')),
3318 (b'd', b'date', None, _(b'list the date (short with -q)')),
3321 (b'd', b'date', None, _(b'list the date (short with -q)')),
3319 ]
3322 ]
3320 + formatteropts
3323 + formatteropts
3321 + walkopts,
3324 + walkopts,
3322 _(b'[--diff] [OPTION]... PATTERN [FILE]...'),
3325 _(b'[--diff] [OPTION]... PATTERN [FILE]...'),
3323 helpcategory=command.CATEGORY_FILE_CONTENTS,
3326 helpcategory=command.CATEGORY_FILE_CONTENTS,
3324 inferrepo=True,
3327 inferrepo=True,
3325 intents={INTENT_READONLY},
3328 intents={INTENT_READONLY},
3326 )
3329 )
3327 def grep(ui, repo, pattern, *pats, **opts):
3330 def grep(ui, repo, pattern, *pats, **opts):
3328 """search for a pattern in specified files
3331 """search for a pattern in specified files
3329
3332
3330 Search the working directory or revision history for a regular
3333 Search the working directory or revision history for a regular
3331 expression in the specified files or the entire repository.
3334 expression in the specified files or the entire repository.
3332
3335
3333 By default, grep searches the repository files in the working
3336 By default, grep searches the repository files in the working
3334 directory and prints the files where it finds a match. To specify
3337 directory and prints the files where it finds a match. To specify
3335 historical revisions instead of the working directory, use the
3338 historical revisions instead of the working directory, use the
3336 --rev flag.
3339 --rev flag.
3337
3340
3338 To instead search historical revision differences that contain a
3341 To instead search historical revision differences that contain a
3339 change in match status ("-" for a match that becomes a non-match,
3342 change in match status ("-" for a match that becomes a non-match,
3340 or "+" for a non-match that becomes a match), use the --diff flag.
3343 or "+" for a non-match that becomes a match), use the --diff flag.
3341
3344
3342 PATTERN can be any Python (roughly Perl-compatible) regular
3345 PATTERN can be any Python (roughly Perl-compatible) regular
3343 expression.
3346 expression.
3344
3347
3345 If no FILEs are specified and the --rev flag isn't supplied, all
3348 If no FILEs are specified and the --rev flag isn't supplied, all
3346 files in the working directory are searched. When using the --rev
3349 files in the working directory are searched. When using the --rev
3347 flag and specifying FILEs, use the --follow argument to also
3350 flag and specifying FILEs, use the --follow argument to also
3348 follow the specified FILEs across renames and copies.
3351 follow the specified FILEs across renames and copies.
3349
3352
3350 .. container:: verbose
3353 .. container:: verbose
3351
3354
3352 Template:
3355 Template:
3353
3356
3354 The following keywords are supported in addition to the common template
3357 The following keywords are supported in addition to the common template
3355 keywords and functions. See also :hg:`help templates`.
3358 keywords and functions. See also :hg:`help templates`.
3356
3359
3357 :change: String. Character denoting insertion ``+`` or removal ``-``.
3360 :change: String. Character denoting insertion ``+`` or removal ``-``.
3358 Available if ``--diff`` is specified.
3361 Available if ``--diff`` is specified.
3359 :lineno: Integer. Line number of the match.
3362 :lineno: Integer. Line number of the match.
3360 :path: String. Repository-absolute path of the file.
3363 :path: String. Repository-absolute path of the file.
3361 :texts: List of text chunks.
3364 :texts: List of text chunks.
3362
3365
3363 And each entry of ``{texts}`` provides the following sub-keywords.
3366 And each entry of ``{texts}`` provides the following sub-keywords.
3364
3367
3365 :matched: Boolean. True if the chunk matches the specified pattern.
3368 :matched: Boolean. True if the chunk matches the specified pattern.
3366 :text: String. Chunk content.
3369 :text: String. Chunk content.
3367
3370
3368 See :hg:`help templates.operators` for the list expansion syntax.
3371 See :hg:`help templates.operators` for the list expansion syntax.
3369
3372
3370 Returns 0 if a match is found, 1 otherwise.
3373 Returns 0 if a match is found, 1 otherwise.
3371
3374
3372 """
3375 """
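    # An illustrative use of the template keywords documented above (the
    # pattern and template here are examples, not defaults):
    #   hg grep --diff -T '{path}:{lineno}:{change} {texts % "{text}"}\n' PATTERN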
3373 cmdutil.check_incompatible_arguments(opts, 'all_files', ['all', 'diff'])
3376 cmdutil.check_incompatible_arguments(opts, 'all_files', ['all', 'diff'])
3374 opts = pycompat.byteskwargs(opts)
3377 opts = pycompat.byteskwargs(opts)
3375 diff = opts.get(b'all') or opts.get(b'diff')
3378 diff = opts.get(b'all') or opts.get(b'diff')
3376 follow = opts.get(b'follow')
3379 follow = opts.get(b'follow')
3377 if opts.get(b'all_files') is None and not diff:
3380 if opts.get(b'all_files') is None and not diff:
3378 opts[b'all_files'] = True
3381 opts[b'all_files'] = True
3379 plaingrep = (
3382 plaingrep = (
3380 opts.get(b'all_files')
3383 opts.get(b'all_files')
3381 and not opts.get(b'rev')
3384 and not opts.get(b'rev')
3382 and not opts.get(b'follow')
3385 and not opts.get(b'follow')
3383 )
3386 )
3384 all_files = opts.get(b'all_files')
3387 all_files = opts.get(b'all_files')
3385 if plaingrep:
3388 if plaingrep:
3386 opts[b'rev'] = [b'wdir()']
3389 opts[b'rev'] = [b'wdir()']
3387
3390
3388 reflags = re.M
3391 reflags = re.M
3389 if opts.get(b'ignore_case'):
3392 if opts.get(b'ignore_case'):
3390 reflags |= re.I
3393 reflags |= re.I
3391 try:
3394 try:
3392 regexp = util.re.compile(pattern, reflags)
3395 regexp = util.re.compile(pattern, reflags)
3393 except re.error as inst:
3396 except re.error as inst:
3394 ui.warn(
3397 ui.warn(
3395 _(b"grep: invalid match pattern: %s\n") % pycompat.bytestr(inst)
3398 _(b"grep: invalid match pattern: %s\n") % pycompat.bytestr(inst)
3396 )
3399 )
3397 return 1
3400 return 1
3398 sep, eol = b':', b'\n'
3401 sep, eol = b':', b'\n'
3399 if opts.get(b'print0'):
3402 if opts.get(b'print0'):
3400 sep = eol = b'\0'
3403 sep = eol = b'\0'
3401
3404
3402 searcher = grepmod.grepsearcher(
3405 searcher = grepmod.grepsearcher(
3403 ui, repo, regexp, all_files=all_files, diff=diff, follow=follow
3406 ui, repo, regexp, all_files=all_files, diff=diff, follow=follow
3404 )
3407 )
3405
3408
3406 getfile = searcher._getfile
3409 getfile = searcher._getfile
3407
3410
3408 uipathfn = scmutil.getuipathfn(repo)
3411 uipathfn = scmutil.getuipathfn(repo)
3409
3412
3410 def display(fm, fn, ctx, pstates, states):
3413 def display(fm, fn, ctx, pstates, states):
3411 rev = scmutil.intrev(ctx)
3414 rev = scmutil.intrev(ctx)
3412 if fm.isplain():
3415 if fm.isplain():
3413 formatuser = ui.shortuser
3416 formatuser = ui.shortuser
3414 else:
3417 else:
3415 formatuser = pycompat.bytestr
3418 formatuser = pycompat.bytestr
3416 if ui.quiet:
3419 if ui.quiet:
3417 datefmt = b'%Y-%m-%d'
3420 datefmt = b'%Y-%m-%d'
3418 else:
3421 else:
3419 datefmt = b'%a %b %d %H:%M:%S %Y %1%2'
3422 datefmt = b'%a %b %d %H:%M:%S %Y %1%2'
3420 found = False
3423 found = False
3421
3424
3422 @util.cachefunc
3425 @util.cachefunc
3423 def binary():
3426 def binary():
3424 flog = getfile(fn)
3427 flog = getfile(fn)
3425 try:
3428 try:
3426 return stringutil.binary(flog.read(ctx.filenode(fn)))
3429 return stringutil.binary(flog.read(ctx.filenode(fn)))
3427 except error.WdirUnsupported:
3430 except error.WdirUnsupported:
3428 return ctx[fn].isbinary()
3431 return ctx[fn].isbinary()
3429
3432
3430 fieldnamemap = {b'linenumber': b'lineno'}
3433 fieldnamemap = {b'linenumber': b'lineno'}
3431 if diff:
3434 if diff:
3432 iter = grepmod.difflinestates(pstates, states)
3435 iter = grepmod.difflinestates(pstates, states)
3433 else:
3436 else:
3434 iter = [(b'', l) for l in states]
3437 iter = [(b'', l) for l in states]
3435 for change, l in iter:
3438 for change, l in iter:
3436 fm.startitem()
3439 fm.startitem()
3437 fm.context(ctx=ctx)
3440 fm.context(ctx=ctx)
3438 fm.data(node=fm.hexfunc(scmutil.binnode(ctx)), path=fn)
3441 fm.data(node=fm.hexfunc(scmutil.binnode(ctx)), path=fn)
3439 fm.plain(uipathfn(fn), label=b'grep.filename')
3442 fm.plain(uipathfn(fn), label=b'grep.filename')
3440
3443
3441 cols = [
3444 cols = [
3442 (b'rev', b'%d', rev, not plaingrep, b''),
3445 (b'rev', b'%d', rev, not plaingrep, b''),
3443 (
3446 (
3444 b'linenumber',
3447 b'linenumber',
3445 b'%d',
3448 b'%d',
3446 l.linenum,
3449 l.linenum,
3447 opts.get(b'line_number'),
3450 opts.get(b'line_number'),
3448 b'',
3451 b'',
3449 ),
3452 ),
3450 ]
3453 ]
3451 if diff:
3454 if diff:
3452 cols.append(
3455 cols.append(
3453 (
3456 (
3454 b'change',
3457 b'change',
3455 b'%s',
3458 b'%s',
3456 change,
3459 change,
3457 True,
3460 True,
3458 b'grep.inserted '
3461 b'grep.inserted '
3459 if change == b'+'
3462 if change == b'+'
3460 else b'grep.deleted ',
3463 else b'grep.deleted ',
3461 )
3464 )
3462 )
3465 )
3463 cols.extend(
3466 cols.extend(
3464 [
3467 [
3465 (
3468 (
3466 b'user',
3469 b'user',
3467 b'%s',
3470 b'%s',
3468 formatuser(ctx.user()),
3471 formatuser(ctx.user()),
3469 opts.get(b'user'),
3472 opts.get(b'user'),
3470 b'',
3473 b'',
3471 ),
3474 ),
3472 (
3475 (
3473 b'date',
3476 b'date',
3474 b'%s',
3477 b'%s',
3475 fm.formatdate(ctx.date(), datefmt),
3478 fm.formatdate(ctx.date(), datefmt),
3476 opts.get(b'date'),
3479 opts.get(b'date'),
3477 b'',
3480 b'',
3478 ),
3481 ),
3479 ]
3482 ]
3480 )
3483 )
3481 for name, fmt, data, cond, extra_label in cols:
3484 for name, fmt, data, cond, extra_label in cols:
3482 if cond:
3485 if cond:
3483 fm.plain(sep, label=b'grep.sep')
3486 fm.plain(sep, label=b'grep.sep')
3484 field = fieldnamemap.get(name, name)
3487 field = fieldnamemap.get(name, name)
3485 label = extra_label + (b'grep.%s' % name)
3488 label = extra_label + (b'grep.%s' % name)
3486 fm.condwrite(cond, field, fmt, data, label=label)
3489 fm.condwrite(cond, field, fmt, data, label=label)
3487 if not opts.get(b'files_with_matches'):
3490 if not opts.get(b'files_with_matches'):
3488 fm.plain(sep, label=b'grep.sep')
3491 fm.plain(sep, label=b'grep.sep')
3489 if not opts.get(b'text') and binary():
3492 if not opts.get(b'text') and binary():
3490 fm.plain(_(b" Binary file matches"))
3493 fm.plain(_(b" Binary file matches"))
3491 else:
3494 else:
3492 displaymatches(fm.nested(b'texts', tmpl=b'{text}'), l)
3495 displaymatches(fm.nested(b'texts', tmpl=b'{text}'), l)
3493 fm.plain(eol)
3496 fm.plain(eol)
3494 found = True
3497 found = True
3495 if opts.get(b'files_with_matches'):
3498 if opts.get(b'files_with_matches'):
3496 break
3499 break
3497 return found
3500 return found
3498
3501
3499 def displaymatches(fm, l):
3502 def displaymatches(fm, l):
3500 p = 0
3503 p = 0
3501 for s, e in l.findpos(regexp):
3504 for s, e in l.findpos(regexp):
3502 if p < s:
3505 if p < s:
3503 fm.startitem()
3506 fm.startitem()
3504 fm.write(b'text', b'%s', l.line[p:s])
3507 fm.write(b'text', b'%s', l.line[p:s])
3505 fm.data(matched=False)
3508 fm.data(matched=False)
3506 fm.startitem()
3509 fm.startitem()
3507 fm.write(b'text', b'%s', l.line[s:e], label=b'grep.match')
3510 fm.write(b'text', b'%s', l.line[s:e], label=b'grep.match')
3508 fm.data(matched=True)
3511 fm.data(matched=True)
3509 p = e
3512 p = e
3510 if p < len(l.line):
3513 if p < len(l.line):
3511 fm.startitem()
3514 fm.startitem()
3512 fm.write(b'text', b'%s', l.line[p:])
3515 fm.write(b'text', b'%s', l.line[p:])
3513 fm.data(matched=False)
3516 fm.data(matched=False)
3514 fm.end()
3517 fm.end()
3515
3518
3516 found = False
3519 found = False
3517
3520
3518 wopts = logcmdutil.walkopts(
3521 wopts = logcmdutil.walkopts(
3519 pats=pats,
3522 pats=pats,
3520 opts=opts,
3523 opts=opts,
3521 revspec=opts[b'rev'],
3524 revspec=opts[b'rev'],
3522 include_pats=opts[b'include'],
3525 include_pats=opts[b'include'],
3523 exclude_pats=opts[b'exclude'],
3526 exclude_pats=opts[b'exclude'],
3524 follow=follow,
3527 follow=follow,
3525 force_changelog_traversal=all_files,
3528 force_changelog_traversal=all_files,
3526 filter_revisions_by_pats=not all_files,
3529 filter_revisions_by_pats=not all_files,
3527 )
3530 )
3528 revs, makefilematcher = logcmdutil.makewalker(repo, wopts)
3531 revs, makefilematcher = logcmdutil.makewalker(repo, wopts)
3529
3532
3530 ui.pager(b'grep')
3533 ui.pager(b'grep')
3531 fm = ui.formatter(b'grep', opts)
3534 fm = ui.formatter(b'grep', opts)
3532 for fn, ctx, pstates, states in searcher.searchfiles(revs, makefilematcher):
3535 for fn, ctx, pstates, states in searcher.searchfiles(revs, makefilematcher):
3533 r = display(fm, fn, ctx, pstates, states)
3536 r = display(fm, fn, ctx, pstates, states)
3534 found = found or r
3537 found = found or r
3535 if r and not diff and not all_files:
3538 if r and not diff and not all_files:
3536 searcher.skipfile(fn, ctx.rev())
3539 searcher.skipfile(fn, ctx.rev())
3537 fm.end()
3540 fm.end()
3538
3541
3539 return not found
3542 return not found
3540
3543
3541
3544
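

# A rough, hypothetical driver, illustrative only, for the search pipeline
# used by grep() above: build walk options, derive the revisions and a file
# matcher, then iterate matches per (file, revision). The literal values
# passed to walkopts() are assumptions; grep() itself derives them from the
# command-line options.
def _countmatches(ui, repo, regexp):
    searcher = grepmod.grepsearcher(ui, repo, regexp, all_files=True)
    wopts = logcmdutil.walkopts(
        pats=(),
        opts={},
        revspec=[b'wdir()'],
        include_pats=[],
        exclude_pats=[],
        follow=False,
        force_changelog_traversal=True,
        filter_revisions_by_pats=False,
    )
    revs, makefilematcher = logcmdutil.makewalker(repo, wopts)
    hits = 0
    for fn, ctx, pstates, states in searcher.searchfiles(revs, makefilematcher):
        hits += len(states)
    return hits

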
3542 @command(
3545 @command(
3543 b'heads',
3546 b'heads',
3544 [
3547 [
3545 (
3548 (
3546 b'r',
3549 b'r',
3547 b'rev',
3550 b'rev',
3548 b'',
3551 b'',
3549 _(b'show only heads which are descendants of STARTREV'),
3552 _(b'show only heads which are descendants of STARTREV'),
3550 _(b'STARTREV'),
3553 _(b'STARTREV'),
3551 ),
3554 ),
3552 (b't', b'topo', False, _(b'show topological heads only')),
3555 (b't', b'topo', False, _(b'show topological heads only')),
3553 (
3556 (
3554 b'a',
3557 b'a',
3555 b'active',
3558 b'active',
3556 False,
3559 False,
3557 _(b'show active branchheads only (DEPRECATED)'),
3560 _(b'show active branchheads only (DEPRECATED)'),
3558 ),
3561 ),
3559 (b'c', b'closed', False, _(b'show normal and closed branch heads')),
3562 (b'c', b'closed', False, _(b'show normal and closed branch heads')),
3560 ]
3563 ]
3561 + templateopts,
3564 + templateopts,
3562 _(b'[-ct] [-r STARTREV] [REV]...'),
3565 _(b'[-ct] [-r STARTREV] [REV]...'),
3563 helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
3566 helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
3564 intents={INTENT_READONLY},
3567 intents={INTENT_READONLY},
3565 )
3568 )
3566 def heads(ui, repo, *branchrevs, **opts):
3569 def heads(ui, repo, *branchrevs, **opts):
3567 """show branch heads
3570 """show branch heads
3568
3571
3569 With no arguments, show all open branch heads in the repository.
3572 With no arguments, show all open branch heads in the repository.
3570 Branch heads are changesets that have no descendants on the
3573 Branch heads are changesets that have no descendants on the
3571 same branch. They are where development generally takes place and
3574 same branch. They are where development generally takes place and
3572 are the usual targets for update and merge operations.
3575 are the usual targets for update and merge operations.
3573
3576
3574 If one or more REVs are given, only open branch heads on the
3577 If one or more REVs are given, only open branch heads on the
3575 branches associated with the specified changesets are shown. This
3578 branches associated with the specified changesets are shown. This
3576 means that you can use :hg:`heads .` to see the heads on the
3579 means that you can use :hg:`heads .` to see the heads on the
3577 currently checked-out branch.
3580 currently checked-out branch.
3578
3581
3579 If -c/--closed is specified, also show branch heads marked closed
3582 If -c/--closed is specified, also show branch heads marked closed
3580 (see :hg:`commit --close-branch`).
3583 (see :hg:`commit --close-branch`).
3581
3584
3582 If STARTREV is specified, only those heads that are descendants of
3585 If STARTREV is specified, only those heads that are descendants of
3583 STARTREV will be displayed.
3586 STARTREV will be displayed.
3584
3587
3585 If -t/--topo is specified, named branch mechanics will be ignored and only
3588 If -t/--topo is specified, named branch mechanics will be ignored and only
3586 topological heads (changesets with no children) will be shown.
3589 topological heads (changesets with no children) will be shown.
3587
3590
3588 Returns 0 if matching heads are found, 1 if not.
3591 Returns 0 if matching heads are found, 1 if not.
3589 """
3592 """
3590
3593
3591 opts = pycompat.byteskwargs(opts)
3594 opts = pycompat.byteskwargs(opts)
3592 start = None
3595 start = None
3593 rev = opts.get(b'rev')
3596 rev = opts.get(b'rev')
3594 if rev:
3597 if rev:
3595 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
3598 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
3596 start = scmutil.revsingle(repo, rev, None).node()
3599 start = scmutil.revsingle(repo, rev, None).node()
3597
3600
3598 if opts.get(b'topo'):
3601 if opts.get(b'topo'):
3599 heads = [repo[h] for h in repo.heads(start)]
3602 heads = [repo[h] for h in repo.heads(start)]
3600 else:
3603 else:
3601 heads = []
3604 heads = []
3602 for branch in repo.branchmap():
3605 for branch in repo.branchmap():
3603 heads += repo.branchheads(branch, start, opts.get(b'closed'))
3606 heads += repo.branchheads(branch, start, opts.get(b'closed'))
3604 heads = [repo[h] for h in heads]
3607 heads = [repo[h] for h in heads]
3605
3608
3606 if branchrevs:
3609 if branchrevs:
3607 branches = {
3610 branches = {
3608 repo[r].branch() for r in scmutil.revrange(repo, branchrevs)
3611 repo[r].branch() for r in scmutil.revrange(repo, branchrevs)
3609 }
3612 }
3610 heads = [h for h in heads if h.branch() in branches]
3613 heads = [h for h in heads if h.branch() in branches]
3611
3614
3612 if opts.get(b'active') and branchrevs:
3615 if opts.get(b'active') and branchrevs:
3613 dagheads = repo.heads(start)
3616 dagheads = repo.heads(start)
3614 heads = [h for h in heads if h.node() in dagheads]
3617 heads = [h for h in heads if h.node() in dagheads]
3615
3618
3616 if branchrevs:
3619 if branchrevs:
3617 haveheads = {h.branch() for h in heads}
3620 haveheads = {h.branch() for h in heads}
3618 if branches - haveheads:
3621 if branches - haveheads:
3619 headless = b', '.join(b for b in branches - haveheads)
3622 headless = b', '.join(b for b in branches - haveheads)
3620 msg = _(b'no open branch heads found on branches %s')
3623 msg = _(b'no open branch heads found on branches %s')
3621 if opts.get(b'rev'):
3624 if opts.get(b'rev'):
3622 msg += _(b' (started at %s)') % opts[b'rev']
3625 msg += _(b' (started at %s)') % opts[b'rev']
3623 ui.warn((msg + b'\n') % headless)
3626 ui.warn((msg + b'\n') % headless)
3624
3627
3625 if not heads:
3628 if not heads:
3626 return 1
3629 return 1
3627
3630
3628 ui.pager(b'heads')
3631 ui.pager(b'heads')
3629 heads = sorted(heads, key=lambda x: -(x.rev()))
3632 heads = sorted(heads, key=lambda x: -(x.rev()))
3630 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
3633 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
3631 for ctx in heads:
3634 for ctx in heads:
3632 displayer.show(ctx)
3635 displayer.show(ctx)
3633 displayer.close()
3636 displayer.close()
3634
3637
3635
3638
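

# A hypothetical helper, illustrative only, built from the same repository
# calls heads() uses above: collect the open heads of one named branch.
def _openbranchheads(repo, branch):
    return [repo[h] for h in repo.branchheads(branch, closed=False)]

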
3636 @command(
3639 @command(
3637 b'help',
3640 b'help',
3638 [
3641 [
3639 (b'e', b'extension', None, _(b'show only help for extensions')),
3642 (b'e', b'extension', None, _(b'show only help for extensions')),
3640 (b'c', b'command', None, _(b'show only help for commands')),
3643 (b'c', b'command', None, _(b'show only help for commands')),
3641 (b'k', b'keyword', None, _(b'show topics matching keyword')),
3644 (b'k', b'keyword', None, _(b'show topics matching keyword')),
3642 (
3645 (
3643 b's',
3646 b's',
3644 b'system',
3647 b'system',
3645 [],
3648 [],
3646 _(b'show help for specific platform(s)'),
3649 _(b'show help for specific platform(s)'),
3647 _(b'PLATFORM'),
3650 _(b'PLATFORM'),
3648 ),
3651 ),
3649 ],
3652 ],
3650 _(b'[-eck] [-s PLATFORM] [TOPIC]'),
3653 _(b'[-eck] [-s PLATFORM] [TOPIC]'),
3651 helpcategory=command.CATEGORY_HELP,
3654 helpcategory=command.CATEGORY_HELP,
3652 norepo=True,
3655 norepo=True,
3653 intents={INTENT_READONLY},
3656 intents={INTENT_READONLY},
3654 )
3657 )
3655 def help_(ui, name=None, **opts):
3658 def help_(ui, name=None, **opts):
3656 """show help for a given topic or a help overview
3659 """show help for a given topic or a help overview
3657
3660
3658 With no arguments, print a list of commands with short help messages.
3661 With no arguments, print a list of commands with short help messages.
3659
3662
3660 Given a topic, extension, or command name, print help for that
3663 Given a topic, extension, or command name, print help for that
3661 topic.
3664 topic.
3662
3665
3663 Returns 0 if successful.
3666 Returns 0 if successful.
3664 """
3667 """
3665
3668
3666 keep = opts.get('system') or []
3669 keep = opts.get('system') or []
3667 if len(keep) == 0:
3670 if len(keep) == 0:
3668 if pycompat.sysplatform.startswith(b'win'):
3671 if pycompat.sysplatform.startswith(b'win'):
3669 keep.append(b'windows')
3672 keep.append(b'windows')
3670 elif pycompat.sysplatform == b'OpenVMS':
3673 elif pycompat.sysplatform == b'OpenVMS':
3671 keep.append(b'vms')
3674 keep.append(b'vms')
3672 elif pycompat.sysplatform == b'plan9':
3675 elif pycompat.sysplatform == b'plan9':
3673 keep.append(b'plan9')
3676 keep.append(b'plan9')
3674 else:
3677 else:
3675 keep.append(b'unix')
3678 keep.append(b'unix')
3676 keep.append(pycompat.sysplatform.lower())
3679 keep.append(pycompat.sysplatform.lower())
3677 if ui.verbose:
3680 if ui.verbose:
3678 keep.append(b'verbose')
3681 keep.append(b'verbose')
3679
3682
3680 commands = sys.modules[__name__]
3683 commands = sys.modules[__name__]
3681 formatted = help.formattedhelp(ui, commands, name, keep=keep, **opts)
3684 formatted = help.formattedhelp(ui, commands, name, keep=keep, **opts)
3682 ui.pager(b'help')
3685 ui.pager(b'help')
3683 ui.write(formatted)
3686 ui.write(formatted)
3684
3687
3685
3688
3686 @command(
3689 @command(
3687 b'identify|id',
3690 b'identify|id',
3688 [
3691 [
3689 (b'r', b'rev', b'', _(b'identify the specified revision'), _(b'REV')),
3692 (b'r', b'rev', b'', _(b'identify the specified revision'), _(b'REV')),
3690 (b'n', b'num', None, _(b'show local revision number')),
3693 (b'n', b'num', None, _(b'show local revision number')),
3691 (b'i', b'id', None, _(b'show global revision id')),
3694 (b'i', b'id', None, _(b'show global revision id')),
3692 (b'b', b'branch', None, _(b'show branch')),
3695 (b'b', b'branch', None, _(b'show branch')),
3693 (b't', b'tags', None, _(b'show tags')),
3696 (b't', b'tags', None, _(b'show tags')),
3694 (b'B', b'bookmarks', None, _(b'show bookmarks')),
3697 (b'B', b'bookmarks', None, _(b'show bookmarks')),
3695 ]
3698 ]
3696 + remoteopts
3699 + remoteopts
3697 + formatteropts,
3700 + formatteropts,
3698 _(b'[-nibtB] [-r REV] [SOURCE]'),
3701 _(b'[-nibtB] [-r REV] [SOURCE]'),
3699 helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
3702 helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
3700 optionalrepo=True,
3703 optionalrepo=True,
3701 intents={INTENT_READONLY},
3704 intents={INTENT_READONLY},
3702 )
3705 )
3703 def identify(
3706 def identify(
3704 ui,
3707 ui,
3705 repo,
3708 repo,
3706 source=None,
3709 source=None,
3707 rev=None,
3710 rev=None,
3708 num=None,
3711 num=None,
3709 id=None,
3712 id=None,
3710 branch=None,
3713 branch=None,
3711 tags=None,
3714 tags=None,
3712 bookmarks=None,
3715 bookmarks=None,
3713 **opts
3716 **opts
3714 ):
3717 ):
3715 """identify the working directory or specified revision
3718 """identify the working directory or specified revision
3716
3719
3717 Print a summary identifying the repository state at REV using one or
3720 Print a summary identifying the repository state at REV using one or
3718 two parent hash identifiers, followed by a "+" if the working
3721 two parent hash identifiers, followed by a "+" if the working
3719 directory has uncommitted changes, the branch name (if not default),
3722 directory has uncommitted changes, the branch name (if not default),
3720 a list of tags, and a list of bookmarks.
3723 a list of tags, and a list of bookmarks.
3721
3724
3722 When REV is not given, print a summary of the current state of the
3725 When REV is not given, print a summary of the current state of the
3725 repository including the working directory. Specify -r . to get information
3728 repository including the working directory. Specify -r . to get information
3726 about the working directory parent without scanning uncommitted changes.
3729 about the working directory parent without scanning uncommitted changes.
3725
3728
3726 Specifying a path to a repository root or Mercurial bundle will
3729 Specifying a path to a repository root or Mercurial bundle will
3727 cause lookup to operate on that repository/bundle.
3730 cause lookup to operate on that repository/bundle.
3728
3731
3729 .. container:: verbose
3732 .. container:: verbose
3730
3733
3731 Template:
3734 Template:
3732
3735
3733 The following keywords are supported in addition to the common template
3736 The following keywords are supported in addition to the common template
3734 keywords and functions. See also :hg:`help templates`.
3737 keywords and functions. See also :hg:`help templates`.
3735
3738
3736 :dirty: String. Character ``+`` denoting if the working directory has
3739 :dirty: String. Character ``+`` denoting if the working directory has
3737 uncommitted changes.
3740 uncommitted changes.
3738 :id: String. One or two nodes, optionally followed by ``+``.
3741 :id: String. One or two nodes, optionally followed by ``+``.
3739 :parents: List of strings. Parent nodes of the changeset.
3742 :parents: List of strings. Parent nodes of the changeset.
3740
3743
3741 Examples:
3744 Examples:
3742
3745
3743 - generate a build identifier for the working directory::
3746 - generate a build identifier for the working directory::
3744
3747
3745 hg id --id > build-id.dat
3748 hg id --id > build-id.dat
3746
3749
3747 - find the revision corresponding to a tag::
3750 - find the revision corresponding to a tag::
3748
3751
3749 hg id -n -r 1.3
3752 hg id -n -r 1.3
3750
3753
3751 - check the most recent revision of a remote repository::
3754 - check the most recent revision of a remote repository::
3752
3755
3753 hg id -r tip https://www.mercurial-scm.org/repo/hg/
3756 hg id -r tip https://www.mercurial-scm.org/repo/hg/
3754
3757
3755 See :hg:`log` for generating more information about specific revisions,
3758 See :hg:`log` for generating more information about specific revisions,
3756 including full hash identifiers.
3759 including full hash identifiers.
3757
3760
3758 Returns 0 if successful.
3761 Returns 0 if successful.
3759 """
3762 """
3760
3763
3761 opts = pycompat.byteskwargs(opts)
3764 opts = pycompat.byteskwargs(opts)
3762 if not repo and not source:
3765 if not repo and not source:
3763 raise error.Abort(
3766 raise error.Abort(
3764 _(b"there is no Mercurial repository here (.hg not found)")
3767 _(b"there is no Mercurial repository here (.hg not found)")
3765 )
3768 )
3766
3769
3767 default = not (num or id or branch or tags or bookmarks)
3770 default = not (num or id or branch or tags or bookmarks)
3768 output = []
3771 output = []
3769 revs = []
3772 revs = []
3770
3773
3771 if source:
3774 if source:
3772 source, branches = hg.parseurl(ui.expandpath(source))
3775 source, branches = hg.parseurl(ui.expandpath(source))
3773 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
3776 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
3774 repo = peer.local()
3777 repo = peer.local()
3775 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
3778 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
3776
3779
3777 fm = ui.formatter(b'identify', opts)
3780 fm = ui.formatter(b'identify', opts)
3778 fm.startitem()
3781 fm.startitem()
3779
3782
3780 if not repo:
3783 if not repo:
3781 if num or branch or tags:
3784 if num or branch or tags:
3782 raise error.Abort(
3785 raise error.Abort(
3783 _(b"can't query remote revision number, branch, or tags")
3786 _(b"can't query remote revision number, branch, or tags")
3784 )
3787 )
3785 if not rev and revs:
3788 if not rev and revs:
3786 rev = revs[0]
3789 rev = revs[0]
3787 if not rev:
3790 if not rev:
3788 rev = b"tip"
3791 rev = b"tip"
3789
3792
3790 remoterev = peer.lookup(rev)
3793 remoterev = peer.lookup(rev)
3791 hexrev = fm.hexfunc(remoterev)
3794 hexrev = fm.hexfunc(remoterev)
3792 if default or id:
3795 if default or id:
3793 output = [hexrev]
3796 output = [hexrev]
3794 fm.data(id=hexrev)
3797 fm.data(id=hexrev)
3795
3798
3796 @util.cachefunc
3799 @util.cachefunc
3797 def getbms():
3800 def getbms():
3798 bms = []
3801 bms = []
3799
3802
3800 if b'bookmarks' in peer.listkeys(b'namespaces'):
3803 if b'bookmarks' in peer.listkeys(b'namespaces'):
3801 hexremoterev = hex(remoterev)
3804 hexremoterev = hex(remoterev)
3802 bms = [
3805 bms = [
3803 bm
3806 bm
3804 for bm, bmr in pycompat.iteritems(
3807 for bm, bmr in pycompat.iteritems(
3805 peer.listkeys(b'bookmarks')
3808 peer.listkeys(b'bookmarks')
3806 )
3809 )
3807 if bmr == hexremoterev
3810 if bmr == hexremoterev
3808 ]
3811 ]
3809
3812
3810 return sorted(bms)
3813 return sorted(bms)
3811
3814
3812 if fm.isplain():
3815 if fm.isplain():
3813 if bookmarks:
3816 if bookmarks:
3814 output.extend(getbms())
3817 output.extend(getbms())
3815 elif default and not ui.quiet:
3818 elif default and not ui.quiet:
3816 # multiple bookmarks for a single parent separated by '/'
3819 # multiple bookmarks for a single parent separated by '/'
3817 bm = b'/'.join(getbms())
3820 bm = b'/'.join(getbms())
3818 if bm:
3821 if bm:
3819 output.append(bm)
3822 output.append(bm)
3820 else:
3823 else:
3821 fm.data(node=hex(remoterev))
3824 fm.data(node=hex(remoterev))
3822 if bookmarks or b'bookmarks' in fm.datahint():
3825 if bookmarks or b'bookmarks' in fm.datahint():
3823 fm.data(bookmarks=fm.formatlist(getbms(), name=b'bookmark'))
3826 fm.data(bookmarks=fm.formatlist(getbms(), name=b'bookmark'))
3824 else:
3827 else:
3825 if rev:
3828 if rev:
3826 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
3829 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
3827 ctx = scmutil.revsingle(repo, rev, None)
3830 ctx = scmutil.revsingle(repo, rev, None)
3828
3831
3829 if ctx.rev() is None:
3832 if ctx.rev() is None:
3830 ctx = repo[None]
3833 ctx = repo[None]
3831 parents = ctx.parents()
3834 parents = ctx.parents()
3832 taglist = []
3835 taglist = []
3833 for p in parents:
3836 for p in parents:
3834 taglist.extend(p.tags())
3837 taglist.extend(p.tags())
3835
3838
3836 dirty = b""
3839 dirty = b""
3837 if ctx.dirty(missing=True, merge=False, branch=False):
3840 if ctx.dirty(missing=True, merge=False, branch=False):
3838 dirty = b'+'
3841 dirty = b'+'
3839 fm.data(dirty=dirty)
3842 fm.data(dirty=dirty)
3840
3843
3841 hexoutput = [fm.hexfunc(p.node()) for p in parents]
3844 hexoutput = [fm.hexfunc(p.node()) for p in parents]
3842 if default or id:
3845 if default or id:
3843 output = [b"%s%s" % (b'+'.join(hexoutput), dirty)]
3846 output = [b"%s%s" % (b'+'.join(hexoutput), dirty)]
3844 fm.data(id=b"%s%s" % (b'+'.join(hexoutput), dirty))
3847 fm.data(id=b"%s%s" % (b'+'.join(hexoutput), dirty))
3845
3848
3846 if num:
3849 if num:
3847 numoutput = [b"%d" % p.rev() for p in parents]
3850 numoutput = [b"%d" % p.rev() for p in parents]
3848 output.append(b"%s%s" % (b'+'.join(numoutput), dirty))
3851 output.append(b"%s%s" % (b'+'.join(numoutput), dirty))
3849
3852
3850 fm.data(
3853 fm.data(
3851 parents=fm.formatlist(
3854 parents=fm.formatlist(
3852 [fm.hexfunc(p.node()) for p in parents], name=b'node'
3855 [fm.hexfunc(p.node()) for p in parents], name=b'node'
3853 )
3856 )
3854 )
3857 )
3855 else:
3858 else:
3856 hexoutput = fm.hexfunc(ctx.node())
3859 hexoutput = fm.hexfunc(ctx.node())
3857 if default or id:
3860 if default or id:
3858 output = [hexoutput]
3861 output = [hexoutput]
3859 fm.data(id=hexoutput)
3862 fm.data(id=hexoutput)
3860
3863
3861 if num:
3864 if num:
3862 output.append(pycompat.bytestr(ctx.rev()))
3865 output.append(pycompat.bytestr(ctx.rev()))
3863 taglist = ctx.tags()
3866 taglist = ctx.tags()
3864
3867
3865 if default and not ui.quiet:
3868 if default and not ui.quiet:
3866 b = ctx.branch()
3869 b = ctx.branch()
3867 if b != b'default':
3870 if b != b'default':
3868 output.append(b"(%s)" % b)
3871 output.append(b"(%s)" % b)
3869
3872
3870 # multiple tags for a single parent separated by '/'
3873 # multiple tags for a single parent separated by '/'
3871 t = b'/'.join(taglist)
3874 t = b'/'.join(taglist)
3872 if t:
3875 if t:
3873 output.append(t)
3876 output.append(t)
3874
3877
3875 # multiple bookmarks for a single parent separated by '/'
3878 # multiple bookmarks for a single parent separated by '/'
3876 bm = b'/'.join(ctx.bookmarks())
3879 bm = b'/'.join(ctx.bookmarks())
3877 if bm:
3880 if bm:
3878 output.append(bm)
3881 output.append(bm)
3879 else:
3882 else:
3880 if branch:
3883 if branch:
3881 output.append(ctx.branch())
3884 output.append(ctx.branch())
3882
3885
3883 if tags:
3886 if tags:
3884 output.extend(taglist)
3887 output.extend(taglist)
3885
3888
3886 if bookmarks:
3889 if bookmarks:
3887 output.extend(ctx.bookmarks())
3890 output.extend(ctx.bookmarks())
3888
3891
3889 fm.data(node=ctx.hex())
3892 fm.data(node=ctx.hex())
3890 fm.data(branch=ctx.branch())
3893 fm.data(branch=ctx.branch())
3891 fm.data(tags=fm.formatlist(taglist, name=b'tag', sep=b':'))
3894 fm.data(tags=fm.formatlist(taglist, name=b'tag', sep=b':'))
3892 fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark'))
3895 fm.data(bookmarks=fm.formatlist(ctx.bookmarks(), name=b'bookmark'))
3893 fm.context(ctx=ctx)
3896 fm.context(ctx=ctx)
3894
3897
3895 fm.plain(b"%s\n" % b' '.join(output))
3898 fm.plain(b"%s\n" % b' '.join(output))
3896 fm.end()
3899 fm.end()
3897
3900
3898
3901
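

# A hypothetical command body, illustrative only, showing the formatter
# pattern used throughout identify() above: one startitem() per record,
# data() for machine-readable fields, plain() for human-only text, and
# write() for fields shown in both plain and templated output.
def _shownodes(ui, repo, revs, opts):
    fm = ui.formatter(b'shownodes', opts)
    for rev in revs:
        ctx = repo[rev]
        fm.startitem()
        fm.context(ctx=ctx)
        fm.data(node=fm.hexfunc(ctx.node()))
        fm.plain(b'%d ' % ctx.rev())
        fm.write(b'branch', b'%s\n', ctx.branch())
    fm.end()

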
3899 @command(
3902 @command(
3900 b'import|patch',
3903 b'import|patch',
3901 [
3904 [
3902 (
3905 (
3903 b'p',
3906 b'p',
3904 b'strip',
3907 b'strip',
3905 1,
3908 1,
3906 _(
3909 _(
3907 b'directory strip option for patch. This has the same '
3910 b'directory strip option for patch. This has the same '
3908 b'meaning as the corresponding patch option'
3911 b'meaning as the corresponding patch option'
3909 ),
3912 ),
3910 _(b'NUM'),
3913 _(b'NUM'),
3911 ),
3914 ),
3912 (b'b', b'base', b'', _(b'base path (DEPRECATED)'), _(b'PATH')),
3915 (b'b', b'base', b'', _(b'base path (DEPRECATED)'), _(b'PATH')),
3913 (b'', b'secret', None, _(b'use the secret phase for committing')),
3916 (b'', b'secret', None, _(b'use the secret phase for committing')),
3914 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
3917 (b'e', b'edit', False, _(b'invoke editor on commit messages')),
3915 (
3918 (
3916 b'f',
3919 b'f',
3917 b'force',
3920 b'force',
3918 None,
3921 None,
3919 _(b'skip check for outstanding uncommitted changes (DEPRECATED)'),
3922 _(b'skip check for outstanding uncommitted changes (DEPRECATED)'),
3920 ),
3923 ),
3921 (
3924 (
3922 b'',
3925 b'',
3923 b'no-commit',
3926 b'no-commit',
3924 None,
3927 None,
3925 _(b"don't commit, just update the working directory"),
3928 _(b"don't commit, just update the working directory"),
3926 ),
3929 ),
3927 (
3930 (
3928 b'',
3931 b'',
3929 b'bypass',
3932 b'bypass',
3930 None,
3933 None,
3931 _(b"apply patch without touching the working directory"),
3934 _(b"apply patch without touching the working directory"),
3932 ),
3935 ),
3933 (b'', b'partial', None, _(b'commit even if some hunks fail')),
3936 (b'', b'partial', None, _(b'commit even if some hunks fail')),
3934 (b'', b'exact', None, _(b'abort if patch would apply lossily')),
3937 (b'', b'exact', None, _(b'abort if patch would apply lossily')),
3935 (b'', b'prefix', b'', _(b'apply patch to subdirectory'), _(b'DIR')),
3938 (b'', b'prefix', b'', _(b'apply patch to subdirectory'), _(b'DIR')),
3936 (
3939 (
3937 b'',
3940 b'',
3938 b'import-branch',
3941 b'import-branch',
3939 None,
3942 None,
3940 _(b'use any branch information in patch (implied by --exact)'),
3943 _(b'use any branch information in patch (implied by --exact)'),
3941 ),
3944 ),
3942 ]
3945 ]
3943 + commitopts
3946 + commitopts
3944 + commitopts2
3947 + commitopts2
3945 + similarityopts,
3948 + similarityopts,
3946 _(b'[OPTION]... PATCH...'),
3949 _(b'[OPTION]... PATCH...'),
3947 helpcategory=command.CATEGORY_IMPORT_EXPORT,
3950 helpcategory=command.CATEGORY_IMPORT_EXPORT,
3948 )
3951 )
3949 def import_(ui, repo, patch1=None, *patches, **opts):
3952 def import_(ui, repo, patch1=None, *patches, **opts):
3950 """import an ordered set of patches
3953 """import an ordered set of patches
3951
3954
3952 Import a list of patches and commit them individually (unless
3955 Import a list of patches and commit them individually (unless
3953 --no-commit is specified).
3956 --no-commit is specified).
3954
3957
3955 To read a patch from standard input (stdin), use "-" as the patch
3958 To read a patch from standard input (stdin), use "-" as the patch
3956 name. If a URL is specified, the patch will be downloaded from
3959 name. If a URL is specified, the patch will be downloaded from
3957 there.
3960 there.
3958
3961
3959 Import first applies changes to the working directory (unless
3962 Import first applies changes to the working directory (unless
3960 --bypass is specified); import will abort if there are outstanding
3963 --bypass is specified); import will abort if there are outstanding
3961 changes.
3964 changes.
3962
3965
3963 Use --bypass to apply and commit patches directly to the
3966 Use --bypass to apply and commit patches directly to the
3964 repository, without affecting the working directory. Without
3967 repository, without affecting the working directory. Without
3965 --exact, patches will be applied on top of the working directory
3968 --exact, patches will be applied on top of the working directory
3966 parent revision.
3969 parent revision.
3967
3970
3968 You can import a patch straight from a mail message. Even patches
3971 You can import a patch straight from a mail message. Even patches
3969 as attachments work (to use the body part, it must have type
3972 as attachments work (to use the body part, it must have type
3970 text/plain or text/x-patch). The From and Subject headers of the email
3973 text/plain or text/x-patch). The From and Subject headers of the email
3971 message are used as the default committer and commit message. All
3974 message are used as the default committer and commit message. All
3972 text/plain body parts before the first diff are added to the commit
3975 text/plain body parts before the first diff are added to the commit
3973 message.
3976 message.
3974
3977
3975 If the imported patch was generated by :hg:`export`, user and
3978 If the imported patch was generated by :hg:`export`, user and
3976 description from patch override values from message headers and
3979 description from patch override values from message headers and
3977 body. Values given on command line with -m/--message and -u/--user
3980 body. Values given on command line with -m/--message and -u/--user
3978 override these.
3981 override these.
3979
3982
3980 If --exact is specified, import will set the working directory to
3983 If --exact is specified, import will set the working directory to
3981 the parent of each patch before applying it, and will abort if the
3984 the parent of each patch before applying it, and will abort if the
3982 resulting changeset has a different ID than the one recorded in
3985 resulting changeset has a different ID than the one recorded in
3983 the patch. This will guard against various ways that portable
3986 the patch. This will guard against various ways that portable
3984 patch formats and mail systems might fail to transfer Mercurial
3987 patch formats and mail systems might fail to transfer Mercurial
3985 data or metadata. See :hg:`bundle` for lossless transmission.
3988 data or metadata. See :hg:`bundle` for lossless transmission.
3986
3989
3987 Use --partial to ensure a changeset will be created from the patch
3990 Use --partial to ensure a changeset will be created from the patch
3988 even if some hunks fail to apply. Hunks that fail to apply will be
3991 even if some hunks fail to apply. Hunks that fail to apply will be
3989 written to a <target-file>.rej file. Conflicts can then be resolved
3992 written to a <target-file>.rej file. Conflicts can then be resolved
3990 by hand before :hg:`commit --amend` is run to update the created
3993 by hand before :hg:`commit --amend` is run to update the created
3991 changeset. This flag exists to let people import patches that
3994 changeset. This flag exists to let people import patches that
3992 partially apply without losing the associated metadata (author,
3995 partially apply without losing the associated metadata (author,
3993 date, description, ...).
3996 date, description, ...).
3994
3997
3995 .. note::
3998 .. note::
3996
3999
3997 When no hunks apply cleanly, :hg:`import --partial` will create
4000 When no hunks apply cleanly, :hg:`import --partial` will create
3998 an empty changeset, importing only the patch metadata.
4001 an empty changeset, importing only the patch metadata.
3999
4002
4000 With -s/--similarity, hg will attempt to discover renames and
4003 With -s/--similarity, hg will attempt to discover renames and
4001 copies in the patch in the same way as :hg:`addremove`.
4004 copies in the patch in the same way as :hg:`addremove`.
4002
4005
4003 It is possible to use external patch programs to apply the patch
4006 It is possible to use external patch programs to apply the patch
4004 by setting the ``ui.patch`` configuration option. For the default
4007 by setting the ``ui.patch`` configuration option. For the default
4005 internal tool, the fuzz can also be configured via ``patch.fuzz``.
4008 internal tool, the fuzz can also be configured via ``patch.fuzz``.
4006 See :hg:`help config` for more information about configuration
4009 See :hg:`help config` for more information about configuration
4007 files and how to use these options.
4010 files and how to use these options.
4008
4011
4009 See :hg:`help dates` for a list of formats valid for -d/--date.
4012 See :hg:`help dates` for a list of formats valid for -d/--date.
4010
4013
4011 .. container:: verbose
4014 .. container:: verbose
4012
4015
4013 Examples:
4016 Examples:
4014
4017
4015 - import a traditional patch from a website and detect renames::
4018 - import a traditional patch from a website and detect renames::
4016
4019
4017 hg import -s 80 http://example.com/bugfix.patch
4020 hg import -s 80 http://example.com/bugfix.patch
4018
4021
4019 - import a changeset from an hgweb server::
4022 - import a changeset from an hgweb server::
4020
4023
4021 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
4024 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
4022
4025
4023 - import all the patches in a Unix-style mbox::
4026 - import all the patches in a Unix-style mbox::
4024
4027
4025 hg import incoming-patches.mbox
4028 hg import incoming-patches.mbox
4026
4029
4027 - import patches from stdin::
4030 - import patches from stdin::
4028
4031
4029 hg import -
4032 hg import -
4030
4033
4031 - attempt to exactly restore an exported changeset (not always
4034 - attempt to exactly restore an exported changeset (not always
4032 possible)::
4035 possible)::
4033
4036
4034 hg import --exact proposed-fix.patch
4037 hg import --exact proposed-fix.patch
4035
4038
4036 - use an external tool to apply a patch which is too fuzzy for
4039 - use an external tool to apply a patch which is too fuzzy for
4037 the default internal tool::
4040 the default internal tool::
4038
4041
4039 hg import --config ui.patch="patch --merge" fuzzy.patch
4042 hg import --config ui.patch="patch --merge" fuzzy.patch
4040
4043
4041 - change the default fuzzing from 2 to a less strict 7::
4044 - change the default fuzzing from 2 to a less strict 7::
4042
4045
4043 hg import --config patch.fuzz=7 fuzz.patch
4046 hg import --config patch.fuzz=7 fuzz.patch
4044
4047
4045 Returns 0 on success, 1 on partial success (see --partial).
4048 Returns 0 on success, 1 on partial success (see --partial).
4046 """
4049 """
4047
4050
4048 cmdutil.check_incompatible_arguments(
4051 cmdutil.check_incompatible_arguments(
4049 opts, 'no_commit', ['bypass', 'secret']
4052 opts, 'no_commit', ['bypass', 'secret']
4050 )
4053 )
4051 cmdutil.check_incompatible_arguments(opts, 'exact', ['edit', 'prefix'])
4054 cmdutil.check_incompatible_arguments(opts, 'exact', ['edit', 'prefix'])
4052 opts = pycompat.byteskwargs(opts)
4055 opts = pycompat.byteskwargs(opts)
4053 if not patch1:
4056 if not patch1:
4054 raise error.Abort(_(b'need at least one patch to import'))
4057 raise error.Abort(_(b'need at least one patch to import'))
4055
4058
4056 patches = (patch1,) + patches
4059 patches = (patch1,) + patches
4057
4060
4058 date = opts.get(b'date')
4061 date = opts.get(b'date')
4059 if date:
4062 if date:
4060 opts[b'date'] = dateutil.parsedate(date)
4063 opts[b'date'] = dateutil.parsedate(date)
4061
4064
4062 exact = opts.get(b'exact')
4065 exact = opts.get(b'exact')
4063 update = not opts.get(b'bypass')
4066 update = not opts.get(b'bypass')
4064 try:
4067 try:
4065 sim = float(opts.get(b'similarity') or 0)
4068 sim = float(opts.get(b'similarity') or 0)
4066 except ValueError:
4069 except ValueError:
4067 raise error.Abort(_(b'similarity must be a number'))
4070 raise error.Abort(_(b'similarity must be a number'))
4068 if sim < 0 or sim > 100:
4071 if sim < 0 or sim > 100:
4069 raise error.Abort(_(b'similarity must be between 0 and 100'))
4072 raise error.Abort(_(b'similarity must be between 0 and 100'))
4070 if sim and not update:
4073 if sim and not update:
4071 raise error.Abort(_(b'cannot use --similarity with --bypass'))
4074 raise error.Abort(_(b'cannot use --similarity with --bypass'))
4072
4075
4073 base = opts[b"base"]
4076 base = opts[b"base"]
4074 msgs = []
4077 msgs = []
4075 ret = 0
4078 ret = 0
4076
4079
4077 with repo.wlock():
4080 with repo.wlock():
4078 if update:
4081 if update:
4079 cmdutil.checkunfinished(repo)
4082 cmdutil.checkunfinished(repo)
4080 if exact or not opts.get(b'force'):
4083 if exact or not opts.get(b'force'):
4081 cmdutil.bailifchanged(repo)
4084 cmdutil.bailifchanged(repo)
4082
4085
4083 if not opts.get(b'no_commit'):
4086 if not opts.get(b'no_commit'):
4084 lock = repo.lock
4087 lock = repo.lock
4085 tr = lambda: repo.transaction(b'import')
4088 tr = lambda: repo.transaction(b'import')
4086 dsguard = util.nullcontextmanager
4089 dsguard = util.nullcontextmanager
4087 else:
4090 else:
4088 lock = util.nullcontextmanager
4091 lock = util.nullcontextmanager
4089 tr = util.nullcontextmanager
4092 tr = util.nullcontextmanager
4090 dsguard = lambda: dirstateguard.dirstateguard(repo, b'import')
4093 dsguard = lambda: dirstateguard.dirstateguard(repo, b'import')
4091 with lock(), tr(), dsguard():
4094 with lock(), tr(), dsguard():
4092 parents = repo[None].parents()
4095 parents = repo[None].parents()
4093 for patchurl in patches:
4096 for patchurl in patches:
4094 if patchurl == b'-':
4097 if patchurl == b'-':
4095 ui.status(_(b'applying patch from stdin\n'))
4098 ui.status(_(b'applying patch from stdin\n'))
4096 patchfile = ui.fin
4099 patchfile = ui.fin
4097 patchurl = b'stdin' # for error message
4100 patchurl = b'stdin' # for error message
4098 else:
4101 else:
4099 patchurl = os.path.join(base, patchurl)
4102 patchurl = os.path.join(base, patchurl)
4100 ui.status(_(b'applying %s\n') % patchurl)
4103 ui.status(_(b'applying %s\n') % patchurl)
4101 patchfile = hg.openpath(ui, patchurl, sendaccept=False)
4104 patchfile = hg.openpath(ui, patchurl, sendaccept=False)
4102
4105
4103 haspatch = False
4106 haspatch = False
4104 for hunk in patch.split(patchfile):
4107 for hunk in patch.split(patchfile):
4105 with patch.extract(ui, hunk) as patchdata:
4108 with patch.extract(ui, hunk) as patchdata:
4106 msg, node, rej = cmdutil.tryimportone(
4109 msg, node, rej = cmdutil.tryimportone(
4107 ui, repo, patchdata, parents, opts, msgs, hg.clean
4110 ui, repo, patchdata, parents, opts, msgs, hg.clean
4108 )
4111 )
4109 if msg:
4112 if msg:
4110 haspatch = True
4113 haspatch = True
4111 ui.note(msg + b'\n')
4114 ui.note(msg + b'\n')
4112 if update or exact:
4115 if update or exact:
4113 parents = repo[None].parents()
4116 parents = repo[None].parents()
4114 else:
4117 else:
4115 parents = [repo[node]]
4118 parents = [repo[node]]
4116 if rej:
4119 if rej:
4117 ui.write_err(_(b"patch applied partially\n"))
4120 ui.write_err(_(b"patch applied partially\n"))
4118 ui.write_err(
4121 ui.write_err(
4119 _(
4122 _(
4120 b"(fix the .rej files and run "
4123 b"(fix the .rej files and run "
4121 b"`hg commit --amend`)\n"
4124 b"`hg commit --amend`)\n"
4122 )
4125 )
4123 )
4126 )
4124 ret = 1
4127 ret = 1
4125 break
4128 break
4126
4129
4127 if not haspatch:
4130 if not haspatch:
4128 raise error.Abort(_(b'%s: no diffs found') % patchurl)
4131 raise error.Abort(_(b'%s: no diffs found') % patchurl)
4129
4132
4130 if msgs:
4133 if msgs:
4131 repo.savecommitmessage(b'\n* * *\n'.join(msgs))
4134 repo.savecommitmessage(b'\n* * *\n'.join(msgs))
4132 return ret
4135 return ret
4133
4136
4134
4137
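# --------------------------------------------------------------------
# Illustrative sketch (not part of this changeset): driving the
# documented `hg import --partial` workflow from Python.  It assumes an
# `hg` executable on PATH and an existing repository at `repo_path`;
# the helper name is hypothetical.
import subprocess


def import_patch_partially(repo_path, patch_path):
    # As documented above, `hg import --partial` exits with 1 on partial
    # success and leaves rejected hunks in .rej files next to the
    # affected files.
    proc = subprocess.run(
        ["hg", "--cwd", repo_path, "import", "--partial", patch_path]
    )
    if proc.returncode == 1:
        print("patch applied partially; fix the .rej files and run "
              "'hg commit --amend'")
    return proc.returncode
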
4135 @command(
4138 @command(
4136 b'incoming|in',
4139 b'incoming|in',
4137 [
4140 [
4138 (
4141 (
4139 b'f',
4142 b'f',
4140 b'force',
4143 b'force',
4141 None,
4144 None,
4142 _(b'run even if remote repository is unrelated'),
4145 _(b'run even if remote repository is unrelated'),
4143 ),
4146 ),
4144 (b'n', b'newest-first', None, _(b'show newest record first')),
4147 (b'n', b'newest-first', None, _(b'show newest record first')),
4145 (b'', b'bundle', b'', _(b'file to store the bundles into'), _(b'FILE')),
4148 (b'', b'bundle', b'', _(b'file to store the bundles into'), _(b'FILE')),
4146 (
4149 (
4147 b'r',
4150 b'r',
4148 b'rev',
4151 b'rev',
4149 [],
4152 [],
4150 _(b'a remote changeset intended to be added'),
4153 _(b'a remote changeset intended to be added'),
4151 _(b'REV'),
4154 _(b'REV'),
4152 ),
4155 ),
4153 (b'B', b'bookmarks', False, _(b"compare bookmarks")),
4156 (b'B', b'bookmarks', False, _(b"compare bookmarks")),
4154 (
4157 (
4155 b'b',
4158 b'b',
4156 b'branch',
4159 b'branch',
4157 [],
4160 [],
4158 _(b'a specific branch you would like to pull'),
4161 _(b'a specific branch you would like to pull'),
4159 _(b'BRANCH'),
4162 _(b'BRANCH'),
4160 ),
4163 ),
4161 ]
4164 ]
4162 + logopts
4165 + logopts
4163 + remoteopts
4166 + remoteopts
4164 + subrepoopts,
4167 + subrepoopts,
4165 _(b'[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'),
4168 _(b'[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'),
4166 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
4169 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
4167 )
4170 )
4168 def incoming(ui, repo, source=b"default", **opts):
4171 def incoming(ui, repo, source=b"default", **opts):
4169 """show new changesets found in source
4172 """show new changesets found in source
4170
4173
4171 Show new changesets found in the specified path/URL or the default
4174 Show new changesets found in the specified path/URL or the default
4172 pull location. These are the changesets that would have been pulled
4175 pull location. These are the changesets that would have been pulled
4173 by :hg:`pull` at the time you issued this command.
4176 by :hg:`pull` at the time you issued this command.
4174
4177
4175 See pull for valid source format details.
4178 See pull for valid source format details.
4176
4179
4177 .. container:: verbose
4180 .. container:: verbose
4178
4181
4179 With -B/--bookmarks, the result of bookmark comparison between
4182 With -B/--bookmarks, the result of bookmark comparison between
4180 local and remote repositories is displayed. With -v/--verbose,
4183 local and remote repositories is displayed. With -v/--verbose,
4181 status is also displayed for each bookmark like below::
4184 status is also displayed for each bookmark like below::
4182
4185
4183 BM1 01234567890a added
4186 BM1 01234567890a added
4184 BM2 1234567890ab advanced
4187 BM2 1234567890ab advanced
4185 BM3 234567890abc diverged
4188 BM3 234567890abc diverged
4186 BM4 34567890abcd changed
4189 BM4 34567890abcd changed
4187
4190
4188 The action taken locally when pulling depends on the
4191 The action taken locally when pulling depends on the
4189 status of each bookmark:
4192 status of each bookmark:
4190
4193
4191 :``added``: pull will create it
4194 :``added``: pull will create it
4192 :``advanced``: pull will update it
4195 :``advanced``: pull will update it
4193 :``diverged``: pull will create a divergent bookmark
4196 :``diverged``: pull will create a divergent bookmark
4194 :``changed``: result depends on remote changesets
4197 :``changed``: result depends on remote changesets
4195
4198
4196 From the point of view of pulling behavior, bookmarks
4199 From the point of view of pulling behavior, bookmarks
4197 existing only in the remote repository are treated as ``added``,
4200 existing only in the remote repository are treated as ``added``,
4198 even if they are in fact locally deleted.
4201 even if they are in fact locally deleted.
4199
4202
4200 .. container:: verbose
4203 .. container:: verbose
4201
4204
4202 For a remote repository, using --bundle avoids downloading the
4205 For a remote repository, using --bundle avoids downloading the
4203 changesets twice if the incoming command is followed by a pull.
4206 changesets twice if the incoming command is followed by a pull.
4204
4207
4205 Examples:
4208 Examples:
4206
4209
4207 - show incoming changes with patches and full description::
4210 - show incoming changes with patches and full description::
4208
4211
4209 hg incoming -vp
4212 hg incoming -vp
4210
4213
4211 - show incoming changes excluding merges, store a bundle::
4214 - show incoming changes excluding merges, store a bundle::
4212
4215
4213 hg in -vpM --bundle incoming.hg
4216 hg in -vpM --bundle incoming.hg
4214 hg pull incoming.hg
4217 hg pull incoming.hg
4215
4218
4216 - briefly list changes inside a bundle::
4219 - briefly list changes inside a bundle::
4217
4220
4218 hg in changes.hg -T "{desc|firstline}\\n"
4221 hg in changes.hg -T "{desc|firstline}\\n"
4219
4222
4220 Returns 0 if there are incoming changes, 1 otherwise.
4223 Returns 0 if there are incoming changes, 1 otherwise.
4221 """
4224 """
4222 opts = pycompat.byteskwargs(opts)
4225 opts = pycompat.byteskwargs(opts)
4223 if opts.get(b'graph'):
4226 if opts.get(b'graph'):
4224 logcmdutil.checkunsupportedgraphflags([], opts)
4227 logcmdutil.checkunsupportedgraphflags([], opts)
4225
4228
4226 def display(other, chlist, displayer):
4229 def display(other, chlist, displayer):
4227 revdag = logcmdutil.graphrevs(other, chlist, opts)
4230 revdag = logcmdutil.graphrevs(other, chlist, opts)
4228 logcmdutil.displaygraph(
4231 logcmdutil.displaygraph(
4229 ui, repo, revdag, displayer, graphmod.asciiedges
4232 ui, repo, revdag, displayer, graphmod.asciiedges
4230 )
4233 )
4231
4234
4232 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
4235 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
4233 return 0
4236 return 0
4234
4237
4235 cmdutil.check_incompatible_arguments(opts, b'subrepos', [b'bundle'])
4238 cmdutil.check_incompatible_arguments(opts, b'subrepos', [b'bundle'])
4236
4239
4237 if opts.get(b'bookmarks'):
4240 if opts.get(b'bookmarks'):
4238 source, branches = hg.parseurl(
4241 source, branches = hg.parseurl(
4239 ui.expandpath(source), opts.get(b'branch')
4242 ui.expandpath(source), opts.get(b'branch')
4240 )
4243 )
4241 other = hg.peer(repo, opts, source)
4244 other = hg.peer(repo, opts, source)
4242 if b'bookmarks' not in other.listkeys(b'namespaces'):
4245 if b'bookmarks' not in other.listkeys(b'namespaces'):
4243 ui.warn(_(b"remote doesn't support bookmarks\n"))
4246 ui.warn(_(b"remote doesn't support bookmarks\n"))
4244 return 0
4247 return 0
4245 ui.pager(b'incoming')
4248 ui.pager(b'incoming')
4246 ui.status(_(b'comparing with %s\n') % util.hidepassword(source))
4249 ui.status(_(b'comparing with %s\n') % util.hidepassword(source))
4247 return bookmarks.incoming(ui, repo, other)
4250 return bookmarks.incoming(ui, repo, other)
4248
4251
4249 repo._subtoppath = ui.expandpath(source)
4252 repo._subtoppath = ui.expandpath(source)
4250 try:
4253 try:
4251 return hg.incoming(ui, repo, source, opts)
4254 return hg.incoming(ui, repo, source, opts)
4252 finally:
4255 finally:
4253 del repo._subtoppath
4256 del repo._subtoppath
4254
4257
4255
4258
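# --------------------------------------------------------------------
# Illustrative sketch (not part of this changeset): the documented
# pattern of storing incoming changesets in a bundle so that a
# following pull does not download them again.  Assumes an `hg`
# executable on PATH; the helper name is hypothetical.
import subprocess


def preview_then_pull(repo_path, source="default", bundle="incoming.hg"):
    # `hg incoming` exits 0 when there are incoming changes, 1 otherwise.
    rc = subprocess.run(
        ["hg", "--cwd", repo_path, "incoming", "-M", "--bundle", bundle, source]
    ).returncode
    if rc == 0:
        # Pull from the locally stored bundle instead of the remote.
        subprocess.run(["hg", "--cwd", repo_path, "pull", bundle], check=True)
    return rc
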
4256 @command(
4259 @command(
4257 b'init',
4260 b'init',
4258 remoteopts,
4261 remoteopts,
4259 _(b'[-e CMD] [--remotecmd CMD] [DEST]'),
4262 _(b'[-e CMD] [--remotecmd CMD] [DEST]'),
4260 helpcategory=command.CATEGORY_REPO_CREATION,
4263 helpcategory=command.CATEGORY_REPO_CREATION,
4261 helpbasic=True,
4264 helpbasic=True,
4262 norepo=True,
4265 norepo=True,
4263 )
4266 )
4264 def init(ui, dest=b".", **opts):
4267 def init(ui, dest=b".", **opts):
4265 """create a new repository in the given directory
4268 """create a new repository in the given directory
4266
4269
4267 Initialize a new repository in the given directory. If the given
4270 Initialize a new repository in the given directory. If the given
4268 directory does not exist, it will be created.
4271 directory does not exist, it will be created.
4269
4272
4270 If no directory is given, the current directory is used.
4273 If no directory is given, the current directory is used.
4271
4274
4272 It is possible to specify an ``ssh://`` URL as the destination.
4275 It is possible to specify an ``ssh://`` URL as the destination.
4273 See :hg:`help urls` for more information.
4276 See :hg:`help urls` for more information.
4274
4277
4275 Returns 0 on success.
4278 Returns 0 on success.
4276 """
4279 """
4277 opts = pycompat.byteskwargs(opts)
4280 opts = pycompat.byteskwargs(opts)
4278 hg.peer(ui, opts, ui.expandpath(dest), create=True)
4281 hg.peer(ui, opts, ui.expandpath(dest), create=True)
4279
4282
4280
4283
4281 @command(
4284 @command(
4282 b'locate',
4285 b'locate',
4283 [
4286 [
4284 (
4287 (
4285 b'r',
4288 b'r',
4286 b'rev',
4289 b'rev',
4287 b'',
4290 b'',
4288 _(b'search the repository as it is in REV'),
4291 _(b'search the repository as it is in REV'),
4289 _(b'REV'),
4292 _(b'REV'),
4290 ),
4293 ),
4291 (
4294 (
4292 b'0',
4295 b'0',
4293 b'print0',
4296 b'print0',
4294 None,
4297 None,
4295 _(b'end filenames with NUL, for use with xargs'),
4298 _(b'end filenames with NUL, for use with xargs'),
4296 ),
4299 ),
4297 (
4300 (
4298 b'f',
4301 b'f',
4299 b'fullpath',
4302 b'fullpath',
4300 None,
4303 None,
4301 _(b'print complete paths from the filesystem root'),
4304 _(b'print complete paths from the filesystem root'),
4302 ),
4305 ),
4303 ]
4306 ]
4304 + walkopts,
4307 + walkopts,
4305 _(b'[OPTION]... [PATTERN]...'),
4308 _(b'[OPTION]... [PATTERN]...'),
4306 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
4309 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
4307 )
4310 )
4308 def locate(ui, repo, *pats, **opts):
4311 def locate(ui, repo, *pats, **opts):
4309 """locate files matching specific patterns (DEPRECATED)
4312 """locate files matching specific patterns (DEPRECATED)
4310
4313
4311 Print files under Mercurial control in the working directory whose
4314 Print files under Mercurial control in the working directory whose
4312 names match the given patterns.
4315 names match the given patterns.
4313
4316
4314 By default, this command searches all directories in the working
4317 By default, this command searches all directories in the working
4315 directory. To search just the current directory and its
4318 directory. To search just the current directory and its
4316 subdirectories, use "--include .".
4319 subdirectories, use "--include .".
4317
4320
4318 If no patterns are given to match, this command prints the names
4321 If no patterns are given to match, this command prints the names
4319 of all files under Mercurial control in the working directory.
4322 of all files under Mercurial control in the working directory.
4320
4323
4321 If you want to feed the output of this command into the "xargs"
4324 If you want to feed the output of this command into the "xargs"
4322 command, use the -0 option to both this command and "xargs". This
4325 command, use the -0 option to both this command and "xargs". This
4323 will avoid the problem of "xargs" treating single filenames that
4326 will avoid the problem of "xargs" treating single filenames that
4324 contain whitespace as multiple filenames.
4327 contain whitespace as multiple filenames.
4325
4328
4326 See :hg:`help files` for a more versatile command.
4329 See :hg:`help files` for a more versatile command.
4327
4330
4328 Returns 0 if a match is found, 1 otherwise.
4331 Returns 0 if a match is found, 1 otherwise.
4329 """
4332 """
4330 opts = pycompat.byteskwargs(opts)
4333 opts = pycompat.byteskwargs(opts)
4331 if opts.get(b'print0'):
4334 if opts.get(b'print0'):
4332 end = b'\0'
4335 end = b'\0'
4333 else:
4336 else:
4334 end = b'\n'
4337 end = b'\n'
4335 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
4338 ctx = scmutil.revsingle(repo, opts.get(b'rev'), None)
4336
4339
4337 ret = 1
4340 ret = 1
4338 m = scmutil.match(
4341 m = scmutil.match(
4339 ctx, pats, opts, default=b'relglob', badfn=lambda x, y: False
4342 ctx, pats, opts, default=b'relglob', badfn=lambda x, y: False
4340 )
4343 )
4341
4344
4342 ui.pager(b'locate')
4345 ui.pager(b'locate')
4343 if ctx.rev() is None:
4346 if ctx.rev() is None:
4344 # When run on the working copy, "locate" includes removed files, so
4347 # When run on the working copy, "locate" includes removed files, so
4345 # we get the list of files from the dirstate.
4348 # we get the list of files from the dirstate.
4346 filesgen = sorted(repo.dirstate.matches(m))
4349 filesgen = sorted(repo.dirstate.matches(m))
4347 else:
4350 else:
4348 filesgen = ctx.matches(m)
4351 filesgen = ctx.matches(m)
4349 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=bool(pats))
4352 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=bool(pats))
4350 for abs in filesgen:
4353 for abs in filesgen:
4351 if opts.get(b'fullpath'):
4354 if opts.get(b'fullpath'):
4352 ui.write(repo.wjoin(abs), end)
4355 ui.write(repo.wjoin(abs), end)
4353 else:
4356 else:
4354 ui.write(uipathfn(abs), end)
4357 ui.write(uipathfn(abs), end)
4355 ret = 0
4358 ret = 0
4356
4359
4357 return ret
4360 return ret
4358
4361
4359
4362
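# --------------------------------------------------------------------
# Illustrative sketch (not part of this changeset): consuming the
# NUL-terminated output produced by the -0/--print0 flag described
# above, which is safe even for filenames containing whitespace.
# Assumes an `hg` executable on PATH; the helper name is hypothetical.
import subprocess


def locate_files(repo_path, *patterns):
    proc = subprocess.run(
        ["hg", "--cwd", repo_path, "locate", "-0", *patterns],
        capture_output=True,
    )
    # `hg locate` exits 1 when nothing matches; an empty list is returned.
    return [
        name.decode("utf-8", "replace")
        for name in proc.stdout.split(b"\0")
        if name
    ]
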
4360 @command(
4363 @command(
4361 b'log|history',
4364 b'log|history',
4362 [
4365 [
4363 (
4366 (
4364 b'f',
4367 b'f',
4365 b'follow',
4368 b'follow',
4366 None,
4369 None,
4367 _(
4370 _(
4368 b'follow changeset history, or file history across copies and renames'
4371 b'follow changeset history, or file history across copies and renames'
4369 ),
4372 ),
4370 ),
4373 ),
4371 (
4374 (
4372 b'',
4375 b'',
4373 b'follow-first',
4376 b'follow-first',
4374 None,
4377 None,
4375 _(b'only follow the first parent of merge changesets (DEPRECATED)'),
4378 _(b'only follow the first parent of merge changesets (DEPRECATED)'),
4376 ),
4379 ),
4377 (
4380 (
4378 b'd',
4381 b'd',
4379 b'date',
4382 b'date',
4380 b'',
4383 b'',
4381 _(b'show revisions matching date spec'),
4384 _(b'show revisions matching date spec'),
4382 _(b'DATE'),
4385 _(b'DATE'),
4383 ),
4386 ),
4384 (b'C', b'copies', None, _(b'show copied files')),
4387 (b'C', b'copies', None, _(b'show copied files')),
4385 (
4388 (
4386 b'k',
4389 b'k',
4387 b'keyword',
4390 b'keyword',
4388 [],
4391 [],
4389 _(b'do case-insensitive search for a given text'),
4392 _(b'do case-insensitive search for a given text'),
4390 _(b'TEXT'),
4393 _(b'TEXT'),
4391 ),
4394 ),
4392 (
4395 (
4393 b'r',
4396 b'r',
4394 b'rev',
4397 b'rev',
4395 [],
4398 [],
4396 _(b'show the specified revision or revset'),
4399 _(b'show the specified revision or revset'),
4397 _(b'REV'),
4400 _(b'REV'),
4398 ),
4401 ),
4399 (
4402 (
4400 b'L',
4403 b'L',
4401 b'line-range',
4404 b'line-range',
4402 [],
4405 [],
4403 _(b'follow line range of specified file (EXPERIMENTAL)'),
4406 _(b'follow line range of specified file (EXPERIMENTAL)'),
4404 _(b'FILE,RANGE'),
4407 _(b'FILE,RANGE'),
4405 ),
4408 ),
4406 (
4409 (
4407 b'',
4410 b'',
4408 b'removed',
4411 b'removed',
4409 None,
4412 None,
4410 _(b'include revisions where files were removed'),
4413 _(b'include revisions where files were removed'),
4411 ),
4414 ),
4412 (
4415 (
4413 b'm',
4416 b'm',
4414 b'only-merges',
4417 b'only-merges',
4415 None,
4418 None,
4416 _(b'show only merges (DEPRECATED) (use -r "merge()" instead)'),
4419 _(b'show only merges (DEPRECATED) (use -r "merge()" instead)'),
4417 ),
4420 ),
4418 (b'u', b'user', [], _(b'revisions committed by user'), _(b'USER')),
4421 (b'u', b'user', [], _(b'revisions committed by user'), _(b'USER')),
4419 (
4422 (
4420 b'',
4423 b'',
4421 b'only-branch',
4424 b'only-branch',
4422 [],
4425 [],
4423 _(
4426 _(
4424 b'show only changesets within the given named branch (DEPRECATED)'
4427 b'show only changesets within the given named branch (DEPRECATED)'
4425 ),
4428 ),
4426 _(b'BRANCH'),
4429 _(b'BRANCH'),
4427 ),
4430 ),
4428 (
4431 (
4429 b'b',
4432 b'b',
4430 b'branch',
4433 b'branch',
4431 [],
4434 [],
4432 _(b'show changesets within the given named branch'),
4435 _(b'show changesets within the given named branch'),
4433 _(b'BRANCH'),
4436 _(b'BRANCH'),
4434 ),
4437 ),
4435 (
4438 (
4436 b'P',
4439 b'P',
4437 b'prune',
4440 b'prune',
4438 [],
4441 [],
4439 _(b'do not display revision or any of its ancestors'),
4442 _(b'do not display revision or any of its ancestors'),
4440 _(b'REV'),
4443 _(b'REV'),
4441 ),
4444 ),
4442 ]
4445 ]
4443 + logopts
4446 + logopts
4444 + walkopts,
4447 + walkopts,
4445 _(b'[OPTION]... [FILE]'),
4448 _(b'[OPTION]... [FILE]'),
4446 helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
4449 helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
4447 helpbasic=True,
4450 helpbasic=True,
4448 inferrepo=True,
4451 inferrepo=True,
4449 intents={INTENT_READONLY},
4452 intents={INTENT_READONLY},
4450 )
4453 )
4451 def log(ui, repo, *pats, **opts):
4454 def log(ui, repo, *pats, **opts):
4452 """show revision history of entire repository or files
4455 """show revision history of entire repository or files
4453
4456
4454 Print the revision history of the specified files or the entire
4457 Print the revision history of the specified files or the entire
4455 project.
4458 project.
4456
4459
4457 If no revision range is specified, the default is ``tip:0`` unless
4460 If no revision range is specified, the default is ``tip:0`` unless
4458 --follow is set, in which case the working directory parent is
4461 --follow is set, in which case the working directory parent is
4459 used as the starting revision.
4462 used as the starting revision.
4460
4463
4461 File history is shown without following rename or copy history of
4464 File history is shown without following rename or copy history of
4462 files. Use -f/--follow with a filename to follow history across
4465 files. Use -f/--follow with a filename to follow history across
4463 renames and copies. --follow without a filename will only show
4466 renames and copies. --follow without a filename will only show
4464 ancestors of the starting revision.
4467 ancestors of the starting revision.
4465
4468
4466 By default this command prints revision number and changeset id,
4469 By default this command prints revision number and changeset id,
4467 tags, non-trivial parents, user, date and time, and a summary for
4470 tags, non-trivial parents, user, date and time, and a summary for
4468 each commit. When the -v/--verbose switch is used, the list of
4471 each commit. When the -v/--verbose switch is used, the list of
4469 changed files and full commit message are shown.
4472 changed files and full commit message are shown.
4470
4473
4471 With --graph the revisions are shown as an ASCII art DAG with the most
4474 With --graph the revisions are shown as an ASCII art DAG with the most
4472 recent changeset at the top.
4475 recent changeset at the top.
4473 'o' is a changeset, '@' is a working directory parent, '%' is a changeset
4476 'o' is a changeset, '@' is a working directory parent, '%' is a changeset
4474 involved in an unresolved merge conflict, '_' closes a branch,
4477 involved in an unresolved merge conflict, '_' closes a branch,
4475 'x' is obsolete, '*' is unstable, and '+' represents a fork where the
4478 'x' is obsolete, '*' is unstable, and '+' represents a fork where the
4476 changeset from the lines below is a parent of the 'o' merge on the same
4479 changeset from the lines below is a parent of the 'o' merge on the same
4477 line.
4480 line.
4478 Paths in the DAG are represented with '|', '/' and so forth. ':' in place
4481 Paths in the DAG are represented with '|', '/' and so forth. ':' in place
4479 of a '|' indicates one or more revisions in a path are omitted.
4482 of a '|' indicates one or more revisions in a path are omitted.
4480
4483
4481 .. container:: verbose
4484 .. container:: verbose
4482
4485
4483 Use -L/--line-range FILE,M:N options to follow the history of lines
4486 Use -L/--line-range FILE,M:N options to follow the history of lines
4484 from M to N in FILE. With -p/--patch only diff hunks affecting
4487 from M to N in FILE. With -p/--patch only diff hunks affecting
4485 the specified line range will be shown. This option requires --follow;
4488 the specified line range will be shown. This option requires --follow;
4486 it can be specified multiple times. Currently, this option is not
4489 it can be specified multiple times. Currently, this option is not
4487 compatible with --graph. This option is experimental.
4490 compatible with --graph. This option is experimental.
4488
4491
4489 .. note::
4492 .. note::
4490
4493
4491 :hg:`log --patch` may generate unexpected diff output for merge
4494 :hg:`log --patch` may generate unexpected diff output for merge
4492 changesets, as it will only compare the merge changeset against
4495 changesets, as it will only compare the merge changeset against
4493 its first parent. Also, only files different from BOTH parents
4496 its first parent. Also, only files different from BOTH parents
4494 will appear in files:.
4497 will appear in files:.
4495
4498
4496 .. note::
4499 .. note::
4497
4500
4498 For performance reasons, :hg:`log FILE` may omit duplicate changes
4501 For performance reasons, :hg:`log FILE` may omit duplicate changes
4499 made on branches and will not show removals or mode changes. To
4502 made on branches and will not show removals or mode changes. To
4500 see all such changes, use the --removed switch.
4503 see all such changes, use the --removed switch.
4501
4504
4502 .. container:: verbose
4505 .. container:: verbose
4503
4506
4504 .. note::
4507 .. note::
4505
4508
4506 The history resulting from -L/--line-range options depends on diff
4509 The history resulting from -L/--line-range options depends on diff
4507 options; for instance, if whitespace is ignored, changes consisting
4510 options; for instance, if whitespace is ignored, changes consisting
4508 only of whitespace in the specified line range will not be listed.
4511 only of whitespace in the specified line range will not be listed.
4509
4512
4510 .. container:: verbose
4513 .. container:: verbose
4511
4514
4512 Some examples:
4515 Some examples:
4513
4516
4514 - changesets with full descriptions and file lists::
4517 - changesets with full descriptions and file lists::
4515
4518
4516 hg log -v
4519 hg log -v
4517
4520
4518 - changesets ancestral to the working directory::
4521 - changesets ancestral to the working directory::
4519
4522
4520 hg log -f
4523 hg log -f
4521
4524
4522 - last 10 commits on the current branch::
4525 - last 10 commits on the current branch::
4523
4526
4524 hg log -l 10 -b .
4527 hg log -l 10 -b .
4525
4528
4526 - changesets showing all modifications of a file, including removals::
4529 - changesets showing all modifications of a file, including removals::
4527
4530
4528 hg log --removed file.c
4531 hg log --removed file.c
4529
4532
4530 - all changesets that touch a directory, with diffs, excluding merges::
4533 - all changesets that touch a directory, with diffs, excluding merges::
4531
4534
4532 hg log -Mp lib/
4535 hg log -Mp lib/
4533
4536
4534 - all revision numbers that match a keyword::
4537 - all revision numbers that match a keyword::
4535
4538
4536 hg log -k bug --template "{rev}\\n"
4539 hg log -k bug --template "{rev}\\n"
4537
4540
4538 - the full hash identifier of the working directory parent::
4541 - the full hash identifier of the working directory parent::
4539
4542
4540 hg log -r . --template "{node}\\n"
4543 hg log -r . --template "{node}\\n"
4541
4544
4542 - list available log templates::
4545 - list available log templates::
4543
4546
4544 hg log -T list
4547 hg log -T list
4545
4548
4546 - check if a given changeset is included in a tagged release::
4549 - check if a given changeset is included in a tagged release::
4547
4550
4548 hg log -r "a21ccf and ancestor(1.9)"
4551 hg log -r "a21ccf and ancestor(1.9)"
4549
4552
4550 - find all changesets by some user in a date range::
4553 - find all changesets by some user in a date range::
4551
4554
4552 hg log -k alice -d "may 2008 to jul 2008"
4555 hg log -k alice -d "may 2008 to jul 2008"
4553
4556
4554 - summary of all changesets after the last tag::
4557 - summary of all changesets after the last tag::
4555
4558
4556 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
4559 hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
4557
4560
4558 - changesets touching lines 13 to 23 for file.c::
4561 - changesets touching lines 13 to 23 for file.c::
4559
4562
4560 hg log -L file.c,13:23
4563 hg log -L file.c,13:23
4561
4564
4562 - changesets touching lines 13 to 23 for file.c and lines 2 to 6 of
4565 - changesets touching lines 13 to 23 for file.c and lines 2 to 6 of
4563 main.c with patch::
4566 main.c with patch::
4564
4567
4565 hg log -L file.c,13:23 -L main.c,2:6 -p
4568 hg log -L file.c,13:23 -L main.c,2:6 -p
4566
4569
4567 See :hg:`help dates` for a list of formats valid for -d/--date.
4570 See :hg:`help dates` for a list of formats valid for -d/--date.
4568
4571
4569 See :hg:`help revisions` for more about specifying and ordering
4572 See :hg:`help revisions` for more about specifying and ordering
4570 revisions.
4573 revisions.
4571
4574
4572 See :hg:`help templates` for more about pre-packaged styles and
4575 See :hg:`help templates` for more about pre-packaged styles and
4573 specifying custom templates. The default template used by the log
4576 specifying custom templates. The default template used by the log
4574 command can be customized via the ``command-templates.log`` configuration
4577 command can be customized via the ``command-templates.log`` configuration
4575 setting.
4578 setting.
4576
4579
4577 Returns 0 on success.
4580 Returns 0 on success.
4578
4581
4579 """
4582 """
4580 opts = pycompat.byteskwargs(opts)
4583 opts = pycompat.byteskwargs(opts)
4581 linerange = opts.get(b'line_range')
4584 linerange = opts.get(b'line_range')
4582
4585
4583 if linerange and not opts.get(b'follow'):
4586 if linerange and not opts.get(b'follow'):
4584 raise error.Abort(_(b'--line-range requires --follow'))
4587 raise error.Abort(_(b'--line-range requires --follow'))
4585
4588
4586 if linerange and pats:
4589 if linerange and pats:
4587 # TODO: take pats as patterns with no line-range filter
4590 # TODO: take pats as patterns with no line-range filter
4588 raise error.Abort(
4591 raise error.Abort(
4589 _(b'FILE arguments are not compatible with --line-range option')
4592 _(b'FILE arguments are not compatible with --line-range option')
4590 )
4593 )
4591
4594
4592 repo = scmutil.unhidehashlikerevs(repo, opts.get(b'rev'), b'nowarn')
4595 repo = scmutil.unhidehashlikerevs(repo, opts.get(b'rev'), b'nowarn')
4593 revs, differ = logcmdutil.getrevs(
4596 revs, differ = logcmdutil.getrevs(
4594 repo, logcmdutil.parseopts(ui, pats, opts)
4597 repo, logcmdutil.parseopts(ui, pats, opts)
4595 )
4598 )
4596 if linerange:
4599 if linerange:
4597 # TODO: should follow file history from logcmdutil._initialrevs(),
4600 # TODO: should follow file history from logcmdutil._initialrevs(),
4598 # then filter the result by logcmdutil._makerevset() and --limit
4601 # then filter the result by logcmdutil._makerevset() and --limit
4599 revs, differ = logcmdutil.getlinerangerevs(repo, revs, opts)
4602 revs, differ = logcmdutil.getlinerangerevs(repo, revs, opts)
4600
4603
4601 getcopies = None
4604 getcopies = None
4602 if opts.get(b'copies'):
4605 if opts.get(b'copies'):
4603 endrev = None
4606 endrev = None
4604 if revs:
4607 if revs:
4605 endrev = revs.max() + 1
4608 endrev = revs.max() + 1
4606 getcopies = scmutil.getcopiesfn(repo, endrev=endrev)
4609 getcopies = scmutil.getcopiesfn(repo, endrev=endrev)
4607
4610
4608 ui.pager(b'log')
4611 ui.pager(b'log')
4609 displayer = logcmdutil.changesetdisplayer(
4612 displayer = logcmdutil.changesetdisplayer(
4610 ui, repo, opts, differ, buffered=True
4613 ui, repo, opts, differ, buffered=True
4611 )
4614 )
4612 if opts.get(b'graph'):
4615 if opts.get(b'graph'):
4613 displayfn = logcmdutil.displaygraphrevs
4616 displayfn = logcmdutil.displaygraphrevs
4614 else:
4617 else:
4615 displayfn = logcmdutil.displayrevs
4618 displayfn = logcmdutil.displayrevs
4616 displayfn(ui, repo, revs, displayer, getcopies)
4619 displayfn(ui, repo, revs, displayer, getcopies)
4617
4620
4618
4621
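# --------------------------------------------------------------------
# Illustrative sketch (not part of this changeset): reading the full
# hash of the working directory parent with the template example from
# the docstring above.  Assumes an `hg` executable on PATH; the helper
# name is hypothetical.
import subprocess


def wdir_parent_node(repo_path):
    out = subprocess.run(
        ["hg", "--cwd", repo_path, "log", "-r", ".", "--template", "{node}\n"],
        capture_output=True,
        check=True,
    ).stdout
    return out.decode("ascii").strip()
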
4619 @command(
4622 @command(
4620 b'manifest',
4623 b'manifest',
4621 [
4624 [
4622 (b'r', b'rev', b'', _(b'revision to display'), _(b'REV')),
4625 (b'r', b'rev', b'', _(b'revision to display'), _(b'REV')),
4623 (b'', b'all', False, _(b"list files from all revisions")),
4626 (b'', b'all', False, _(b"list files from all revisions")),
4624 ]
4627 ]
4625 + formatteropts,
4628 + formatteropts,
4626 _(b'[-r REV]'),
4629 _(b'[-r REV]'),
4627 helpcategory=command.CATEGORY_MAINTENANCE,
4630 helpcategory=command.CATEGORY_MAINTENANCE,
4628 intents={INTENT_READONLY},
4631 intents={INTENT_READONLY},
4629 )
4632 )
4630 def manifest(ui, repo, node=None, rev=None, **opts):
4633 def manifest(ui, repo, node=None, rev=None, **opts):
4631 """output the current or given revision of the project manifest
4634 """output the current or given revision of the project manifest
4632
4635
4633 Print a list of version controlled files for the given revision.
4636 Print a list of version controlled files for the given revision.
4634 If no revision is given, the first parent of the working directory
4637 If no revision is given, the first parent of the working directory
4635 is used, or the null revision if no revision is checked out.
4638 is used, or the null revision if no revision is checked out.
4636
4639
4637 With -v, print file permissions, symlink and executable bits.
4640 With -v, print file permissions, symlink and executable bits.
4638 With --debug, print file revision hashes.
4641 With --debug, print file revision hashes.
4639
4642
4640 If option --all is specified, the list of all files from all revisions
4643 If option --all is specified, the list of all files from all revisions
4641 is printed. This includes deleted and renamed files.
4644 is printed. This includes deleted and renamed files.
4642
4645
4643 Returns 0 on success.
4646 Returns 0 on success.
4644 """
4647 """
4645 opts = pycompat.byteskwargs(opts)
4648 opts = pycompat.byteskwargs(opts)
4646 fm = ui.formatter(b'manifest', opts)
4649 fm = ui.formatter(b'manifest', opts)
4647
4650
4648 if opts.get(b'all'):
4651 if opts.get(b'all'):
4649 if rev or node:
4652 if rev or node:
4650 raise error.Abort(_(b"can't specify a revision with --all"))
4653 raise error.Abort(_(b"can't specify a revision with --all"))
4651
4654
4652 res = set()
4655 res = set()
4653 for rev in repo:
4656 for rev in repo:
4654 ctx = repo[rev]
4657 ctx = repo[rev]
4655 res |= set(ctx.files())
4658 res |= set(ctx.files())
4656
4659
4657 ui.pager(b'manifest')
4660 ui.pager(b'manifest')
4658 for f in sorted(res):
4661 for f in sorted(res):
4659 fm.startitem()
4662 fm.startitem()
4660 fm.write(b"path", b'%s\n', f)
4663 fm.write(b"path", b'%s\n', f)
4661 fm.end()
4664 fm.end()
4662 return
4665 return
4663
4666
4664 if rev and node:
4667 if rev and node:
4665 raise error.Abort(_(b"please specify just one revision"))
4668 raise error.Abort(_(b"please specify just one revision"))
4666
4669
4667 if not node:
4670 if not node:
4668 node = rev
4671 node = rev
4669
4672
4670 char = {b'l': b'@', b'x': b'*', b'': b'', b't': b'd'}
4673 char = {b'l': b'@', b'x': b'*', b'': b'', b't': b'd'}
4671 mode = {b'l': b'644', b'x': b'755', b'': b'644', b't': b'755'}
4674 mode = {b'l': b'644', b'x': b'755', b'': b'644', b't': b'755'}
4672 if node:
4675 if node:
4673 repo = scmutil.unhidehashlikerevs(repo, [node], b'nowarn')
4676 repo = scmutil.unhidehashlikerevs(repo, [node], b'nowarn')
4674 ctx = scmutil.revsingle(repo, node)
4677 ctx = scmutil.revsingle(repo, node)
4675 mf = ctx.manifest()
4678 mf = ctx.manifest()
4676 ui.pager(b'manifest')
4679 ui.pager(b'manifest')
4677 for f in ctx:
4680 for f in ctx:
4678 fm.startitem()
4681 fm.startitem()
4679 fm.context(ctx=ctx)
4682 fm.context(ctx=ctx)
4680 fl = ctx[f].flags()
4683 fl = ctx[f].flags()
4681 fm.condwrite(ui.debugflag, b'hash', b'%s ', hex(mf[f]))
4684 fm.condwrite(ui.debugflag, b'hash', b'%s ', hex(mf[f]))
4682 fm.condwrite(ui.verbose, b'mode type', b'%s %1s ', mode[fl], char[fl])
4685 fm.condwrite(ui.verbose, b'mode type', b'%s %1s ', mode[fl], char[fl])
4683 fm.write(b'path', b'%s\n', f)
4686 fm.write(b'path', b'%s\n', f)
4684 fm.end()
4687 fm.end()
4685
4688
4686
4689
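# --------------------------------------------------------------------
# Illustrative sketch (not part of this changeset): listing the files
# tracked in a given revision via the command described above.
# Assumes an `hg` executable on PATH; the helper name is hypothetical.
import subprocess


def manifest_files(repo_path, rev="."):
    out = subprocess.run(
        ["hg", "--cwd", repo_path, "manifest", "-r", rev],
        capture_output=True,
        check=True,
    ).stdout
    # One path per line, as written by fm.write(b'path', b'%s\n', f) above.
    return out.decode("utf-8", "replace").splitlines()
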
4687 @command(
4690 @command(
4688 b'merge',
4691 b'merge',
4689 [
4692 [
4690 (
4693 (
4691 b'f',
4694 b'f',
4692 b'force',
4695 b'force',
4693 None,
4696 None,
4694 _(b'force a merge including outstanding changes (DEPRECATED)'),
4697 _(b'force a merge including outstanding changes (DEPRECATED)'),
4695 ),
4698 ),
4696 (b'r', b'rev', b'', _(b'revision to merge'), _(b'REV')),
4699 (b'r', b'rev', b'', _(b'revision to merge'), _(b'REV')),
4697 (
4700 (
4698 b'P',
4701 b'P',
4699 b'preview',
4702 b'preview',
4700 None,
4703 None,
4701 _(b'review revisions to merge (no merge is performed)'),
4704 _(b'review revisions to merge (no merge is performed)'),
4702 ),
4705 ),
4703 (b'', b'abort', None, _(b'abort the ongoing merge')),
4706 (b'', b'abort', None, _(b'abort the ongoing merge')),
4704 ]
4707 ]
4705 + mergetoolopts,
4708 + mergetoolopts,
4706 _(b'[-P] [[-r] REV]'),
4709 _(b'[-P] [[-r] REV]'),
4707 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
4710 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
4708 helpbasic=True,
4711 helpbasic=True,
4709 )
4712 )
4710 def merge(ui, repo, node=None, **opts):
4713 def merge(ui, repo, node=None, **opts):
4711 """merge another revision into working directory
4714 """merge another revision into working directory
4712
4715
4713 The current working directory is updated with all changes made in
4716 The current working directory is updated with all changes made in
4714 the requested revision since the last common predecessor revision.
4717 the requested revision since the last common predecessor revision.
4715
4718
4716 Files that changed between either parent are marked as changed for
4719 Files that changed between either parent are marked as changed for
4717 the next commit and a commit must be performed before any further
4720 the next commit and a commit must be performed before any further
4718 updates to the repository are allowed. The next commit will have
4721 updates to the repository are allowed. The next commit will have
4719 two parents.
4722 two parents.
4720
4723
4721 ``--tool`` can be used to specify the merge tool used for file
4724 ``--tool`` can be used to specify the merge tool used for file
4722 merges. It overrides the HGMERGE environment variable and your
4725 merges. It overrides the HGMERGE environment variable and your
4723 configuration files. See :hg:`help merge-tools` for options.
4726 configuration files. See :hg:`help merge-tools` for options.
4724
4727
4725 If no revision is specified, the working directory's parent is a
4728 If no revision is specified, the working directory's parent is a
4726 head revision, and the current branch contains exactly one other
4729 head revision, and the current branch contains exactly one other
4727 head, the other head is merged with it by default. Otherwise, an
4730 head, the other head is merged with it by default. Otherwise, an
4728 explicit revision with which to merge must be provided.
4731 explicit revision with which to merge must be provided.
4729
4732
4730 See :hg:`help resolve` for information on handling file conflicts.
4733 See :hg:`help resolve` for information on handling file conflicts.
4731
4734
4732 To undo an uncommitted merge, use :hg:`merge --abort` which
4735 To undo an uncommitted merge, use :hg:`merge --abort` which
4733 will check out a clean copy of the original merge parent, losing
4736 will check out a clean copy of the original merge parent, losing
4734 all changes.
4737 all changes.
4735
4738
4736 Returns 0 on success, 1 if there are unresolved files.
4739 Returns 0 on success, 1 if there are unresolved files.
4737 """
4740 """
4738
4741
4739 opts = pycompat.byteskwargs(opts)
4742 opts = pycompat.byteskwargs(opts)
4740 abort = opts.get(b'abort')
4743 abort = opts.get(b'abort')
4741 if abort and repo.dirstate.p2() == nullid:
4744 if abort and repo.dirstate.p2() == nullid:
4742 cmdutil.wrongtooltocontinue(repo, _(b'merge'))
4745 cmdutil.wrongtooltocontinue(repo, _(b'merge'))
4743 cmdutil.check_incompatible_arguments(opts, b'abort', [b'rev', b'preview'])
4746 cmdutil.check_incompatible_arguments(opts, b'abort', [b'rev', b'preview'])
4744 if abort:
4747 if abort:
4745 state = cmdutil.getunfinishedstate(repo)
4748 state = cmdutil.getunfinishedstate(repo)
4746 if state and state._opname != b'merge':
4749 if state and state._opname != b'merge':
4747 raise error.Abort(
4750 raise error.Abort(
4748 _(b'cannot abort merge with %s in progress') % (state._opname),
4751 _(b'cannot abort merge with %s in progress') % (state._opname),
4749 hint=state.hint(),
4752 hint=state.hint(),
4750 )
4753 )
4751 if node:
4754 if node:
4752 raise error.Abort(_(b"cannot specify a node with --abort"))
4755 raise error.Abort(_(b"cannot specify a node with --abort"))
4753 return hg.abortmerge(repo.ui, repo)
4756 return hg.abortmerge(repo.ui, repo)
4754
4757
4755 if opts.get(b'rev') and node:
4758 if opts.get(b'rev') and node:
4756 raise error.Abort(_(b"please specify just one revision"))
4759 raise error.Abort(_(b"please specify just one revision"))
4757 if not node:
4760 if not node:
4758 node = opts.get(b'rev')
4761 node = opts.get(b'rev')
4759
4762
4760 if node:
4763 if node:
4761 ctx = scmutil.revsingle(repo, node)
4764 ctx = scmutil.revsingle(repo, node)
4762 else:
4765 else:
4763 if ui.configbool(b'commands', b'merge.require-rev'):
4766 if ui.configbool(b'commands', b'merge.require-rev'):
4764 raise error.Abort(
4767 raise error.Abort(
4765 _(
4768 _(
4766 b'configuration requires specifying revision to merge '
4769 b'configuration requires specifying revision to merge '
4767 b'with'
4770 b'with'
4768 )
4771 )
4769 )
4772 )
4770 ctx = repo[destutil.destmerge(repo)]
4773 ctx = repo[destutil.destmerge(repo)]
4771
4774
4772 if ctx.node() is None:
4775 if ctx.node() is None:
4773 raise error.Abort(_(b'merging with the working copy has no effect'))
4776 raise error.Abort(_(b'merging with the working copy has no effect'))
4774
4777
4775 if opts.get(b'preview'):
4778 if opts.get(b'preview'):
4776 # find nodes that are ancestors of p2 but not of p1
4779 # find nodes that are ancestors of p2 but not of p1
4777 p1 = repo[b'.'].node()
4780 p1 = repo[b'.'].node()
4778 p2 = ctx.node()
4781 p2 = ctx.node()
4779 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
4782 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
4780
4783
4781 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
4784 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
4782 for node in nodes:
4785 for node in nodes:
4783 displayer.show(repo[node])
4786 displayer.show(repo[node])
4784 displayer.close()
4787 displayer.close()
4785 return 0
4788 return 0
4786
4789
4787 # ui.forcemerge is an internal variable, do not document
4790 # ui.forcemerge is an internal variable, do not document
4788 overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
4791 overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
4789 with ui.configoverride(overrides, b'merge'):
4792 with ui.configoverride(overrides, b'merge'):
4790 force = opts.get(b'force')
4793 force = opts.get(b'force')
4791 labels = [b'working copy', b'merge rev']
4794 labels = [b'working copy', b'merge rev']
4792 return hg.merge(ctx, force=force, labels=labels)
4795 return hg.merge(ctx, force=force, labels=labels)
4793
4796
4794
4797
4795 statemod.addunfinished(
4798 statemod.addunfinished(
4796 b'merge',
4799 b'merge',
4797 fname=None,
4800 fname=None,
4798 clearable=True,
4801 clearable=True,
4799 allowcommit=True,
4802 allowcommit=True,
4800 cmdmsg=_(b'outstanding uncommitted merge'),
4803 cmdmsg=_(b'outstanding uncommitted merge'),
4801 abortfunc=hg.abortmerge,
4804 abortfunc=hg.abortmerge,
4802 statushint=_(
4805 statushint=_(
4803 b'To continue: hg commit\nTo abort: hg merge --abort'
4806 b'To continue: hg commit\nTo abort: hg merge --abort'
4804 ),
4807 ),
4805 cmdhint=_(b"use 'hg commit' or 'hg merge --abort'"),
4808 cmdhint=_(b"use 'hg commit' or 'hg merge --abort'"),
4806 )
4809 )
4807
4810
4808
4811
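# --------------------------------------------------------------------
# Illustrative sketch (not part of this changeset): previewing and then
# performing a merge, following the -P/--preview and exit-code behavior
# documented above.  Assumes an `hg` executable on PATH; the helper
# name is hypothetical.
import subprocess


def merge_with_preview(repo_path, rev=None):
    base = ["hg", "--cwd", repo_path, "merge"]
    if rev:
        base += ["-r", rev]
    # --preview only lists the revisions that would be merged.
    subprocess.run(base + ["--preview"], check=True)
    # The actual merge returns 1 when there are unresolved files.
    return subprocess.run(base).returncode
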
4809 @command(
4812 @command(
4810 b'outgoing|out',
4813 b'outgoing|out',
4811 [
4814 [
4812 (
4815 (
4813 b'f',
4816 b'f',
4814 b'force',
4817 b'force',
4815 None,
4818 None,
4816 _(b'run even when the destination is unrelated'),
4819 _(b'run even when the destination is unrelated'),
4817 ),
4820 ),
4818 (
4821 (
4819 b'r',
4822 b'r',
4820 b'rev',
4823 b'rev',
4821 [],
4824 [],
4822 _(b'a changeset intended to be included in the destination'),
4825 _(b'a changeset intended to be included in the destination'),
4823 _(b'REV'),
4826 _(b'REV'),
4824 ),
4827 ),
4825 (b'n', b'newest-first', None, _(b'show newest record first')),
4828 (b'n', b'newest-first', None, _(b'show newest record first')),
4826 (b'B', b'bookmarks', False, _(b'compare bookmarks')),
4829 (b'B', b'bookmarks', False, _(b'compare bookmarks')),
4827 (
4830 (
4828 b'b',
4831 b'b',
4829 b'branch',
4832 b'branch',
4830 [],
4833 [],
4831 _(b'a specific branch you would like to push'),
4834 _(b'a specific branch you would like to push'),
4832 _(b'BRANCH'),
4835 _(b'BRANCH'),
4833 ),
4836 ),
4834 ]
4837 ]
4835 + logopts
4838 + logopts
4836 + remoteopts
4839 + remoteopts
4837 + subrepoopts,
4840 + subrepoopts,
4838 _(b'[-M] [-p] [-n] [-f] [-r REV]... [DEST]'),
4841 _(b'[-M] [-p] [-n] [-f] [-r REV]... [DEST]'),
4839 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
4842 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
4840 )
4843 )
4841 def outgoing(ui, repo, dest=None, **opts):
4844 def outgoing(ui, repo, dest=None, **opts):
4842 """show changesets not found in the destination
4845 """show changesets not found in the destination
4843
4846
4844 Show changesets not found in the specified destination repository
4847 Show changesets not found in the specified destination repository
4845 or the default push location. These are the changesets that would
4848 or the default push location. These are the changesets that would
4846 be pushed if a push was requested.
4849 be pushed if a push was requested.
4847
4850
4848 See pull for details of valid destination formats.
4851 See pull for details of valid destination formats.
4849
4852
4850 .. container:: verbose
4853 .. container:: verbose
4851
4854
4852 With -B/--bookmarks, the result of bookmark comparison between
4855 With -B/--bookmarks, the result of bookmark comparison between
4853 local and remote repositories is displayed. With -v/--verbose,
4856 local and remote repositories is displayed. With -v/--verbose,
4854 status is also displayed for each bookmark like below::
4857 status is also displayed for each bookmark like below::
4855
4858
4856 BM1 01234567890a added
4859 BM1 01234567890a added
4857 BM2 deleted
4860 BM2 deleted
4858 BM3 234567890abc advanced
4861 BM3 234567890abc advanced
4859 BM4 34567890abcd diverged
4862 BM4 34567890abcd diverged
4860 BM5 4567890abcde changed
4863 BM5 4567890abcde changed
4861
4864
4862 The action taken when pushing depends on the
4865 The action taken when pushing depends on the
4863 status of each bookmark:
4866 status of each bookmark:
4864
4867
4865 :``added``: push with ``-B`` will create it
4868 :``added``: push with ``-B`` will create it
4866 :``deleted``: push with ``-B`` will delete it
4869 :``deleted``: push with ``-B`` will delete it
4867 :``advanced``: push will update it
4870 :``advanced``: push will update it
4868 :``diverged``: push with ``-B`` will update it
4871 :``diverged``: push with ``-B`` will update it
4869 :``changed``: push with ``-B`` will update it
4872 :``changed``: push with ``-B`` will update it
4870
4873
4871 From the point of view of pushing behavior, bookmarks
4874 From the point of view of pushing behavior, bookmarks
4872 existing only in the remote repository are treated as
4875 existing only in the remote repository are treated as
4873 ``deleted``, even if they are in fact added remotely.
4876 ``deleted``, even if they are in fact added remotely.
4874
4877
4875 Returns 0 if there are outgoing changes, 1 otherwise.
4878 Returns 0 if there are outgoing changes, 1 otherwise.
4876 """
4879 """
4877 # hg._outgoing() needs to re-resolve the path in order to handle #branch
4880 # hg._outgoing() needs to re-resolve the path in order to handle #branch
4878 # style URLs, so don't overwrite dest.
4881 # style URLs, so don't overwrite dest.
4879 path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
4882 path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
4880 if not path:
4883 if not path:
4881 raise error.Abort(
4884 raise error.Abort(
4882 _(b'default repository not configured!'),
4885 _(b'default repository not configured!'),
4883 hint=_(b"see 'hg help config.paths'"),
4886 hint=_(b"see 'hg help config.paths'"),
4884 )
4887 )
4885
4888
4886 opts = pycompat.byteskwargs(opts)
4889 opts = pycompat.byteskwargs(opts)
4887 if opts.get(b'graph'):
4890 if opts.get(b'graph'):
4888 logcmdutil.checkunsupportedgraphflags([], opts)
4891 logcmdutil.checkunsupportedgraphflags([], opts)
4889 o, other = hg._outgoing(ui, repo, dest, opts)
4892 o, other = hg._outgoing(ui, repo, dest, opts)
4890 if not o:
4893 if not o:
4891 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4894 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4892 return
4895 return
4893
4896
4894 revdag = logcmdutil.graphrevs(repo, o, opts)
4897 revdag = logcmdutil.graphrevs(repo, o, opts)
4895 ui.pager(b'outgoing')
4898 ui.pager(b'outgoing')
4896 displayer = logcmdutil.changesetdisplayer(ui, repo, opts, buffered=True)
4899 displayer = logcmdutil.changesetdisplayer(ui, repo, opts, buffered=True)
4897 logcmdutil.displaygraph(
4900 logcmdutil.displaygraph(
4898 ui, repo, revdag, displayer, graphmod.asciiedges
4901 ui, repo, revdag, displayer, graphmod.asciiedges
4899 )
4902 )
4900 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4903 cmdutil.outgoinghooks(ui, repo, other, opts, o)
4901 return 0
4904 return 0
4902
4905
4903 if opts.get(b'bookmarks'):
4906 if opts.get(b'bookmarks'):
4904 dest = path.pushloc or path.loc
4907 dest = path.pushloc or path.loc
4905 other = hg.peer(repo, opts, dest)
4908 other = hg.peer(repo, opts, dest)
4906 if b'bookmarks' not in other.listkeys(b'namespaces'):
4909 if b'bookmarks' not in other.listkeys(b'namespaces'):
4907 ui.warn(_(b"remote doesn't support bookmarks\n"))
4910 ui.warn(_(b"remote doesn't support bookmarks\n"))
4908 return 0
4911 return 0
4909 ui.status(_(b'comparing with %s\n') % util.hidepassword(dest))
4912 ui.status(_(b'comparing with %s\n') % util.hidepassword(dest))
4910 ui.pager(b'outgoing')
4913 ui.pager(b'outgoing')
4911 return bookmarks.outgoing(ui, repo, other)
4914 return bookmarks.outgoing(ui, repo, other)
4912
4915
4913 repo._subtoppath = path.pushloc or path.loc
4916 repo._subtoppath = path.pushloc or path.loc
4914 try:
4917 try:
4915 return hg.outgoing(ui, repo, dest, opts)
4918 return hg.outgoing(ui, repo, dest, opts)
4916 finally:
4919 finally:
4917 del repo._subtoppath
4920 del repo._subtoppath
4918
4921
4919
4922
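# --------------------------------------------------------------------
# Illustrative sketch (not part of this changeset): using the exit code
# documented above (0 if there are outgoing changes, 1 otherwise) to
# test whether a push would send anything.  Assumes an `hg` executable
# on PATH; the helper name is hypothetical.
import subprocess


def has_outgoing(repo_path, dest=None):
    cmd = ["hg", "--cwd", repo_path, "outgoing", "-q"]
    if dest:
        cmd.append(dest)
    return subprocess.run(cmd, capture_output=True).returncode == 0
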
4920 @command(
4923 @command(
4921 b'parents',
4924 b'parents',
4922 [
4925 [
4923 (
4926 (
4924 b'r',
4927 b'r',
4925 b'rev',
4928 b'rev',
4926 b'',
4929 b'',
4927 _(b'show parents of the specified revision'),
4930 _(b'show parents of the specified revision'),
4928 _(b'REV'),
4931 _(b'REV'),
4929 ),
4932 ),
4930 ]
4933 ]
4931 + templateopts,
4934 + templateopts,
4932 _(b'[-r REV] [FILE]'),
4935 _(b'[-r REV] [FILE]'),
4933 helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
4936 helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
4934 inferrepo=True,
4937 inferrepo=True,
4935 )
4938 )
4936 def parents(ui, repo, file_=None, **opts):
4939 def parents(ui, repo, file_=None, **opts):
4937 """show the parents of the working directory or revision (DEPRECATED)
4940 """show the parents of the working directory or revision (DEPRECATED)
4938
4941
4939 Print the working directory's parent revisions. If a revision is
4942 Print the working directory's parent revisions. If a revision is
4940 given via -r/--rev, the parent of that revision will be printed.
4943 given via -r/--rev, the parent of that revision will be printed.
4941 If a file argument is given, the revision in which the file was
4944 If a file argument is given, the revision in which the file was
4942 last changed (before the working directory revision or the
4945 last changed (before the working directory revision or the
4943 argument to --rev if given) is printed.
4946 argument to --rev if given) is printed.
4944
4947
4945 This command is equivalent to::
4948 This command is equivalent to::
4946
4949
4947 hg log -r "p1()+p2()" or
4950 hg log -r "p1()+p2()" or
4948 hg log -r "p1(REV)+p2(REV)" or
4951 hg log -r "p1(REV)+p2(REV)" or
4949 hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
4952 hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
4950 hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"
4953 hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"
4951
4954
4952 See :hg:`summary` and :hg:`help revsets` for related information.
4955 See :hg:`summary` and :hg:`help revsets` for related information.
4953
4956
4954 Returns 0 on success.
4957 Returns 0 on success.
4955 """
4958 """
4956
4959
4957 opts = pycompat.byteskwargs(opts)
4960 opts = pycompat.byteskwargs(opts)
4958 rev = opts.get(b'rev')
4961 rev = opts.get(b'rev')
4959 if rev:
4962 if rev:
4960 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
4963 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
4961 ctx = scmutil.revsingle(repo, rev, None)
4964 ctx = scmutil.revsingle(repo, rev, None)
4962
4965
4963 if file_:
4966 if file_:
4964 m = scmutil.match(ctx, (file_,), opts)
4967 m = scmutil.match(ctx, (file_,), opts)
4965 if m.anypats() or len(m.files()) != 1:
4968 if m.anypats() or len(m.files()) != 1:
4966 raise error.Abort(_(b'can only specify an explicit filename'))
4969 raise error.Abort(_(b'can only specify an explicit filename'))
4967 file_ = m.files()[0]
4970 file_ = m.files()[0]
4968 filenodes = []
4971 filenodes = []
4969 for cp in ctx.parents():
4972 for cp in ctx.parents():
4970 if not cp:
4973 if not cp:
4971 continue
4974 continue
4972 try:
4975 try:
4973 filenodes.append(cp.filenode(file_))
4976 filenodes.append(cp.filenode(file_))
4974 except error.LookupError:
4977 except error.LookupError:
4975 pass
4978 pass
4976 if not filenodes:
4979 if not filenodes:
4977 raise error.Abort(_(b"'%s' not found in manifest!") % file_)
4980 raise error.Abort(_(b"'%s' not found in manifest!") % file_)
4978 p = []
4981 p = []
4979 for fn in filenodes:
4982 for fn in filenodes:
4980 fctx = repo.filectx(file_, fileid=fn)
4983 fctx = repo.filectx(file_, fileid=fn)
4981 p.append(fctx.node())
4984 p.append(fctx.node())
4982 else:
4985 else:
4983 p = [cp.node() for cp in ctx.parents()]
4986 p = [cp.node() for cp in ctx.parents()]
4984
4987
4985 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
4988 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
4986 for n in p:
4989 for n in p:
4987 if n != nullid:
4990 if n != nullid:
4988 displayer.show(repo[n])
4991 displayer.show(repo[n])
4989 displayer.close()
4992 displayer.close()
4990
4993
4991
4994
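# --------------------------------------------------------------------
# Illustrative sketch (not part of this changeset): the revset
# equivalence stated in the docstring above, used to list the parents
# of a revision.  Assumes an `hg` executable on PATH; the helper name
# is hypothetical.
import subprocess


def parent_revs(repo_path, rev=None):
    revset = "p1(%s)+p2(%s)" % (rev, rev) if rev else "p1()+p2()"
    out = subprocess.run(
        ["hg", "--cwd", repo_path, "log", "-r", revset,
         "--template", "{rev}:{node|short}\n"],
        capture_output=True,
        check=True,
    ).stdout
    return out.decode("utf-8", "replace").splitlines()
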
4992 @command(
4995 @command(
4993 b'paths',
4996 b'paths',
4994 formatteropts,
4997 formatteropts,
4995 _(b'[NAME]'),
4998 _(b'[NAME]'),
4996 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
4999 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
4997 optionalrepo=True,
5000 optionalrepo=True,
4998 intents={INTENT_READONLY},
5001 intents={INTENT_READONLY},
4999 )
5002 )
5000 def paths(ui, repo, search=None, **opts):
5003 def paths(ui, repo, search=None, **opts):
5001 """show aliases for remote repositories
5004 """show aliases for remote repositories
5002
5005
5003 Show definition of symbolic path name NAME. If no name is given,
5006 Show definition of symbolic path name NAME. If no name is given,
5004 show definition of all available names.
5007 show definition of all available names.
5005
5008
5006 Option -q/--quiet suppresses all output when searching for NAME
5009 Option -q/--quiet suppresses all output when searching for NAME
5007 and shows only the path names when listing all definitions.
5010 and shows only the path names when listing all definitions.
5008
5011
5009 Path names are defined in the [paths] section of your
5012 Path names are defined in the [paths] section of your
5010 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
5013 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
5011 repository, ``.hg/hgrc`` is used, too.
5014 repository, ``.hg/hgrc`` is used, too.
5012
5015
5013 The path names ``default`` and ``default-push`` have a special
5016 The path names ``default`` and ``default-push`` have a special
5014 meaning. When performing a push or pull operation, they are used
5017 meaning. When performing a push or pull operation, they are used
5015 as fallbacks if no location is specified on the command-line.
5018 as fallbacks if no location is specified on the command-line.
5016 When ``default-push`` is set, it will be used for push and
5019 When ``default-push`` is set, it will be used for push and
5017 ``default`` will be used for pull; otherwise ``default`` is used
5020 ``default`` will be used for pull; otherwise ``default`` is used
5018 as the fallback for both. When cloning a repository, the clone
5021 as the fallback for both. When cloning a repository, the clone
5019 source is written as ``default`` in ``.hg/hgrc``.
5022 source is written as ``default`` in ``.hg/hgrc``.
5020
5023
5021 .. note::
5024 .. note::
5022
5025
5023 ``default`` and ``default-push`` apply to all inbound (e.g.
5026 ``default`` and ``default-push`` apply to all inbound (e.g.
5024 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
5027 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
5025 and :hg:`bundle`) operations.
5028 and :hg:`bundle`) operations.
5026
5029
5027 See :hg:`help urls` for more information.
5030 See :hg:`help urls` for more information.
5028
5031
5029 .. container:: verbose
5032 .. container:: verbose
5030
5033
5031 Template:
5034 Template:
5032
5035
5033 The following keywords are supported. See also :hg:`help templates`.
5036 The following keywords are supported. See also :hg:`help templates`.
5034
5037
5035 :name: String. Symbolic name of the path alias.
5038 :name: String. Symbolic name of the path alias.
5036 :pushurl: String. URL for push operations.
5039 :pushurl: String. URL for push operations.
5037 :url: String. URL or directory path for the other operations.
5040 :url: String. URL or directory path for the other operations.
5038
5041
5039 Returns 0 on success.
5042 Returns 0 on success.
5040 """
5043 """
5041
5044
5042 opts = pycompat.byteskwargs(opts)
5045 opts = pycompat.byteskwargs(opts)
5043 ui.pager(b'paths')
5046 ui.pager(b'paths')
5044 if search:
5047 if search:
5045 pathitems = [
5048 pathitems = [
5046 (name, path)
5049 (name, path)
5047 for name, path in pycompat.iteritems(ui.paths)
5050 for name, path in pycompat.iteritems(ui.paths)
5048 if name == search
5051 if name == search
5049 ]
5052 ]
5050 else:
5053 else:
5051 pathitems = sorted(pycompat.iteritems(ui.paths))
5054 pathitems = sorted(pycompat.iteritems(ui.paths))
5052
5055
5053 fm = ui.formatter(b'paths', opts)
5056 fm = ui.formatter(b'paths', opts)
5054 if fm.isplain():
5057 if fm.isplain():
5055 hidepassword = util.hidepassword
5058 hidepassword = util.hidepassword
5056 else:
5059 else:
5057 hidepassword = bytes
5060 hidepassword = bytes
5058 if ui.quiet:
5061 if ui.quiet:
5059 namefmt = b'%s\n'
5062 namefmt = b'%s\n'
5060 else:
5063 else:
5061 namefmt = b'%s = '
5064 namefmt = b'%s = '
5062 showsubopts = not search and not ui.quiet
5065 showsubopts = not search and not ui.quiet
5063
5066
5064 for name, path in pathitems:
5067 for name, path in pathitems:
5065 fm.startitem()
5068 fm.startitem()
5066 fm.condwrite(not search, b'name', namefmt, name)
5069 fm.condwrite(not search, b'name', namefmt, name)
5067 fm.condwrite(not ui.quiet, b'url', b'%s\n', hidepassword(path.rawloc))
5070 fm.condwrite(not ui.quiet, b'url', b'%s\n', hidepassword(path.rawloc))
5068 for subopt, value in sorted(path.suboptions.items()):
5071 for subopt, value in sorted(path.suboptions.items()):
5069 assert subopt not in (b'name', b'url')
5072 assert subopt not in (b'name', b'url')
5070 if showsubopts:
5073 if showsubopts:
5071 fm.plain(b'%s:%s = ' % (name, subopt))
5074 fm.plain(b'%s:%s = ' % (name, subopt))
5072 fm.condwrite(showsubopts, subopt, b'%s\n', value)
5075 fm.condwrite(showsubopts, subopt, b'%s\n', value)
5073
5076
5074 fm.end()
5077 fm.end()
5075
5078
5076 if search and not pathitems:
5079 if search and not pathitems:
5077 if not ui.quiet:
5080 if not ui.quiet:
5078 ui.warn(_(b"not found!\n"))
5081 ui.warn(_(b"not found!\n"))
5079 return 1
5082 return 1
5080 else:
5083 else:
5081 return 0
5084 return 0
5082
5085
5083
5086
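The mapping that ``paths`` prints is the same ``ui.paths`` object that ``pull`` and ``push`` later in this file consult. A minimal sketch, not part of this changeset, of resolving a symbolic name the way ``push`` does below (assuming a ``ui`` object with a configured ``default`` path):

# Illustrative only: resolve a symbolic path name as push() does below.
path = ui.paths.getpath(b'default', default=(b'default-push', b'default'))
if path is not None:
    push_location = path.pushloc or path.loc  # pushurl sub-option wins for push
    pull_location = path.loc
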
5084 @command(
5087 @command(
5085 b'phase',
5088 b'phase',
5086 [
5089 [
5087 (b'p', b'public', False, _(b'set changeset phase to public')),
5090 (b'p', b'public', False, _(b'set changeset phase to public')),
5088 (b'd', b'draft', False, _(b'set changeset phase to draft')),
5091 (b'd', b'draft', False, _(b'set changeset phase to draft')),
5089 (b's', b'secret', False, _(b'set changeset phase to secret')),
5092 (b's', b'secret', False, _(b'set changeset phase to secret')),
5090 (b'f', b'force', False, _(b'allow to move boundary backward')),
5093 (b'f', b'force', False, _(b'allow to move boundary backward')),
5091 (b'r', b'rev', [], _(b'target revision'), _(b'REV')),
5094 (b'r', b'rev', [], _(b'target revision'), _(b'REV')),
5092 ],
5095 ],
5093 _(b'[-p|-d|-s] [-f] [-r] [REV...]'),
5096 _(b'[-p|-d|-s] [-f] [-r] [REV...]'),
5094 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
5097 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
5095 )
5098 )
5096 def phase(ui, repo, *revs, **opts):
5099 def phase(ui, repo, *revs, **opts):
5097 """set or show the current phase name
5100 """set or show the current phase name
5098
5101
5099 With no argument, show the phase name of the current revision(s).
5102 With no argument, show the phase name of the current revision(s).
5100
5103
5101 With one of -p/--public, -d/--draft or -s/--secret, change the
5104 With one of -p/--public, -d/--draft or -s/--secret, change the
5102 phase value of the specified revisions.
5105 phase value of the specified revisions.
5103
5106
5104 Unless -f/--force is specified, :hg:`phase` won't move changesets from a
5107 Unless -f/--force is specified, :hg:`phase` won't move changesets from a
5105 lower phase to a higher phase. Phases are ordered as follows::
5108 lower phase to a higher phase. Phases are ordered as follows::
5106
5109
5107 public < draft < secret
5110 public < draft < secret
5108
5111
5109 Returns 0 on success, 1 if some phases could not be changed.
5112 Returns 0 on success, 1 if some phases could not be changed.
5110
5113
5111 (For more information about the phases concept, see :hg:`help phases`.)
5114 (For more information about the phases concept, see :hg:`help phases`.)
5112 """
5115 """
5113 opts = pycompat.byteskwargs(opts)
5116 opts = pycompat.byteskwargs(opts)
5114 # search for a unique phase argument
5117 # search for a unique phase argument
5115 targetphase = None
5118 targetphase = None
5116 for idx, name in enumerate(phases.cmdphasenames):
5119 for idx, name in enumerate(phases.cmdphasenames):
5117 if opts[name]:
5120 if opts[name]:
5118 if targetphase is not None:
5121 if targetphase is not None:
5119 raise error.Abort(_(b'only one phase can be specified'))
5122 raise error.Abort(_(b'only one phase can be specified'))
5120 targetphase = idx
5123 targetphase = idx
5121
5124
5122 # look for specified revision
5125 # look for specified revision
5123 revs = list(revs)
5126 revs = list(revs)
5124 revs.extend(opts[b'rev'])
5127 revs.extend(opts[b'rev'])
5125 if not revs:
5128 if not revs:
5126 # display both parents as the second parent phase can influence
5129 # display both parents as the second parent phase can influence
5127 # the phase of a merge commit
5130 # the phase of a merge commit
5128 revs = [c.rev() for c in repo[None].parents()]
5131 revs = [c.rev() for c in repo[None].parents()]
5129
5132
5130 revs = scmutil.revrange(repo, revs)
5133 revs = scmutil.revrange(repo, revs)
5131
5134
5132 ret = 0
5135 ret = 0
5133 if targetphase is None:
5136 if targetphase is None:
5134 # display
5137 # display
5135 for r in revs:
5138 for r in revs:
5136 ctx = repo[r]
5139 ctx = repo[r]
5137 ui.write(b'%i: %s\n' % (ctx.rev(), ctx.phasestr()))
5140 ui.write(b'%i: %s\n' % (ctx.rev(), ctx.phasestr()))
5138 else:
5141 else:
5139 with repo.lock(), repo.transaction(b"phase") as tr:
5142 with repo.lock(), repo.transaction(b"phase") as tr:
5140 # set phase
5143 # set phase
5141 if not revs:
5144 if not revs:
5142 raise error.Abort(_(b'empty revision set'))
5145 raise error.Abort(_(b'empty revision set'))
5143 nodes = [repo[r].node() for r in revs]
5146 nodes = [repo[r].node() for r in revs]
5144 # moving revisions from public to draft may hide them
5147 # moving revisions from public to draft may hide them
5145 # We have to check result on an unfiltered repository
5148 # We have to check result on an unfiltered repository
5146 unfi = repo.unfiltered()
5149 unfi = repo.unfiltered()
5147 getphase = unfi._phasecache.phase
5150 getphase = unfi._phasecache.phase
5148 olddata = [getphase(unfi, r) for r in unfi]
5151 olddata = [getphase(unfi, r) for r in unfi]
5149 phases.advanceboundary(repo, tr, targetphase, nodes)
5152 phases.advanceboundary(repo, tr, targetphase, nodes)
5150 if opts[b'force']:
5153 if opts[b'force']:
5151 phases.retractboundary(repo, tr, targetphase, nodes)
5154 phases.retractboundary(repo, tr, targetphase, nodes)
5152 getphase = unfi._phasecache.phase
5155 getphase = unfi._phasecache.phase
5153 newdata = [getphase(unfi, r) for r in unfi]
5156 newdata = [getphase(unfi, r) for r in unfi]
5154 changes = sum(newdata[r] != olddata[r] for r in unfi)
5157 changes = sum(newdata[r] != olddata[r] for r in unfi)
5155 cl = unfi.changelog
5158 cl = unfi.changelog
5156 rejected = [n for n in nodes if newdata[cl.rev(n)] < targetphase]
5159 rejected = [n for n in nodes if newdata[cl.rev(n)] < targetphase]
5157 if rejected:
5160 if rejected:
5158 ui.warn(
5161 ui.warn(
5159 _(
5162 _(
5160 b'cannot move %i changesets to a higher '
5163 b'cannot move %i changesets to a higher '
5161 b'phase, use --force\n'
5164 b'phase, use --force\n'
5162 )
5165 )
5163 % len(rejected)
5166 % len(rejected)
5164 )
5167 )
5165 ret = 1
5168 ret = 1
5166 if changes:
5169 if changes:
5167 msg = _(b'phase changed for %i changesets\n') % changes
5170 msg = _(b'phase changed for %i changesets\n') % changes
5168 if ret:
5171 if ret:
5169 ui.status(msg)
5172 ui.status(msg)
5170 else:
5173 else:
5171 ui.note(msg)
5174 ui.note(msg)
5172 else:
5175 else:
5173 ui.warn(_(b'no phases changed\n'))
5176 ui.warn(_(b'no phases changed\n'))
5174 return ret
5177 return ret
5175
5178
5176
5179
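As the command body shows, phase movement happens under the repository lock inside a transaction: ``phases.advanceboundary`` only moves changesets toward a lower (more public) phase, while ``phases.retractboundary`` (only reached with --force) moves them backward. A minimal sketch of the common case, not part of this changeset, assuming an existing ``repo`` and a hypothetical ``target_nodes`` list of changeset nodes to publish:

from mercurial import phases

# Illustrative only: publish the given nodes, mirroring the
# lock/transaction pattern used by the phase command above.
with repo.lock(), repo.transaction(b"phase-example") as tr:
    # advanceboundary can only lower a phase (e.g. draft -> public);
    # moving changesets backward would need retractboundary (--force).
    phases.advanceboundary(repo, tr, phases.public, target_nodes)
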
5177 def postincoming(ui, repo, modheads, optupdate, checkout, brev):
5180 def postincoming(ui, repo, modheads, optupdate, checkout, brev):
5178 """Run after a changegroup has been added via pull/unbundle
5181 """Run after a changegroup has been added via pull/unbundle
5179
5182
5180 This takes the arguments below:
5183 This takes the arguments below:
5181
5184
5182 :modheads: change of heads by pull/unbundle
5185 :modheads: change of heads by pull/unbundle
5183 :optupdate: whether updating the working directory is needed
5186 :optupdate: whether updating the working directory is needed
5184 :checkout: update destination revision (or None to default destination)
5187 :checkout: update destination revision (or None to default destination)
5185 :brev: a name, which might be a bookmark to be activated after updating
5188 :brev: a name, which might be a bookmark to be activated after updating
5186 """
5189 """
5187 if modheads == 0:
5190 if modheads == 0:
5188 return
5191 return
5189 if optupdate:
5192 if optupdate:
5190 try:
5193 try:
5191 return hg.updatetotally(ui, repo, checkout, brev)
5194 return hg.updatetotally(ui, repo, checkout, brev)
5192 except error.UpdateAbort as inst:
5195 except error.UpdateAbort as inst:
5193 msg = _(b"not updating: %s") % stringutil.forcebytestr(inst)
5196 msg = _(b"not updating: %s") % stringutil.forcebytestr(inst)
5194 hint = inst.hint
5197 hint = inst.hint
5195 raise error.UpdateAbort(msg, hint=hint)
5198 raise error.UpdateAbort(msg, hint=hint)
5196 if modheads is not None and modheads > 1:
5199 if modheads is not None and modheads > 1:
5197 currentbranchheads = len(repo.branchheads())
5200 currentbranchheads = len(repo.branchheads())
5198 if currentbranchheads == modheads:
5201 if currentbranchheads == modheads:
5199 ui.status(
5202 ui.status(
5200 _(b"(run 'hg heads' to see heads, 'hg merge' to merge)\n")
5203 _(b"(run 'hg heads' to see heads, 'hg merge' to merge)\n")
5201 )
5204 )
5202 elif currentbranchheads > 1:
5205 elif currentbranchheads > 1:
5203 ui.status(
5206 ui.status(
5204 _(b"(run 'hg heads .' to see heads, 'hg merge' to merge)\n")
5207 _(b"(run 'hg heads .' to see heads, 'hg merge' to merge)\n")
5205 )
5208 )
5206 else:
5209 else:
5207 ui.status(_(b"(run 'hg heads' to see heads)\n"))
5210 ui.status(_(b"(run 'hg heads' to see heads)\n"))
5208 elif not ui.configbool(b'commands', b'update.requiredest'):
5211 elif not ui.configbool(b'commands', b'update.requiredest'):
5209 ui.status(_(b"(run 'hg update' to get a working copy)\n"))
5212 ui.status(_(b"(run 'hg update' to get a working copy)\n"))
5210
5213
5211
5214
5212 @command(
5215 @command(
5213 b'pull',
5216 b'pull',
5214 [
5217 [
5215 (
5218 (
5216 b'u',
5219 b'u',
5217 b'update',
5220 b'update',
5218 None,
5221 None,
5219 _(b'update to new branch head if new descendants were pulled'),
5222 _(b'update to new branch head if new descendants were pulled'),
5220 ),
5223 ),
5221 (
5224 (
5222 b'f',
5225 b'f',
5223 b'force',
5226 b'force',
5224 None,
5227 None,
5225 _(b'run even when remote repository is unrelated'),
5228 _(b'run even when remote repository is unrelated'),
5226 ),
5229 ),
5227 (b'', b'confirm', None, _(b'confirm pull before applying changes'),),
5230 (b'', b'confirm', None, _(b'confirm pull before applying changes'),),
5228 (
5231 (
5229 b'r',
5232 b'r',
5230 b'rev',
5233 b'rev',
5231 [],
5234 [],
5232 _(b'a remote changeset intended to be added'),
5235 _(b'a remote changeset intended to be added'),
5233 _(b'REV'),
5236 _(b'REV'),
5234 ),
5237 ),
5235 (b'B', b'bookmark', [], _(b"bookmark to pull"), _(b'BOOKMARK')),
5238 (b'B', b'bookmark', [], _(b"bookmark to pull"), _(b'BOOKMARK')),
5236 (
5239 (
5237 b'b',
5240 b'b',
5238 b'branch',
5241 b'branch',
5239 [],
5242 [],
5240 _(b'a specific branch you would like to pull'),
5243 _(b'a specific branch you would like to pull'),
5241 _(b'BRANCH'),
5244 _(b'BRANCH'),
5242 ),
5245 ),
5243 ]
5246 ]
5244 + remoteopts,
5247 + remoteopts,
5245 _(b'[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'),
5248 _(b'[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'),
5246 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
5249 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
5247 helpbasic=True,
5250 helpbasic=True,
5248 )
5251 )
5249 def pull(ui, repo, source=b"default", **opts):
5252 def pull(ui, repo, source=b"default", **opts):
5250 """pull changes from the specified source
5253 """pull changes from the specified source
5251
5254
5252 Pull changes from a remote repository to a local one.
5255 Pull changes from a remote repository to a local one.
5253
5256
5254 This finds all changes from the repository at the specified path
5257 This finds all changes from the repository at the specified path
5255 or URL and adds them to a local repository (the current one unless
5258 or URL and adds them to a local repository (the current one unless
5256 -R is specified). By default, this does not update the copy of the
5259 -R is specified). By default, this does not update the copy of the
5257 project in the working directory.
5260 project in the working directory.
5258
5261
5259 When cloning from servers that support it, Mercurial may fetch
5262 When cloning from servers that support it, Mercurial may fetch
5260 pre-generated data. When this is done, hooks operating on incoming
5263 pre-generated data. When this is done, hooks operating on incoming
5261 changesets and changegroups may fire more than once, once for each
5264 changesets and changegroups may fire more than once, once for each
5262 pre-generated bundle as well as for any additional remaining
5265 pre-generated bundle as well as for any additional remaining
5263 data. See :hg:`help -e clonebundles` for more.
5266 data. See :hg:`help -e clonebundles` for more.
5264
5267
5265 Use :hg:`incoming` if you want to see what would have been added
5268 Use :hg:`incoming` if you want to see what would have been added
5266 by a pull at the time you issued this command. If you then decide
5269 by a pull at the time you issued this command. If you then decide
5267 to add those changes to the repository, you should use :hg:`pull
5270 to add those changes to the repository, you should use :hg:`pull
5268 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
5271 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
5269
5272
5270 If SOURCE is omitted, the 'default' path will be used.
5273 If SOURCE is omitted, the 'default' path will be used.
5271 See :hg:`help urls` for more information.
5274 See :hg:`help urls` for more information.
5272
5275
5273 Specifying bookmark as ``.`` is equivalent to specifying the active
5276 Specifying bookmark as ``.`` is equivalent to specifying the active
5274 bookmark's name.
5277 bookmark's name.
5275
5278
5276 Returns 0 on success, 1 if an update had unresolved files.
5279 Returns 0 on success, 1 if an update had unresolved files.
5277 """
5280 """
5278
5281
5279 opts = pycompat.byteskwargs(opts)
5282 opts = pycompat.byteskwargs(opts)
5280 if ui.configbool(b'commands', b'update.requiredest') and opts.get(
5283 if ui.configbool(b'commands', b'update.requiredest') and opts.get(
5281 b'update'
5284 b'update'
5282 ):
5285 ):
5283 msg = _(b'update destination required by configuration')
5286 msg = _(b'update destination required by configuration')
5284 hint = _(b'use hg pull followed by hg update DEST')
5287 hint = _(b'use hg pull followed by hg update DEST')
5285 raise error.Abort(msg, hint=hint)
5288 raise error.Abort(msg, hint=hint)
5286
5289
5287 source, branches = hg.parseurl(ui.expandpath(source), opts.get(b'branch'))
5290 source, branches = hg.parseurl(ui.expandpath(source), opts.get(b'branch'))
5288 ui.status(_(b'pulling from %s\n') % util.hidepassword(source))
5291 ui.status(_(b'pulling from %s\n') % util.hidepassword(source))
5289 other = hg.peer(repo, opts, source)
5292 other = hg.peer(repo, opts, source)
5290 try:
5293 try:
5291 revs, checkout = hg.addbranchrevs(
5294 revs, checkout = hg.addbranchrevs(
5292 repo, other, branches, opts.get(b'rev')
5295 repo, other, branches, opts.get(b'rev')
5293 )
5296 )
5294
5297
5295 pullopargs = {}
5298 pullopargs = {}
5296
5299
5297 nodes = None
5300 nodes = None
5298 if opts.get(b'bookmark') or revs:
5301 if opts.get(b'bookmark') or revs:
5302 # The list of bookmarks used here is the same one used to actually update
5305 # The list of bookmarks used here is the same one used to actually update
5300 # the bookmark names, to avoid the race from issue 4689 and we do
5303 # the bookmark names, to avoid the race from issue 4689 and we do
5301 # all lookup and bookmark queries in one go so they see the same
5304 # all lookup and bookmark queries in one go so they see the same
5302 # version of the server state (issue 4700).
5305 # version of the server state (issue 4700).
5303 nodes = []
5306 nodes = []
5304 fnodes = []
5307 fnodes = []
5305 revs = revs or []
5308 revs = revs or []
5306 if revs and not other.capable(b'lookup'):
5309 if revs and not other.capable(b'lookup'):
5307 err = _(
5310 err = _(
5308 b"other repository doesn't support revision lookup, "
5311 b"other repository doesn't support revision lookup, "
5309 b"so a rev cannot be specified."
5312 b"so a rev cannot be specified."
5310 )
5313 )
5311 raise error.Abort(err)
5314 raise error.Abort(err)
5312 with other.commandexecutor() as e:
5315 with other.commandexecutor() as e:
5313 fremotebookmarks = e.callcommand(
5316 fremotebookmarks = e.callcommand(
5314 b'listkeys', {b'namespace': b'bookmarks'}
5317 b'listkeys', {b'namespace': b'bookmarks'}
5315 )
5318 )
5316 for r in revs:
5319 for r in revs:
5317 fnodes.append(e.callcommand(b'lookup', {b'key': r}))
5320 fnodes.append(e.callcommand(b'lookup', {b'key': r}))
5318 remotebookmarks = fremotebookmarks.result()
5321 remotebookmarks = fremotebookmarks.result()
5319 remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks)
5322 remotebookmarks = bookmarks.unhexlifybookmarks(remotebookmarks)
5320 pullopargs[b'remotebookmarks'] = remotebookmarks
5323 pullopargs[b'remotebookmarks'] = remotebookmarks
5321 for b in opts.get(b'bookmark', []):
5324 for b in opts.get(b'bookmark', []):
5322 b = repo._bookmarks.expandname(b)
5325 b = repo._bookmarks.expandname(b)
5323 if b not in remotebookmarks:
5326 if b not in remotebookmarks:
5324 raise error.Abort(_(b'remote bookmark %s not found!') % b)
5327 raise error.Abort(_(b'remote bookmark %s not found!') % b)
5325 nodes.append(remotebookmarks[b])
5328 nodes.append(remotebookmarks[b])
5326 for i, rev in enumerate(revs):
5329 for i, rev in enumerate(revs):
5327 node = fnodes[i].result()
5330 node = fnodes[i].result()
5328 nodes.append(node)
5331 nodes.append(node)
5329 if rev == checkout:
5332 if rev == checkout:
5330 checkout = node
5333 checkout = node
5331
5334
5332 wlock = util.nullcontextmanager()
5335 wlock = util.nullcontextmanager()
5333 if opts.get(b'update'):
5336 if opts.get(b'update'):
5334 wlock = repo.wlock()
5337 wlock = repo.wlock()
5335 with wlock:
5338 with wlock:
5336 pullopargs.update(opts.get(b'opargs', {}))
5339 pullopargs.update(opts.get(b'opargs', {}))
5337 modheads = exchange.pull(
5340 modheads = exchange.pull(
5338 repo,
5341 repo,
5339 other,
5342 other,
5340 heads=nodes,
5343 heads=nodes,
5341 force=opts.get(b'force'),
5344 force=opts.get(b'force'),
5342 bookmarks=opts.get(b'bookmark', ()),
5345 bookmarks=opts.get(b'bookmark', ()),
5343 opargs=pullopargs,
5346 opargs=pullopargs,
5344 confirm=opts.get(b'confirm'),
5347 confirm=opts.get(b'confirm'),
5345 ).cgresult
5348 ).cgresult
5346
5349
5347 # brev is a name, which might be a bookmark to be activated at
5350 # brev is a name, which might be a bookmark to be activated at
5348 # the end of the update. In other words, it is an explicit
5351 # the end of the update. In other words, it is an explicit
5349 # destination of the update
5352 # destination of the update
5350 brev = None
5353 brev = None
5351
5354
5352 if checkout:
5355 if checkout:
5353 checkout = repo.unfiltered().changelog.rev(checkout)
5356 checkout = repo.unfiltered().changelog.rev(checkout)
5354
5357
5355 # order below depends on implementation of
5358 # order below depends on implementation of
5356 # hg.addbranchrevs(). opts['bookmark'] is ignored,
5359 # hg.addbranchrevs(). opts['bookmark'] is ignored,
5357 # because 'checkout' is determined without it.
5360 # because 'checkout' is determined without it.
5358 if opts.get(b'rev'):
5361 if opts.get(b'rev'):
5359 brev = opts[b'rev'][0]
5362 brev = opts[b'rev'][0]
5360 elif opts.get(b'branch'):
5363 elif opts.get(b'branch'):
5361 brev = opts[b'branch'][0]
5364 brev = opts[b'branch'][0]
5362 else:
5365 else:
5363 brev = branches[0]
5366 brev = branches[0]
5364 repo._subtoppath = source
5367 repo._subtoppath = source
5365 try:
5368 try:
5366 ret = postincoming(
5369 ret = postincoming(
5367 ui, repo, modheads, opts.get(b'update'), checkout, brev
5370 ui, repo, modheads, opts.get(b'update'), checkout, brev
5368 )
5371 )
5369 except error.FilteredRepoLookupError as exc:
5372 except error.FilteredRepoLookupError as exc:
5370 msg = _(b'cannot update to target: %s') % exc.args[0]
5373 msg = _(b'cannot update to target: %s') % exc.args[0]
5371 exc.args = (msg,) + exc.args[1:]
5374 exc.args = (msg,) + exc.args[1:]
5372 raise
5375 raise
5373 finally:
5376 finally:
5374 del repo._subtoppath
5377 del repo._subtoppath
5375
5378
5376 finally:
5379 finally:
5377 other.close()
5380 other.close()
5378 return ret
5381 return ret
5379
5382
5380
5383
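One detail worth highlighting from the body above: the remote ``listkeys`` and ``lookup`` calls are batched through a single ``commandexecutor`` so that bookmark resolution and revision lookup see one consistent view of the server state (the issue 4689/4700 comment). A minimal sketch of that pattern, not part of this changeset, assuming an already-opened peer ``other`` and a list of revision names ``revs``:

# Illustrative only: queue both queries before reading any result so they
# are answered from the same server state, as pull() does above.
with other.commandexecutor() as e:
    fbookmarks = e.callcommand(b'listkeys', {b'namespace': b'bookmarks'})
    fnodes = [e.callcommand(b'lookup', {b'key': r}) for r in revs]
    remotebookmarks = fbookmarks.result()
    nodes = [f.result() for f in fnodes]
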
5381 @command(
5384 @command(
5382 b'push',
5385 b'push',
5383 [
5386 [
5384 (b'f', b'force', None, _(b'force push')),
5387 (b'f', b'force', None, _(b'force push')),
5385 (
5388 (
5386 b'r',
5389 b'r',
5387 b'rev',
5390 b'rev',
5388 [],
5391 [],
5389 _(b'a changeset intended to be included in the destination'),
5392 _(b'a changeset intended to be included in the destination'),
5390 _(b'REV'),
5393 _(b'REV'),
5391 ),
5394 ),
5392 (b'B', b'bookmark', [], _(b"bookmark to push"), _(b'BOOKMARK')),
5395 (b'B', b'bookmark', [], _(b"bookmark to push"), _(b'BOOKMARK')),
5393 (
5396 (
5394 b'b',
5397 b'b',
5395 b'branch',
5398 b'branch',
5396 [],
5399 [],
5397 _(b'a specific branch you would like to push'),
5400 _(b'a specific branch you would like to push'),
5398 _(b'BRANCH'),
5401 _(b'BRANCH'),
5399 ),
5402 ),
5400 (b'', b'new-branch', False, _(b'allow pushing a new branch')),
5403 (b'', b'new-branch', False, _(b'allow pushing a new branch')),
5401 (
5404 (
5402 b'',
5405 b'',
5403 b'pushvars',
5406 b'pushvars',
5404 [],
5407 [],
5405 _(b'variables that can be sent to server (ADVANCED)'),
5408 _(b'variables that can be sent to server (ADVANCED)'),
5406 ),
5409 ),
5407 (
5410 (
5408 b'',
5411 b'',
5409 b'publish',
5412 b'publish',
5410 False,
5413 False,
5411 _(b'push the changeset as public (EXPERIMENTAL)'),
5414 _(b'push the changeset as public (EXPERIMENTAL)'),
5412 ),
5415 ),
5413 ]
5416 ]
5414 + remoteopts,
5417 + remoteopts,
5415 _(b'[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'),
5418 _(b'[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'),
5416 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
5419 helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
5417 helpbasic=True,
5420 helpbasic=True,
5418 )
5421 )
5419 def push(ui, repo, dest=None, **opts):
5422 def push(ui, repo, dest=None, **opts):
5420 """push changes to the specified destination
5423 """push changes to the specified destination
5421
5424
5422 Push changesets from the local repository to the specified
5425 Push changesets from the local repository to the specified
5423 destination.
5426 destination.
5424
5427
5425 This operation is symmetrical to pull: it is identical to a pull
5428 This operation is symmetrical to pull: it is identical to a pull
5426 in the destination repository from the current one.
5429 in the destination repository from the current one.
5427
5430
5428 By default, push will not allow creation of new heads at the
5431 By default, push will not allow creation of new heads at the
5429 destination, since multiple heads would make it unclear which head
5432 destination, since multiple heads would make it unclear which head
5430 to use. In this situation, it is recommended to pull and merge
5433 to use. In this situation, it is recommended to pull and merge
5431 before pushing.
5434 before pushing.
5432
5435
5433 Use --new-branch if you want to allow push to create a new named
5436 Use --new-branch if you want to allow push to create a new named
5434 branch that is not present at the destination. This allows you to
5437 branch that is not present at the destination. This allows you to
5435 only create a new branch without forcing other changes.
5438 only create a new branch without forcing other changes.
5436
5439
5437 .. note::
5440 .. note::
5438
5441
5439 Extra care should be taken with the -f/--force option,
5442 Extra care should be taken with the -f/--force option,
5440 which will push all new heads on all branches, an action which will
5443 which will push all new heads on all branches, an action which will
5441 almost always cause confusion for collaborators.
5444 almost always cause confusion for collaborators.
5442
5445
5443 If -r/--rev is used, the specified revision and all its ancestors
5446 If -r/--rev is used, the specified revision and all its ancestors
5444 will be pushed to the remote repository.
5447 will be pushed to the remote repository.
5445
5448
5446 If -B/--bookmark is used, the specified bookmarked revision, its
5449 If -B/--bookmark is used, the specified bookmarked revision, its
5447 ancestors, and the bookmark will be pushed to the remote
5450 ancestors, and the bookmark will be pushed to the remote
5448 repository. Specifying ``.`` is equivalent to specifying the active
5451 repository. Specifying ``.`` is equivalent to specifying the active
5449 bookmark's name.
5452 bookmark's name.
5450
5453
5451 Please see :hg:`help urls` for important details about ``ssh://``
5454 Please see :hg:`help urls` for important details about ``ssh://``
5452 URLs. If DESTINATION is omitted, a default path will be used.
5455 URLs. If DESTINATION is omitted, a default path will be used.
5453
5456
5454 .. container:: verbose
5457 .. container:: verbose
5455
5458
5456 The --pushvars option sends strings to the server that become
5459 The --pushvars option sends strings to the server that become
5457 environment variables prepended with ``HG_USERVAR_``. For example,
5460 environment variables prepended with ``HG_USERVAR_``. For example,
5458 ``--pushvars ENABLE_FEATURE=true`` provides the server-side hooks with
5461 ``--pushvars ENABLE_FEATURE=true`` provides the server-side hooks with
5459 ``HG_USERVAR_ENABLE_FEATURE=true`` as part of their environment.
5462 ``HG_USERVAR_ENABLE_FEATURE=true`` as part of their environment.
5460
5463
5461 pushvars can provide for user-overridable hooks as well as set debug
5464 pushvars can provide for user-overridable hooks as well as set debug
5462 levels. One example is having a hook that blocks commits containing
5465 levels. One example is having a hook that blocks commits containing
5463 conflict markers, but enables the user to override the hook if the file
5466 conflict markers, but enables the user to override the hook if the file
5464 is using conflict markers for testing purposes or the file format has
5467 is using conflict markers for testing purposes or the file format has
5465 strings that look like conflict markers.
5468 strings that look like conflict markers.
5466
5469
5467 By default, servers will ignore `--pushvars`. To enable it add the
5470 By default, servers will ignore `--pushvars`. To enable it add the
5468 following to your configuration file::
5471 following to your configuration file::
5469
5472
5470 [push]
5473 [push]
5471 pushvars.server = true
5474 pushvars.server = true
5472
5475
5473 Returns 0 if push was successful, 1 if nothing to push.
5476 Returns 0 if push was successful, 1 if nothing to push.
5474 """
5477 """
5475
5478
5476 opts = pycompat.byteskwargs(opts)
5479 opts = pycompat.byteskwargs(opts)
5477 if opts.get(b'bookmark'):
5480 if opts.get(b'bookmark'):
5478 ui.setconfig(b'bookmarks', b'pushing', opts[b'bookmark'], b'push')
5481 ui.setconfig(b'bookmarks', b'pushing', opts[b'bookmark'], b'push')
5479 for b in opts[b'bookmark']:
5482 for b in opts[b'bookmark']:
5480 # translate -B options to -r so changesets get pushed
5483 # translate -B options to -r so changesets get pushed
5481 b = repo._bookmarks.expandname(b)
5484 b = repo._bookmarks.expandname(b)
5482 if b in repo._bookmarks:
5485 if b in repo._bookmarks:
5483 opts.setdefault(b'rev', []).append(b)
5486 opts.setdefault(b'rev', []).append(b)
5484 else:
5487 else:
5485 # if we try to push a deleted bookmark, translate it to null
5488 # if we try to push a deleted bookmark, translate it to null
5486 # this lets simultaneous -r, -b options continue working
5489 # this lets simultaneous -r, -b options continue working
5487 opts.setdefault(b'rev', []).append(b"null")
5490 opts.setdefault(b'rev', []).append(b"null")
5488
5491
5489 path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
5492 path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
5490 if not path:
5493 if not path:
5491 raise error.Abort(
5494 raise error.Abort(
5492 _(b'default repository not configured!'),
5495 _(b'default repository not configured!'),
5493 hint=_(b"see 'hg help config.paths'"),
5496 hint=_(b"see 'hg help config.paths'"),
5494 )
5497 )
5495 dest = path.pushloc or path.loc
5498 dest = path.pushloc or path.loc
5496 branches = (path.branch, opts.get(b'branch') or [])
5499 branches = (path.branch, opts.get(b'branch') or [])
5497 ui.status(_(b'pushing to %s\n') % util.hidepassword(dest))
5500 ui.status(_(b'pushing to %s\n') % util.hidepassword(dest))
5498 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get(b'rev'))
5501 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get(b'rev'))
5499 other = hg.peer(repo, opts, dest)
5502 other = hg.peer(repo, opts, dest)
5500
5503
5501 if revs:
5504 if revs:
5502 revs = [repo[r].node() for r in scmutil.revrange(repo, revs)]
5505 revs = [repo[r].node() for r in scmutil.revrange(repo, revs)]
5503 if not revs:
5506 if not revs:
5504 raise error.Abort(
5507 raise error.Abort(
5505 _(b"specified revisions evaluate to an empty set"),
5508 _(b"specified revisions evaluate to an empty set"),
5506 hint=_(b"use different revision arguments"),
5509 hint=_(b"use different revision arguments"),
5507 )
5510 )
5508 elif path.pushrev:
5511 elif path.pushrev:
5509 # It doesn't make any sense to specify ancestor revisions. So limit
5512 # It doesn't make any sense to specify ancestor revisions. So limit
5510 # to DAG heads to make discovery simpler.
5513 # to DAG heads to make discovery simpler.
5511 expr = revsetlang.formatspec(b'heads(%r)', path.pushrev)
5514 expr = revsetlang.formatspec(b'heads(%r)', path.pushrev)
5512 revs = scmutil.revrange(repo, [expr])
5515 revs = scmutil.revrange(repo, [expr])
5513 revs = [repo[rev].node() for rev in revs]
5516 revs = [repo[rev].node() for rev in revs]
5514 if not revs:
5517 if not revs:
5515 raise error.Abort(
5518 raise error.Abort(
5516 _(b'default push revset for path evaluates to an empty set')
5519 _(b'default push revset for path evaluates to an empty set')
5517 )
5520 )
5518 elif ui.configbool(b'commands', b'push.require-revs'):
5521 elif ui.configbool(b'commands', b'push.require-revs'):
5519 raise error.Abort(
5522 raise error.Abort(
5520 _(b'no revisions specified to push'),
5523 _(b'no revisions specified to push'),
5521 hint=_(b'did you mean "hg push -r ."?'),
5524 hint=_(b'did you mean "hg push -r ."?'),
5522 )
5525 )
5523
5526
5524 repo._subtoppath = dest
5527 repo._subtoppath = dest
5525 try:
5528 try:
5526 # push subrepos depth-first for coherent ordering
5529 # push subrepos depth-first for coherent ordering
5527 c = repo[b'.']
5530 c = repo[b'.']
5528 subs = c.substate # only repos that are committed
5531 subs = c.substate # only repos that are committed
5529 for s in sorted(subs):
5532 for s in sorted(subs):
5530 result = c.sub(s).push(opts)
5533 result = c.sub(s).push(opts)
5531 if result == 0:
5534 if result == 0:
5532 return not result
5535 return not result
5533 finally:
5536 finally:
5534 del repo._subtoppath
5537 del repo._subtoppath
5535
5538
5536 opargs = dict(opts.get(b'opargs', {})) # copy opargs since we may mutate it
5539 opargs = dict(opts.get(b'opargs', {})) # copy opargs since we may mutate it
5537 opargs.setdefault(b'pushvars', []).extend(opts.get(b'pushvars', []))
5540 opargs.setdefault(b'pushvars', []).extend(opts.get(b'pushvars', []))
5538
5541
5539 pushop = exchange.push(
5542 pushop = exchange.push(
5540 repo,
5543 repo,
5541 other,
5544 other,
5542 opts.get(b'force'),
5545 opts.get(b'force'),
5543 revs=revs,
5546 revs=revs,
5544 newbranch=opts.get(b'new_branch'),
5547 newbranch=opts.get(b'new_branch'),
5545 bookmarks=opts.get(b'bookmark', ()),
5548 bookmarks=opts.get(b'bookmark', ()),
5546 publish=opts.get(b'publish'),
5549 publish=opts.get(b'publish'),
5547 opargs=opargs,
5550 opargs=opargs,
5548 )
5551 )
5549
5552
5550 result = not pushop.cgresult
5553 result = not pushop.cgresult
5551
5554
5552 if pushop.bkresult is not None:
5555 if pushop.bkresult is not None:
5553 if pushop.bkresult == 2:
5556 if pushop.bkresult == 2:
5554 result = 2
5557 result = 2
5555 elif not result and pushop.bkresult:
5558 elif not result and pushop.bkresult:
5556 result = 2
5559 result = 2
5557
5560
5558 return result
5561 return result
5559
5562
5560
5563
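The exit status logic above can be read as follows: ``exchange.push`` reports the changegroup outcome in ``cgresult`` and the bookmark outcome in ``bkresult``, and the command maps "nothing pushed" to exit code 1 and bookmark failures to 2. A minimal sketch of driving the same core operation directly, not part of this changeset, assuming ``repo``, an opened peer ``other``, and a hypothetical ``heads_to_push`` list of nodes:

from mercurial import exchange

# Illustrative only: the operation push() above delegates to.
pushop = exchange.push(repo, other, False, revs=heads_to_push)
nothing_to_push = not pushop.cgresult   # push() returns 1 in this case
bookmark_failed = pushop.bkresult == 2  # push() returns 2 in this case
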
5561 @command(
5564 @command(
5562 b'recover',
5565 b'recover',
5563 [(b'', b'verify', False, b"run `hg verify` after successful recover"),],
5566 [(b'', b'verify', False, b"run `hg verify` after successful recover"),],
5564 helpcategory=command.CATEGORY_MAINTENANCE,
5567 helpcategory=command.CATEGORY_MAINTENANCE,
5565 )
5568 )
5566 def recover(ui, repo, **opts):
5569 def recover(ui, repo, **opts):
5567 """roll back an interrupted transaction
5570 """roll back an interrupted transaction
5568
5571
5569 Recover from an interrupted commit or pull.
5572 Recover from an interrupted commit or pull.
5570
5573
5571 This command tries to fix the repository status after an
5574 This command tries to fix the repository status after an
5572 interrupted operation. It should only be necessary when Mercurial
5575 interrupted operation. It should only be necessary when Mercurial
5573 suggests it.
5576 suggests it.
5574
5577
5575 Returns 0 if successful, 1 if nothing to recover or verify fails.
5578 Returns 0 if successful, 1 if nothing to recover or verify fails.
5576 """
5579 """
5577 ret = repo.recover()
5580 ret = repo.recover()
5578 if ret:
5581 if ret:
5579 if opts['verify']:
5582 if opts['verify']:
5580 return hg.verify(repo)
5583 return hg.verify(repo)
5581 else:
5584 else:
5582 msg = _(
5585 msg = _(
5583 b"(verify step skipped, run `hg verify` to check your "
5586 b"(verify step skipped, run `hg verify` to check your "
5584 b"repository content)\n"
5587 b"repository content)\n"
5585 )
5588 )
5586 ui.warn(msg)
5589 ui.warn(msg)
5587 return 0
5590 return 0
5588 return 1
5591 return 1
5589
5592
5590
5593
5591 @command(
5594 @command(
5592 b'remove|rm',
5595 b'remove|rm',
5593 [
5596 [
5594 (b'A', b'after', None, _(b'record delete for missing files')),
5597 (b'A', b'after', None, _(b'record delete for missing files')),
5595 (b'f', b'force', None, _(b'forget added files, delete modified files')),
5598 (b'f', b'force', None, _(b'forget added files, delete modified files')),
5596 ]
5599 ]
5597 + subrepoopts
5600 + subrepoopts
5598 + walkopts
5601 + walkopts
5599 + dryrunopts,
5602 + dryrunopts,
5600 _(b'[OPTION]... FILE...'),
5603 _(b'[OPTION]... FILE...'),
5601 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
5604 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
5602 helpbasic=True,
5605 helpbasic=True,
5603 inferrepo=True,
5606 inferrepo=True,
5604 )
5607 )
5605 def remove(ui, repo, *pats, **opts):
5608 def remove(ui, repo, *pats, **opts):
5606 """remove the specified files on the next commit
5609 """remove the specified files on the next commit
5607
5610
5608 Schedule the indicated files for removal from the current branch.
5611 Schedule the indicated files for removal from the current branch.
5609
5612
5610 This command schedules the files to be removed at the next commit.
5613 This command schedules the files to be removed at the next commit.
5611 To undo a remove before that, see :hg:`revert`. To undo added
5614 To undo a remove before that, see :hg:`revert`. To undo added
5612 files, see :hg:`forget`.
5615 files, see :hg:`forget`.
5613
5616
5614 .. container:: verbose
5617 .. container:: verbose
5615
5618
5616 -A/--after can be used to remove only files that have already
5619 -A/--after can be used to remove only files that have already
5617 been deleted, -f/--force can be used to force deletion, and -Af
5620 been deleted, -f/--force can be used to force deletion, and -Af
5618 can be used to remove files from the next revision without
5621 can be used to remove files from the next revision without
5619 deleting them from the working directory.
5622 deleting them from the working directory.
5620
5623
5621 The following table details the behavior of remove for different
5624 The following table details the behavior of remove for different
5622 file states (columns) and option combinations (rows). The file
5625 file states (columns) and option combinations (rows). The file
5623 states are Added [A], Clean [C], Modified [M] and Missing [!]
5626 states are Added [A], Clean [C], Modified [M] and Missing [!]
5624 (as reported by :hg:`status`). The actions are Warn, Remove
5627 (as reported by :hg:`status`). The actions are Warn, Remove
5625 (from branch) and Delete (from disk):
5628 (from branch) and Delete (from disk):
5626
5629
5627 ========= == == == ==
5630 ========= == == == ==
5628 opt/state A C M !
5631 opt/state A C M !
5629 ========= == == == ==
5632 ========= == == == ==
5630 none W RD W R
5633 none W RD W R
5631 -f R RD RD R
5634 -f R RD RD R
5632 -A W W W R
5635 -A W W W R
5633 -Af R R R R
5636 -Af R R R R
5634 ========= == == == ==
5637 ========= == == == ==
5635
5638
5636 .. note::
5639 .. note::
5637
5640
5638 :hg:`remove` never deletes files in Added [A] state from the
5641 :hg:`remove` never deletes files in Added [A] state from the
5639 working directory, not even if ``--force`` is specified.
5642 working directory, not even if ``--force`` is specified.
5640
5643
5641 Returns 0 on success, 1 if any warnings encountered.
5644 Returns 0 on success, 1 if any warnings encountered.
5642 """
5645 """
5643
5646
5644 opts = pycompat.byteskwargs(opts)
5647 opts = pycompat.byteskwargs(opts)
5645 after, force = opts.get(b'after'), opts.get(b'force')
5648 after, force = opts.get(b'after'), opts.get(b'force')
5646 dryrun = opts.get(b'dry_run')
5649 dryrun = opts.get(b'dry_run')
5647 if not pats and not after:
5650 if not pats and not after:
5648 raise error.Abort(_(b'no files specified'))
5651 raise error.Abort(_(b'no files specified'))
5649
5652
5650 m = scmutil.match(repo[None], pats, opts)
5653 m = scmutil.match(repo[None], pats, opts)
5651 subrepos = opts.get(b'subrepos')
5654 subrepos = opts.get(b'subrepos')
5652 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
5655 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
5653 return cmdutil.remove(
5656 return cmdutil.remove(
5654 ui, repo, m, b"", uipathfn, after, force, subrepos, dryrun=dryrun
5657 ui, repo, m, b"", uipathfn, after, force, subrepos, dryrun=dryrun
5655 )
5658 )
5656
5659
5657
5660
5658 @command(
5661 @command(
5659 b'rename|move|mv',
5662 b'rename|move|mv',
5660 [
5663 [
5661 (b'A', b'after', None, _(b'record a rename that has already occurred')),
5664 (b'A', b'after', None, _(b'record a rename that has already occurred')),
5662 (
5665 (
5663 b'',
5666 b'',
5664 b'at-rev',
5667 b'at-rev',
5665 b'',
5668 b'',
5666 _(b'(un)mark renames in the given revision (EXPERIMENTAL)'),
5669 _(b'(un)mark renames in the given revision (EXPERIMENTAL)'),
5667 _(b'REV'),
5670 _(b'REV'),
5668 ),
5671 ),
5669 (
5672 (
5670 b'f',
5673 b'f',
5671 b'force',
5674 b'force',
5672 None,
5675 None,
5673 _(b'forcibly move over an existing managed file'),
5676 _(b'forcibly move over an existing managed file'),
5674 ),
5677 ),
5675 ]
5678 ]
5676 + walkopts
5679 + walkopts
5677 + dryrunopts,
5680 + dryrunopts,
5678 _(b'[OPTION]... SOURCE... DEST'),
5681 _(b'[OPTION]... SOURCE... DEST'),
5679 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
5682 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
5680 )
5683 )
5681 def rename(ui, repo, *pats, **opts):
5684 def rename(ui, repo, *pats, **opts):
5682 """rename files; equivalent of copy + remove
5685 """rename files; equivalent of copy + remove
5683
5686
5684 Mark dest as copies of sources; mark sources for deletion. If dest
5687 Mark dest as copies of sources; mark sources for deletion. If dest
5685 is a directory, copies are put in that directory. If dest is a
5688 is a directory, copies are put in that directory. If dest is a
5686 file, there can only be one source.
5689 file, there can only be one source.
5687
5690
5688 By default, this command copies the contents of files as they
5691 By default, this command copies the contents of files as they
5689 exist in the working directory. If invoked with -A/--after, the
5692 exist in the working directory. If invoked with -A/--after, the
5690 operation is recorded, but no copying is performed.
5693 operation is recorded, but no copying is performed.
5691
5694
5692 This command takes effect at the next commit. To undo a rename
5695 This command takes effect at the next commit. To undo a rename
5693 before that, see :hg:`revert`.
5696 before that, see :hg:`revert`.
5694
5697
5695 Returns 0 on success, 1 if errors are encountered.
5698 Returns 0 on success, 1 if errors are encountered.
5696 """
5699 """
5697 opts = pycompat.byteskwargs(opts)
5700 opts = pycompat.byteskwargs(opts)
5698 with repo.wlock():
5701 with repo.wlock():
5699 return cmdutil.copy(ui, repo, pats, opts, rename=True)
5702 return cmdutil.copy(ui, repo, pats, opts, rename=True)
5700
5703
5701
5704
5702 @command(
5705 @command(
5703 b'resolve',
5706 b'resolve',
5704 [
5707 [
5705 (b'a', b'all', None, _(b'select all unresolved files')),
5708 (b'a', b'all', None, _(b'select all unresolved files')),
5706 (b'l', b'list', None, _(b'list state of files needing merge')),
5709 (b'l', b'list', None, _(b'list state of files needing merge')),
5707 (b'm', b'mark', None, _(b'mark files as resolved')),
5710 (b'm', b'mark', None, _(b'mark files as resolved')),
5708 (b'u', b'unmark', None, _(b'mark files as unresolved')),
5711 (b'u', b'unmark', None, _(b'mark files as unresolved')),
5709 (b'n', b'no-status', None, _(b'hide status prefix')),
5712 (b'n', b'no-status', None, _(b'hide status prefix')),
5710 (b'', b're-merge', None, _(b're-merge files')),
5713 (b'', b're-merge', None, _(b're-merge files')),
5711 ]
5714 ]
5712 + mergetoolopts
5715 + mergetoolopts
5713 + walkopts
5716 + walkopts
5714 + formatteropts,
5717 + formatteropts,
5715 _(b'[OPTION]... [FILE]...'),
5718 _(b'[OPTION]... [FILE]...'),
5716 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
5719 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
5717 inferrepo=True,
5720 inferrepo=True,
5718 )
5721 )
5719 def resolve(ui, repo, *pats, **opts):
5722 def resolve(ui, repo, *pats, **opts):
5720 """redo merges or set/view the merge status of files
5723 """redo merges or set/view the merge status of files
5721
5724
5722 Merges with unresolved conflicts are often the result of
5725 Merges with unresolved conflicts are often the result of
5723 non-interactive merging using the ``internal:merge`` configuration
5726 non-interactive merging using the ``internal:merge`` configuration
5724 setting, or a command-line merge tool like ``diff3``. The resolve
5727 setting, or a command-line merge tool like ``diff3``. The resolve
5725 command is used to manage the files involved in a merge, after
5728 command is used to manage the files involved in a merge, after
5726 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
5729 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
5727 working directory must have two parents). See :hg:`help
5730 working directory must have two parents). See :hg:`help
5728 merge-tools` for information on configuring merge tools.
5731 merge-tools` for information on configuring merge tools.
5729
5732
5730 The resolve command can be used in the following ways:
5733 The resolve command can be used in the following ways:
5731
5734
5732 - :hg:`resolve [--re-merge] [--tool TOOL] FILE...`: attempt to re-merge
5735 - :hg:`resolve [--re-merge] [--tool TOOL] FILE...`: attempt to re-merge
5733 the specified files, discarding any previous merge attempts. Re-merging
5736 the specified files, discarding any previous merge attempts. Re-merging
5734 is not performed for files already marked as resolved. Use ``--all/-a``
5737 is not performed for files already marked as resolved. Use ``--all/-a``
5735 to select all unresolved files. ``--tool`` can be used to specify
5738 to select all unresolved files. ``--tool`` can be used to specify
5736 the merge tool used for the given files. It overrides the HGMERGE
5739 the merge tool used for the given files. It overrides the HGMERGE
5737 environment variable and your configuration files. Previous file
5740 environment variable and your configuration files. Previous file
5738 contents are saved with a ``.orig`` suffix.
5741 contents are saved with a ``.orig`` suffix.
5739
5742
5740 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
5743 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
5741 (e.g. after having manually fixed-up the files). The default is
5744 (e.g. after having manually fixed-up the files). The default is
5742 to mark all unresolved files.
5745 to mark all unresolved files.
5743
5746
5744 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
5747 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
5745 default is to mark all resolved files.
5748 default is to mark all resolved files.
5746
5749
5747 - :hg:`resolve -l`: list files which had or still have conflicts.
5750 - :hg:`resolve -l`: list files which had or still have conflicts.
5748 In the printed list, ``U`` = unresolved and ``R`` = resolved.
5751 In the printed list, ``U`` = unresolved and ``R`` = resolved.
5749 You can use ``set:unresolved()`` or ``set:resolved()`` to filter
5752 You can use ``set:unresolved()`` or ``set:resolved()`` to filter
5750 the list. See :hg:`help filesets` for details.
5753 the list. See :hg:`help filesets` for details.
5751
5754
5752 .. note::
5755 .. note::
5753
5756
5754 Mercurial will not let you commit files with unresolved merge
5757 Mercurial will not let you commit files with unresolved merge
5755 conflicts. You must use :hg:`resolve -m ...` before you can
5758 conflicts. You must use :hg:`resolve -m ...` before you can
5756 commit after a conflicting merge.
5759 commit after a conflicting merge.
5757
5760
5758 .. container:: verbose
5761 .. container:: verbose
5759
5762
5760 Template:
5763 Template:
5761
5764
5762 The following keywords are supported in addition to the common template
5765 The following keywords are supported in addition to the common template
5763 keywords and functions. See also :hg:`help templates`.
5766 keywords and functions. See also :hg:`help templates`.
5764
5767
5765 :mergestatus: String. Character denoting merge conflicts, ``U`` or ``R``.
5768 :mergestatus: String. Character denoting merge conflicts, ``U`` or ``R``.
5766 :path: String. Repository-absolute path of the file.
5769 :path: String. Repository-absolute path of the file.
5767
5770
5768 Returns 0 on success, 1 if any files fail a resolve attempt.
5771 Returns 0 on success, 1 if any files fail a resolve attempt.
5769 """
5772 """
5770
5773
5771 opts = pycompat.byteskwargs(opts)
5774 opts = pycompat.byteskwargs(opts)
5772 confirm = ui.configbool(b'commands', b'resolve.confirm')
5775 confirm = ui.configbool(b'commands', b'resolve.confirm')
5773 flaglist = b'all mark unmark list no_status re_merge'.split()
5776 flaglist = b'all mark unmark list no_status re_merge'.split()
5774 all, mark, unmark, show, nostatus, remerge = [opts.get(o) for o in flaglist]
5777 all, mark, unmark, show, nostatus, remerge = [opts.get(o) for o in flaglist]
5775
5778
5776 actioncount = len(list(filter(None, [show, mark, unmark, remerge])))
5779 actioncount = len(list(filter(None, [show, mark, unmark, remerge])))
5777 if actioncount > 1:
5780 if actioncount > 1:
5778 raise error.Abort(_(b"too many actions specified"))
5781 raise error.Abort(_(b"too many actions specified"))
5779 elif actioncount == 0 and ui.configbool(
5782 elif actioncount == 0 and ui.configbool(
5780 b'commands', b'resolve.explicit-re-merge'
5783 b'commands', b'resolve.explicit-re-merge'
5781 ):
5784 ):
5782 hint = _(b'use --mark, --unmark, --list or --re-merge')
5785 hint = _(b'use --mark, --unmark, --list or --re-merge')
5783 raise error.Abort(_(b'no action specified'), hint=hint)
5786 raise error.Abort(_(b'no action specified'), hint=hint)
5784 if pats and all:
5787 if pats and all:
5785 raise error.Abort(_(b"can't specify --all and patterns"))
5788 raise error.Abort(_(b"can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise error.Abort(
            _(b'no files or directories specified'),
            hint=b'use --all to re-merge all unresolved files',
        )

    if confirm:
        if all:
            if ui.promptchoice(
                _(b're-merge all unresolved files (yn)?$$ &Yes $$ &No')
            ):
                raise error.Abort(_(b'user quit'))
        if mark and not pats:
            if ui.promptchoice(
                _(
                    b'mark all unresolved files as resolved (yn)?'
                    b'$$ &Yes $$ &No'
                )
            ):
                raise error.Abort(_(b'user quit'))
        if unmark and not pats:
            if ui.promptchoice(
                _(
                    b'mark all resolved files as unresolved (yn)?'
                    b'$$ &Yes $$ &No'
                )
            ):
                raise error.Abort(_(b'user quit'))

    uipathfn = scmutil.getuipathfn(repo)

    if show:
        ui.pager(b'resolve')
        fm = ui.formatter(b'resolve', opts)
        ms = mergestatemod.mergestate.read(repo)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # Labels and keys based on merge state. Unresolved path conflicts show
        # as 'P'. Resolved path conflicts show as 'R', the same as normal
        # resolved conflicts.
        mergestateinfo = {
            mergestatemod.MERGE_RECORD_UNRESOLVED: (
                b'resolve.unresolved',
                b'U',
            ),
            mergestatemod.MERGE_RECORD_RESOLVED: (b'resolve.resolved', b'R'),
            mergestatemod.MERGE_RECORD_UNRESOLVED_PATH: (
                b'resolve.unresolved',
                b'P',
            ),
            mergestatemod.MERGE_RECORD_RESOLVED_PATH: (
                b'resolve.resolved',
                b'R',
            ),
        }

        for f in ms:
            if not m(f):
                continue

            label, key = mergestateinfo[ms[f]]
            fm.startitem()
            fm.context(ctx=wctx)
            fm.condwrite(not nostatus, b'mergestatus', b'%s ', key, label=label)
            fm.data(path=f)
            fm.plain(b'%s\n' % uipathfn(f), label=label)
        fm.end()
        return 0

    with repo.wlock():
        ms = mergestatemod.mergestate.read(repo)

        if not (ms.active() or repo.dirstate.p2() != nullid):
            raise error.Abort(
                _(b'resolve command not applicable when not merging')
            )

        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)
        ret = 0
        didwork = False

        tocomplete = []
        hasconflictmarkers = []
        if mark:
            markcheck = ui.config(b'commands', b'resolve.mark-check')
            if markcheck not in [b'warn', b'abort']:
                # Treat all invalid / unrecognized values as 'none'.
                markcheck = False
        for f in ms:
            if not m(f):
                continue

            didwork = True

            # path conflicts must be resolved manually
            if ms[f] in (
                mergestatemod.MERGE_RECORD_UNRESOLVED_PATH,
                mergestatemod.MERGE_RECORD_RESOLVED_PATH,
            ):
                if mark:
                    ms.mark(f, mergestatemod.MERGE_RECORD_RESOLVED_PATH)
                elif unmark:
                    ms.mark(f, mergestatemod.MERGE_RECORD_UNRESOLVED_PATH)
                elif ms[f] == mergestatemod.MERGE_RECORD_UNRESOLVED_PATH:
                    ui.warn(
                        _(b'%s: path conflict must be resolved manually\n')
                        % uipathfn(f)
                    )
                continue

            if mark:
                if markcheck:
                    fdata = repo.wvfs.tryread(f)
                    if (
                        filemerge.hasconflictmarkers(fdata)
                        and ms[f] != mergestatemod.MERGE_RECORD_RESOLVED
                    ):
                        hasconflictmarkers.append(f)
                ms.mark(f, mergestatemod.MERGE_RECORD_RESOLVED)
            elif unmark:
                ms.mark(f, mergestatemod.MERGE_RECORD_UNRESOLVED)
            else:
                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                try:
                    util.copyfile(a, a + b".resolve")
                except (IOError, OSError) as inst:
                    if inst.errno != errno.ENOENT:
                        raise

                try:
                    # preresolve file
                    overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
                    with ui.configoverride(overrides, b'resolve'):
                        complete, r = ms.preresolve(f, wctx)
                    if not complete:
                        tocomplete.append(f)
                    elif r:
                        ret = 1
                finally:
                    ms.commit()

                # replace filemerge's .orig file with our resolve file, but only
                # for merges that are complete
                if complete:
                    try:
                        util.rename(
                            a + b".resolve", scmutil.backuppath(ui, repo, f)
                        )
                    except OSError as inst:
                        if inst.errno != errno.ENOENT:
                            raise

        if hasconflictmarkers:
            ui.warn(
                _(
                    b'warning: the following files still have conflict '
                    b'markers:\n'
                )
                + b''.join(
                    b' ' + uipathfn(f) + b'\n' for f in hasconflictmarkers
                )
            )
            if markcheck == b'abort' and not all and not pats:
                raise error.Abort(
                    _(b'conflict markers detected'),
                    hint=_(b'use --all to mark anyway'),
                )

        for f in tocomplete:
            try:
                # resolve file
                overrides = {(b'ui', b'forcemerge'): opts.get(b'tool', b'')}
                with ui.configoverride(overrides, b'resolve'):
                    r = ms.resolve(f, wctx)
                if r:
                    ret = 1
            finally:
                ms.commit()

            # replace filemerge's .orig file with our resolve file
            a = repo.wjoin(f)
            try:
                util.rename(a + b".resolve", scmutil.backuppath(ui, repo, f))
            except OSError as inst:
                if inst.errno != errno.ENOENT:
                    raise

        ms.commit()
        branchmerge = repo.dirstate.p2() != nullid
        mergestatemod.recordupdates(repo, ms.actions(), branchmerge, None)

        if not didwork and pats:
            hint = None
            if not any([p for p in pats if p.find(b':') >= 0]):
                pats = [b'path:%s' % p for p in pats]
                m = scmutil.match(wctx, pats, opts)
                for f in ms:
                    if not m(f):
                        continue

                    def flag(o):
                        if o == b're_merge':
                            return b'--re-merge '
                        return b'-%s ' % o[0:1]

                    flags = b''.join([flag(o) for o in flaglist if opts.get(o)])
                    hint = _(b"(try: hg resolve %s%s)\n") % (
                        flags,
                        b' '.join(pats),
                    )
                    break
            ui.warn(_(b"arguments do not match paths that need resolving\n"))
            if hint:
                ui.warn(hint)

    unresolvedf = list(ms.unresolved())
    if not unresolvedf:
        ui.status(_(b'(no more unresolved files)\n'))
        cmdutil.checkafterresolved(repo)

    return ret


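# Illustrative sketch, not part of commands.py: the mark-check logic above
# defers to filemerge.hasconflictmarkers(). A rough stand-alone approximation
# using only the standard library could look like this; the marker regex and
# the commented-out sample path are assumptions of the example.
import re


def looks_conflicted(data):
    """Return True if the byte string still carries merge conflict markers."""
    # Mercurial leaves <<<<<<<, =======, and >>>>>>> lines around unresolved hunks.
    return bool(re.search(br'^(<{7}|={7}|>{7})', data, re.MULTILINE))


# with open('path/to/conflicted-file', 'rb') as fh:
#     print(looks_conflicted(fh.read()))
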
@command(
    b'revert',
    [
        (b'a', b'all', None, _(b'revert all changes when no arguments given')),
        (b'd', b'date', b'', _(b'tipmost revision matching date'), _(b'DATE')),
        (b'r', b'rev', b'', _(b'revert to the specified revision'), _(b'REV')),
        (b'C', b'no-backup', None, _(b'do not save backup copies of files')),
        (b'i', b'interactive', None, _(b'interactively select the changes')),
    ]
    + walkopts
    + dryrunopts,
    _(b'[OPTION]... [-r REV] [NAME]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def revert(ui, repo, *pats, **opts):
    """restore files to their checkout state

    .. note::

       To check out earlier revisions, you should use :hg:`update REV`.
       To cancel an uncommitted merge (and lose your changes),
       use :hg:`merge --abort`.

    With no revision specified, revert the specified files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify a
    revision.

    Using the -r/--rev or -d/--date options, revert the given files or
    directories to their states as of a specific revision. Because
    revert does not change the working directory parents, this will
    cause these files to appear modified. This can be helpful to "back
    out" some or all of an earlier change. See :hg:`backout` for a
    related method.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup. It is possible to store
    the backup files in a custom directory relative to the root of the
    repository by setting the ``ui.origbackuppath`` configuration
    option.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help backout` for a way to reverse the effect of an
    earlier changeset.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    if opts.get(b"date"):
        cmdutil.check_incompatible_arguments(opts, b'date', [b'rev'])
        opts[b"rev"] = cmdutil.finddate(ui, repo, opts[b"date"])

    parent, p2 = repo.dirstate.parents()
    if not opts.get(b'rev') and p2 != nullid:
        # revert after merge is a trap for new users (issue2915)
        raise error.Abort(
            _(b'uncommitted merge with no revision specified'),
            hint=_(b"use 'hg update' or see 'hg help revert'"),
        )

    rev = opts.get(b'rev')
    if rev:
        repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
    ctx = scmutil.revsingle(repo, rev)

    if not (
        pats
        or opts.get(b'include')
        or opts.get(b'exclude')
        or opts.get(b'all')
        or opts.get(b'interactive')
    ):
        msg = _(b"no files or directories specified")
        if p2 != nullid:
            hint = _(
                b"uncommitted merge, use --all to discard all changes,"
                b" or 'hg update -C .' to abort the merge"
            )
            raise error.Abort(msg, hint=hint)
        dirty = any(repo.status())
        node = ctx.node()
        if node != parent:
            if dirty:
                hint = (
                    _(
                        b"uncommitted changes, use --all to discard all"
                        b" changes, or 'hg update %d' to update"
                    )
                    % ctx.rev()
                )
            else:
                hint = (
                    _(
                        b"use --all to revert all files,"
                        b" or 'hg update %d' to update"
                    )
                    % ctx.rev()
                )
        elif dirty:
            hint = _(b"uncommitted changes, use --all to discard all changes")
        else:
            hint = _(b"use --all to revert all files")
        raise error.Abort(msg, hint=hint)

    return cmdutil.revert(ui, repo, ctx, *pats, **pycompat.strkwargs(opts))


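# Illustrative sketch, not part of commands.py: driving `hg revert` from a
# script. It assumes `hg` is on PATH, that it runs inside a repository, and
# that a modified file named 'README' exists (all assumptions of the example).
import subprocess

subprocess.run(['hg', 'revert', '--rev', '.', 'README'], check=True)
# Unless --no-backup (or ui.origbackuppath) is in effect, the previous content
# is kept next to the file as 'README.orig'.
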
@command(
    b'rollback',
    dryrunopts + [(b'f', b'force', False, _(b'ignore safety measures'))],
    helpcategory=command.CATEGORY_MAINTENANCE,
)
def rollback(ui, repo, **opts):
    """roll back the last transaction (DANGEROUS) (DEPRECATED)

    Please use :hg:`commit --amend` instead of rollback to correct
    mistakes in the last commit.

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository.

    .. container:: verbose

      For example, the following commands are transactional, and their
      effects can be rolled back:

      - commit
      - import
      - pull
      - push (with this repository as the destination)
      - unbundle

      To avoid permanent data loss, rollback will refuse to rollback a
      commit transaction if it isn't checked out. Use --force to
      override this protection.

      The rollback command can be entirely disabled by setting the
      ``ui.rollback`` configuration setting to false. If you're here
      because you want to use rollback and it's disabled, you can
      re-enable the command by setting ``ui.rollback`` to true.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.

    Returns 0 on success, 1 if no rollback data is available.
    """
    if not ui.configbool(b'ui', b'rollback'):
        raise error.Abort(
            _(b'rollback is disabled because it is unsafe'),
            hint=b'see `hg help -v rollback` for information',
        )
    return repo.rollback(dryrun=opts.get('dry_run'), force=opts.get('force'))


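# Illustrative sketch, not part of commands.py: because rollback is gated on
# the ``ui.rollback`` option checked above, a one-off override can be passed
# on the command line. Assumes `hg` is on PATH and runs inside a repository.
import subprocess

subprocess.run(
    ['hg', '--config', 'ui.rollback=true', 'rollback', '--dry-run'],
    check=False,  # exits 1 when there is no rollback data available
)
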
@command(
    b'root',
    [] + formatteropts,
    intents={INTENT_READONLY},
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def root(ui, repo, **opts):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.

    .. container:: verbose

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :hgpath: String. Path to the .hg directory.
      :storepath: String. Path to the directory holding versioned data.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    with ui.formatter(b'root', opts) as fm:
        fm.startitem()
        fm.write(b'reporoot', b'%s\n', repo.root)
        fm.data(hgpath=repo.path, storepath=repo.spath)


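# Illustrative sketch, not part of commands.py: the formatter fields written
# above (reporoot, hgpath, storepath) are available as template keywords, so a
# script can read them directly. Assumes `hg` is on PATH and that this runs
# inside a repository.
import subprocess

out = subprocess.run(
    ['hg', 'root', '-T', '{reporoot}\n{hgpath}\n{storepath}\n'],
    check=True,
    capture_output=True,
    text=True,
).stdout
reporoot, hgpath, storepath = out.splitlines()
print('repository root:', reporoot)
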
@command(
    b'serve',
    [
        (
            b'A',
            b'accesslog',
            b'',
            _(b'name of access log file to write to'),
            _(b'FILE'),
        ),
        (b'd', b'daemon', None, _(b'run server in background')),
        (b'', b'daemon-postexec', [], _(b'used internally by daemon mode')),
        (
            b'E',
            b'errorlog',
            b'',
            _(b'name of error log file to write to'),
            _(b'FILE'),
        ),
        # use string type, then we can check if something was passed
        (
            b'p',
            b'port',
            b'',
            _(b'port to listen on (default: 8000)'),
            _(b'PORT'),
        ),
        (
            b'a',
            b'address',
            b'',
            _(b'address to listen on (default: all interfaces)'),
            _(b'ADDR'),
        ),
        (
            b'',
            b'prefix',
            b'',
            _(b'prefix path to serve from (default: server root)'),
            _(b'PREFIX'),
        ),
        (
            b'n',
            b'name',
            b'',
            _(b'name to show in web pages (default: working directory)'),
            _(b'NAME'),
        ),
        (
            b'',
            b'web-conf',
            b'',
            _(b"name of the hgweb config file (see 'hg help hgweb')"),
            _(b'FILE'),
        ),
        (
            b'',
            b'webdir-conf',
            b'',
            _(b'name of the hgweb config file (DEPRECATED)'),
            _(b'FILE'),
        ),
        (
            b'',
            b'pid-file',
            b'',
            _(b'name of file to write process ID to'),
            _(b'FILE'),
        ),
        (b'', b'stdio', None, _(b'for remote clients (ADVANCED)')),
        (
            b'',
            b'cmdserver',
            b'',
            _(b'for remote clients (ADVANCED)'),
            _(b'MODE'),
        ),
        (b't', b'templates', b'', _(b'web templates to use'), _(b'TEMPLATE')),
        (b'', b'style', b'', _(b'template style to use'), _(b'STYLE')),
        (b'6', b'ipv6', None, _(b'use IPv6 in addition to IPv4')),
        (b'', b'certificate', b'', _(b'SSL certificate file'), _(b'FILE')),
        (b'', b'print-url', None, _(b'start and print only the URL')),
    ]
    + subrepoopts,
    _(b'[OPTION]...'),
    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
    helpbasic=True,
    optionalrepo=True,
)
def serve(ui, repo, **opts):
    """start stand-alone webserver

    Start a local HTTP repository browser and pull server. You can use
    this for ad-hoc sharing and browsing of repositories. It is
    recommended to use a real web server to serve a repository for
    longer periods of time.

    Please note that the server does not implement access control.
    This means that, by default, anybody can read from the server and
    nobody can write to it by default. Set the ``web.allow-push``
    option to ``*`` to allow everybody to push to the server. You
    should use a real web server if you need to authenticate users.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.

    To have the server choose a free port number to listen on, specify
    a port number of 0; in this case, the server will print the port
    number it uses.

    Returns 0 on success.
    """

    cmdutil.check_incompatible_arguments(opts, 'stdio', ['cmdserver'])
    opts = pycompat.byteskwargs(opts)
    if opts[b"print_url"] and ui.verbose:
        raise error.Abort(_(b"cannot use --print-url with --verbose"))

    if opts[b"stdio"]:
        if repo is None:
            raise error.RepoError(
                _(b"there is no Mercurial repository here (.hg not found)")
            )
        s = wireprotoserver.sshserver(ui, repo)
        s.serve_forever()

    service = server.createservice(ui, repo, opts)
    return server.runservice(opts, initfn=service.init, runfn=service.run)


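# Illustrative sketch, not part of commands.py: starting a throwaway server on
# a free port, as the docstring describes (port 0 plus --print-url), and
# fetching its front page. Assumes `hg` is on PATH and runs inside a repository.
import subprocess
import urllib.request

proc = subprocess.Popen(
    ['hg', 'serve', '--port', '0', '--print-url'],
    stdout=subprocess.PIPE,
    text=True,
)
try:
    url = proc.stdout.readline().strip()  # e.g. http://localhost:23456/
    with urllib.request.urlopen(url) as resp:
        print(resp.status, len(resp.read()), 'bytes served from', url)
finally:
    proc.terminate()
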
@command(
    b'shelve',
    [
        (
            b'A',
            b'addremove',
            None,
            _(b'mark new/missing files as added/removed before shelving'),
        ),
        (b'u', b'unknown', None, _(b'store unknown files in the shelve')),
        (b'', b'cleanup', None, _(b'delete all shelved changes')),
        (
            b'',
            b'date',
            b'',
            _(b'shelve with the specified commit date'),
            _(b'DATE'),
        ),
        (b'd', b'delete', None, _(b'delete the named shelved change(s)')),
        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
        (
            b'k',
            b'keep',
            False,
            _(b'shelve, but keep changes in the working directory'),
        ),
        (b'l', b'list', None, _(b'list current shelves')),
        (b'm', b'message', b'', _(b'use text as shelve message'), _(b'TEXT')),
        (
            b'n',
            b'name',
            b'',
            _(b'use the given name for the shelved commit'),
            _(b'NAME'),
        ),
        (
            b'p',
            b'patch',
            None,
            _(
                b'output patches for changes (provide the names of the shelved '
                b'changes as positional arguments)'
            ),
        ),
        (b'i', b'interactive', None, _(b'interactive mode')),
        (
            b'',
            b'stat',
            None,
            _(
                b'output diffstat-style summary of changes (provide the names of '
                b'the shelved changes as positional arguments)'
            ),
        ),
    ]
    + cmdutil.walkopts,
    _(b'hg shelve [OPTION]... [FILE]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
)
def shelve(ui, repo, *pats, **opts):
    '''save and set aside changes from the working directory

    Shelving takes files that "hg status" reports as not clean, saves
    the modifications to a bundle (a shelved change), and reverts the
    files so that their state in the working directory becomes clean.

    To restore these changes to the working directory, using "hg
    unshelve"; this will work even if you switch to a different
    commit.

    When no files are specified, "hg shelve" saves all not-clean
    files. If specific files or directories are named, only changes to
    those files are shelved.

    In bare shelve (when no files are specified, without interactive,
    include and exclude option), shelving remembers information if the
    working directory was on newly created branch, in other words working
    directory was on different branch than its first parent. In this
    situation unshelving restores branch information to the working directory.

    Each shelved change has a name that makes it easier to find later.
    The name of a shelved change defaults to being based on the active
    bookmark, or if there is no active bookmark, the current named
    branch. To specify a different name, use ``--name``.

    To see a list of existing shelved changes, use the ``--list``
    option. For each shelved change, this will print its name, age,
    and description; use ``--patch`` or ``--stat`` for more details.

    To delete specific shelved changes, use ``--delete``. To delete
    all shelved changes, use ``--cleanup``.
    '''
    opts = pycompat.byteskwargs(opts)
    allowables = [
        (b'addremove', {b'create'}),  # 'create' is pseudo action
        (b'unknown', {b'create'}),
        (b'cleanup', {b'cleanup'}),
        # ('date', {'create'}),  # ignored for passing '--date "0 0"' in tests
        (b'delete', {b'delete'}),
        (b'edit', {b'create'}),
        (b'keep', {b'create'}),
        (b'list', {b'list'}),
        (b'message', {b'create'}),
        (b'name', {b'create'}),
        (b'patch', {b'patch', b'list'}),
        (b'stat', {b'stat', b'list'}),
    ]

    def checkopt(opt):
        if opts.get(opt):
            for i, allowable in allowables:
                if opts[i] and opt not in allowable:
                    raise error.Abort(
                        _(
                            b"options '--%s' and '--%s' may not be "
                            b"used together"
                        )
                        % (opt, i)
                    )
            return True

    if checkopt(b'cleanup'):
        if pats:
            raise error.Abort(_(b"cannot specify names when using '--cleanup'"))
        return shelvemod.cleanupcmd(ui, repo)
    elif checkopt(b'delete'):
        return shelvemod.deletecmd(ui, repo, pats)
    elif checkopt(b'list'):
        return shelvemod.listcmd(ui, repo, pats, opts)
    elif checkopt(b'patch') or checkopt(b'stat'):
        return shelvemod.patchcmds(ui, repo, pats, opts)
    else:
        return shelvemod.createcmd(ui, repo, pats, opts)


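# Illustrative sketch, not part of commands.py: a shelve/unshelve round trip
# from a script. Assumes `hg` is on PATH, the working directory is a repository
# with uncommitted changes, and the shelf name 'wip' is an arbitrary choice.
import subprocess

subprocess.run(['hg', 'shelve', '--name', 'wip'], check=True)
subprocess.run(['hg', 'shelve', '--list'], check=True)  # lists 'wip'
subprocess.run(['hg', 'unshelve', 'wip'], check=True)   # restores the changes
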
_NOTTERSE = b'nothing'


@command(
    b'status|st',
    [
        (b'A', b'all', None, _(b'show status of all files')),
        (b'm', b'modified', None, _(b'show only modified files')),
        (b'a', b'added', None, _(b'show only added files')),
        (b'r', b'removed', None, _(b'show only removed files')),
        (b'd', b'deleted', None, _(b'show only missing files')),
        (b'c', b'clean', None, _(b'show only files without changes')),
        (b'u', b'unknown', None, _(b'show only unknown (not tracked) files')),
        (b'i', b'ignored', None, _(b'show only ignored files')),
        (b'n', b'no-status', None, _(b'hide status prefix')),
        (b't', b'terse', _NOTTERSE, _(b'show the terse output (EXPERIMENTAL)')),
        (
            b'C',
            b'copies',
            None,
            _(b'show source of copied files (DEFAULT: ui.statuscopies)'),
        ),
        (
            b'0',
            b'print0',
            None,
            _(b'end filenames with NUL, for use with xargs'),
        ),
        (b'', b'rev', [], _(b'show difference from revision'), _(b'REV')),
        (
            b'',
            b'change',
            b'',
            _(b'list the changed files of a revision'),
            _(b'REV'),
        ),
    ]
    + walkopts
    + subrepoopts
    + formatteropts,
    _(b'[OPTION]... [FILE]...'),
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
    helpbasic=True,
    inferrepo=True,
    intents={INTENT_READONLY},
)
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    the source of a copy/move operation, are not listed unless
    -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
    Unless options described with "show only ..." are given, the
    options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    .. note::

       :hg:`status` may appear to disagree with diff if permissions have
       changed or a merge has occurred. The standard diff format does
       not report permission changes and diff only reports changes
       relative to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the differences between them are
    shown. The --change option can also be used as a shortcut to list
    the changed files of a revision from its first parent.

    The codes used to show the status of files are::

      M = modified
      A = added
      R = removed
      C = clean
      ! = missing (deleted by non-hg command, but still tracked)
      ? = not tracked
      I = ignored
        = origin of the previous file (with --copies)

    .. container:: verbose

      The -t/--terse option abbreviates the output by showing only the directory
      name if all the files in it share the same status. The option takes an
      argument indicating the statuses to abbreviate: 'm' for 'modified', 'a'
      for 'added', 'r' for 'removed', 'd' for 'deleted', 'u' for 'unknown', 'i'
      for 'ignored' and 'c' for clean.

      It abbreviates only those statuses which are passed. Note that clean and
      ignored files are not displayed with '--terse ic' unless the -c/--clean
      and -i/--ignored options are also used.

      The -v/--verbose option shows information when the repository is in an
      unfinished merge, shelve, rebase state etc. You can have this behavior
      turned on by default by enabling the ``commands.status.verbose`` option.

      You can skip displaying some of these states by setting
      ``commands.status.skipstates`` to one or more of: 'bisect', 'graft',
      'histedit', 'merge', 'rebase', or 'unshelve'.

      Template:

      The following keywords are supported in addition to the common template
      keywords and functions. See also :hg:`help templates`.

      :path: String. Repository-absolute path of the file.
      :source: String. Repository-absolute path of the file originated from.
               Available if ``--copies`` is specified.
      :status: String. Character denoting file's status.

      Examples:

      - show changes in the working directory relative to a
        changeset::

          hg status --rev 9353

      - show changes in the working directory relative to the
        current directory (see :hg:`help patterns` for more information)::

          hg status re:

      - show all changes including copies in an existing changeset::

          hg status --copies --change 9353

      - get a NUL separated list of added files, suitable for xargs::

          hg status -an0

      - show more information about the repository status, abbreviating
        added, removed, modified, deleted, and untracked paths::

          hg status -v -t mardu

    Returns 0 on success.

    """

    cmdutil.check_at_most_one_arg(opts, 'rev', 'change')
    opts = pycompat.byteskwargs(opts)
    revs = opts.get(b'rev')
    change = opts.get(b'change')
    terse = opts.get(b'terse')
    if terse is _NOTTERSE:
        if revs:
            terse = b''
        else:
            terse = ui.config(b'commands', b'status.terse')

    if revs and terse:
        msg = _(b'cannot use --terse with --rev')
        raise error.Abort(msg)
    elif change:
        repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn')
        ctx2 = scmutil.revsingle(repo, change, None)
        ctx1 = ctx2.p1()
    else:
        repo = scmutil.unhidehashlikerevs(repo, revs, b'nowarn')
        ctx1, ctx2 = scmutil.revpair(repo, revs)

    forcerelativevalue = None
    if ui.hasconfig(b'commands', b'status.relative'):
        forcerelativevalue = ui.configbool(b'commands', b'status.relative')
    uipathfn = scmutil.getuipathfn(
        repo,
        legacyrelativevalue=bool(pats),
        forcerelativevalue=forcerelativevalue,
    )

    if opts.get(b'print0'):
        end = b'\0'
    else:
        end = b'\n'
    states = b'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts.get(k)]
    if opts.get(b'all'):
        show += ui.quiet and (states[:4] + [b'clean']) or states

    if not show:
        if ui.quiet:
            show = states[:4]
        else:
            show = states[:5]

    m = scmutil.match(ctx2, pats, opts)
    if terse:
        # we need to compute clean and unknown to terse
        stat = repo.status(
            ctx1.node(),
            ctx2.node(),
            m,
            b'ignored' in show or b'i' in terse,
            clean=True,
            unknown=True,
            listsubrepos=opts.get(b'subrepos'),
        )

        stat = cmdutil.tersedir(stat, terse)
    else:
        stat = repo.status(
            ctx1.node(),
            ctx2.node(),
            m,
            b'ignored' in show,
            b'clean' in show,
            b'unknown' in show,
            opts.get(b'subrepos'),
        )

    changestates = zip(
        states,
        pycompat.iterbytestr(b'MAR!?IC'),
        [getattr(stat, s.decode('utf8')) for s in states],
    )

    copy = {}
    if (
        opts.get(b'all')
        or opts.get(b'copies')
        or ui.configbool(b'ui', b'statuscopies')
    ) and not opts.get(b'no_status'):
        copy = copies.pathcopies(ctx1, ctx2, m)

    morestatus = None
    if (
        ui.verbose or ui.configbool(b'commands', b'status.verbose')
    ) and not ui.plain():
        morestatus = cmdutil.readmorestatus(repo)

    ui.pager(b'status')
    fm = ui.formatter(b'status', opts)
    fmt = b'%s' + end
    showchar = not opts.get(b'no_status')

    for state, char, files in changestates:
        if state in show:
            label = b'status.' + state
            for f in files:
                fm.startitem()
                fm.context(ctx=ctx2)
                fm.data(itemtype=b'file', path=f)
                fm.condwrite(showchar, b'status', b'%s ', char, label=label)
                fm.plain(fmt % uipathfn(f), label=label)
                if f in copy:
                    fm.data(source=copy[f])
                    fm.plain(
                        (b' %s' + end) % uipathfn(copy[f]),
                        label=b'status.copied',
                    )
                if morestatus:
                    morestatus.formatfile(f, fm)

    if morestatus:
        morestatus.formatfooter(fm)
    fm.end()


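# Illustrative sketch, not part of commands.py: consuming the NUL-separated
# output produced by -0/--print0 (compare the xargs example in the docstring).
# Assumes `hg` is on PATH and that this runs inside a repository.
import subprocess

raw = subprocess.run(
    ['hg', 'status', '-an0'],  # added files only, no status prefix, NUL-terminated
    check=True,
    capture_output=True,
).stdout
added = [p.decode('utf-8') for p in raw.split(b'\0') if p]
print('added files:', added)
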
@command(
    b'summary|sum',
    [(b'', b'remote', None, _(b'check for push and pull'))],
    b'[--remote]',
    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
    helpbasic=True,
    intents={INTENT_READONLY},
)
def summary(ui, repo, **opts):
    """summarize working directory state

    This generates a brief summary of the working directory state,
    including parents, branch, commit status, phase and available updates.

    With the --remote option, this will check the default paths for
    incoming and outgoing changes. This can be time-consuming.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    ui.pager(b'summary')
    ctx = repo[None]
    parents = ctx.parents()
    pnode = parents[0].node()
    marks = []

    try:
        ms = mergestatemod.mergestate.read(repo)
    except error.UnsupportedMergeRecords as e:
        s = b' '.join(e.recordtypes)
        ui.warn(
            _(b'warning: merge state has unsupported record types: %s\n') % s
        )
        unresolved = []
    else:
        unresolved = list(ms.unresolved())

    for p in parents:
        # label with log.changeset (instead of log.parent) since this
        # shows a working directory parent *changeset*:
        # i18n: column positioning for "hg summary"
        ui.write(
            _(b'parent: %d:%s ') % (p.rev(), p),
            label=logcmdutil.changesetlabels(p),
        )
        ui.write(b' '.join(p.tags()), label=b'log.tag')
        if p.bookmarks():
            marks.extend(p.bookmarks())
        if p.rev() == -1:
            if not len(repo):
                ui.write(_(b' (empty repository)'))
            else:
                ui.write(_(b' (no revision checked out)'))
        if p.obsolete():
            ui.write(_(b' (obsolete)'))
        if p.isunstable():
            instabilities = (
                ui.label(instability, b'trouble.%s' % instability)
                for instability in p.instabilities()
            )
            ui.write(b' (' + b', '.join(instabilities) + b')')
        ui.write(b'\n')
        if p.description():
            ui.status(
                b' ' + p.description().splitlines()[0].strip() + b'\n',
                label=b'log.summary',
            )

    branch = ctx.branch()
    bheads = repo.branchheads(branch)
    # i18n: column positioning for "hg summary"
    m = _(b'branch: %s\n') % branch
    if branch != b'default':
        ui.write(m, label=b'log.branch')
    else:
        ui.status(m, label=b'log.branch')

    if marks:
        active = repo._activebookmark
        # i18n: column positioning for "hg summary"
        ui.write(_(b'bookmarks:'), label=b'log.bookmark')
        if active is not None:
            if active in marks:
                ui.write(b' *' + active, label=bookmarks.activebookmarklabel)
                marks.remove(active)
            else:
                ui.write(b' [%s]' % active, label=bookmarks.activebookmarklabel)
        for m in marks:
            ui.write(b' ' + m, label=b'log.bookmark')
6829 ui.write(b' ' + m, label=b'log.bookmark')
6827 ui.write(b'\n', label=b'log.bookmark')
6830 ui.write(b'\n', label=b'log.bookmark')
6828
6831
6829 status = repo.status(unknown=True)
6832 status = repo.status(unknown=True)
6830
6833
6831 c = repo.dirstate.copies()
6834 c = repo.dirstate.copies()
6832 copied, renamed = [], []
6835 copied, renamed = [], []
6833 for d, s in pycompat.iteritems(c):
6836 for d, s in pycompat.iteritems(c):
6834 if s in status.removed:
6837 if s in status.removed:
6835 status.removed.remove(s)
6838 status.removed.remove(s)
6836 renamed.append(d)
6839 renamed.append(d)
6837 else:
6840 else:
6838 copied.append(d)
6841 copied.append(d)
6839 if d in status.added:
6842 if d in status.added:
6840 status.added.remove(d)
6843 status.added.remove(d)
6841
6844
6842 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
6845 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
6843
6846
6844 labels = [
6847 labels = [
6845 (ui.label(_(b'%d modified'), b'status.modified'), status.modified),
6848 (ui.label(_(b'%d modified'), b'status.modified'), status.modified),
6846 (ui.label(_(b'%d added'), b'status.added'), status.added),
6849 (ui.label(_(b'%d added'), b'status.added'), status.added),
6847 (ui.label(_(b'%d removed'), b'status.removed'), status.removed),
6850 (ui.label(_(b'%d removed'), b'status.removed'), status.removed),
6848 (ui.label(_(b'%d renamed'), b'status.copied'), renamed),
6851 (ui.label(_(b'%d renamed'), b'status.copied'), renamed),
6849 (ui.label(_(b'%d copied'), b'status.copied'), copied),
6852 (ui.label(_(b'%d copied'), b'status.copied'), copied),
6850 (ui.label(_(b'%d deleted'), b'status.deleted'), status.deleted),
6853 (ui.label(_(b'%d deleted'), b'status.deleted'), status.deleted),
6851 (ui.label(_(b'%d unknown'), b'status.unknown'), status.unknown),
6854 (ui.label(_(b'%d unknown'), b'status.unknown'), status.unknown),
6852 (ui.label(_(b'%d unresolved'), b'resolve.unresolved'), unresolved),
6855 (ui.label(_(b'%d unresolved'), b'resolve.unresolved'), unresolved),
6853 (ui.label(_(b'%d subrepos'), b'status.modified'), subs),
6856 (ui.label(_(b'%d subrepos'), b'status.modified'), subs),
6854 ]
6857 ]
6855 t = []
6858 t = []
6856 for l, s in labels:
6859 for l, s in labels:
6857 if s:
6860 if s:
6858 t.append(l % len(s))
6861 t.append(l % len(s))
6859
6862
6860 t = b', '.join(t)
6863 t = b', '.join(t)
6861 cleanworkdir = False
6864 cleanworkdir = False
6862
6865
6863 if repo.vfs.exists(b'graftstate'):
6866 if repo.vfs.exists(b'graftstate'):
6864 t += _(b' (graft in progress)')
6867 t += _(b' (graft in progress)')
6865 if repo.vfs.exists(b'updatestate'):
6868 if repo.vfs.exists(b'updatestate'):
6866 t += _(b' (interrupted update)')
6869 t += _(b' (interrupted update)')
6867 elif len(parents) > 1:
6870 elif len(parents) > 1:
6868 t += _(b' (merge)')
6871 t += _(b' (merge)')
6869 elif branch != parents[0].branch():
6872 elif branch != parents[0].branch():
6870 t += _(b' (new branch)')
6873 t += _(b' (new branch)')
6871 elif parents[0].closesbranch() and pnode in repo.branchheads(
6874 elif parents[0].closesbranch() and pnode in repo.branchheads(
6872 branch, closed=True
6875 branch, closed=True
6873 ):
6876 ):
6874 t += _(b' (head closed)')
6877 t += _(b' (head closed)')
6875 elif not (
6878 elif not (
6876 status.modified
6879 status.modified
6877 or status.added
6880 or status.added
6878 or status.removed
6881 or status.removed
6879 or renamed
6882 or renamed
6880 or copied
6883 or copied
6881 or subs
6884 or subs
6882 ):
6885 ):
6883 t += _(b' (clean)')
6886 t += _(b' (clean)')
6884 cleanworkdir = True
6887 cleanworkdir = True
6885 elif pnode not in bheads:
6888 elif pnode not in bheads:
6886 t += _(b' (new branch head)')
6889 t += _(b' (new branch head)')
6887
6890
6888 if parents:
6891 if parents:
6889 pendingphase = max(p.phase() for p in parents)
6892 pendingphase = max(p.phase() for p in parents)
6890 else:
6893 else:
6891 pendingphase = phases.public
6894 pendingphase = phases.public
6892
6895
6893 if pendingphase > phases.newcommitphase(ui):
6896 if pendingphase > phases.newcommitphase(ui):
6894 t += b' (%s)' % phases.phasenames[pendingphase]
6897 t += b' (%s)' % phases.phasenames[pendingphase]
6895
6898
6896 if cleanworkdir:
6899 if cleanworkdir:
6897 # i18n: column positioning for "hg summary"
6900 # i18n: column positioning for "hg summary"
6898 ui.status(_(b'commit: %s\n') % t.strip())
6901 ui.status(_(b'commit: %s\n') % t.strip())
6899 else:
6902 else:
6900 # i18n: column positioning for "hg summary"
6903 # i18n: column positioning for "hg summary"
6901 ui.write(_(b'commit: %s\n') % t.strip())
6904 ui.write(_(b'commit: %s\n') % t.strip())
6902
6905
6903 # all ancestors of branch heads - all ancestors of parent = new csets
6906 # all ancestors of branch heads - all ancestors of parent = new csets
6904 new = len(
6907 new = len(
6905 repo.changelog.findmissing([pctx.node() for pctx in parents], bheads)
6908 repo.changelog.findmissing([pctx.node() for pctx in parents], bheads)
6906 )
6909 )
6907
6910
6908 if new == 0:
6911 if new == 0:
6909 # i18n: column positioning for "hg summary"
6912 # i18n: column positioning for "hg summary"
6910 ui.status(_(b'update: (current)\n'))
6913 ui.status(_(b'update: (current)\n'))
6911 elif pnode not in bheads:
6914 elif pnode not in bheads:
6912 # i18n: column positioning for "hg summary"
6915 # i18n: column positioning for "hg summary"
6913 ui.write(_(b'update: %d new changesets (update)\n') % new)
6916 ui.write(_(b'update: %d new changesets (update)\n') % new)
6914 else:
6917 else:
6915 # i18n: column positioning for "hg summary"
6918 # i18n: column positioning for "hg summary"
6916 ui.write(
6919 ui.write(
6917 _(b'update: %d new changesets, %d branch heads (merge)\n')
6920 _(b'update: %d new changesets, %d branch heads (merge)\n')
6918 % (new, len(bheads))
6921 % (new, len(bheads))
6919 )
6922 )
6920
6923
6921 t = []
6924 t = []
6922 draft = len(repo.revs(b'draft()'))
6925 draft = len(repo.revs(b'draft()'))
6923 if draft:
6926 if draft:
6924 t.append(_(b'%d draft') % draft)
6927 t.append(_(b'%d draft') % draft)
6925 secret = len(repo.revs(b'secret()'))
6928 secret = len(repo.revs(b'secret()'))
6926 if secret:
6929 if secret:
6927 t.append(_(b'%d secret') % secret)
6930 t.append(_(b'%d secret') % secret)
6928
6931
6929 if draft or secret:
6932 if draft or secret:
6930 ui.status(_(b'phases: %s\n') % b', '.join(t))
6933 ui.status(_(b'phases: %s\n') % b', '.join(t))
6931
6934
6932 if obsolete.isenabled(repo, obsolete.createmarkersopt):
6935 if obsolete.isenabled(repo, obsolete.createmarkersopt):
6933 for trouble in (b"orphan", b"contentdivergent", b"phasedivergent"):
6936 for trouble in (b"orphan", b"contentdivergent", b"phasedivergent"):
6934 numtrouble = len(repo.revs(trouble + b"()"))
6937 numtrouble = len(repo.revs(trouble + b"()"))
6935 # We write all the possibilities to ease translation
6938 # We write all the possibilities to ease translation
6936 troublemsg = {
6939 troublemsg = {
6937 b"orphan": _(b"orphan: %d changesets"),
6940 b"orphan": _(b"orphan: %d changesets"),
6938 b"contentdivergent": _(b"content-divergent: %d changesets"),
6941 b"contentdivergent": _(b"content-divergent: %d changesets"),
6939 b"phasedivergent": _(b"phase-divergent: %d changesets"),
6942 b"phasedivergent": _(b"phase-divergent: %d changesets"),
6940 }
6943 }
6941 if numtrouble > 0:
6944 if numtrouble > 0:
6942 ui.status(troublemsg[trouble] % numtrouble + b"\n")
6945 ui.status(troublemsg[trouble] % numtrouble + b"\n")
6943
6946
6944 cmdutil.summaryhooks(ui, repo)
6947 cmdutil.summaryhooks(ui, repo)
6945
6948
6946 if opts.get(b'remote'):
6949 if opts.get(b'remote'):
6947 needsincoming, needsoutgoing = True, True
6950 needsincoming, needsoutgoing = True, True
6948 else:
6951 else:
6949 needsincoming, needsoutgoing = False, False
6952 needsincoming, needsoutgoing = False, False
6950 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
6953 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
6951 if i:
6954 if i:
6952 needsincoming = True
6955 needsincoming = True
6953 if o:
6956 if o:
6954 needsoutgoing = True
6957 needsoutgoing = True
6955 if not needsincoming and not needsoutgoing:
6958 if not needsincoming and not needsoutgoing:
6956 return
6959 return
6957
6960
6958 def getincoming():
6961 def getincoming():
6959 source, branches = hg.parseurl(ui.expandpath(b'default'))
6962 source, branches = hg.parseurl(ui.expandpath(b'default'))
6960 sbranch = branches[0]
6963 sbranch = branches[0]
6961 try:
6964 try:
6962 other = hg.peer(repo, {}, source)
6965 other = hg.peer(repo, {}, source)
6963 except error.RepoError:
6966 except error.RepoError:
6964 if opts.get(b'remote'):
6967 if opts.get(b'remote'):
6965 raise
6968 raise
6966 return source, sbranch, None, None, None
6969 return source, sbranch, None, None, None
6967 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
6970 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
6968 if revs:
6971 if revs:
6969 revs = [other.lookup(rev) for rev in revs]
6972 revs = [other.lookup(rev) for rev in revs]
6970 ui.debug(b'comparing with %s\n' % util.hidepassword(source))
6973 ui.debug(b'comparing with %s\n' % util.hidepassword(source))
6971 repo.ui.pushbuffer()
6974 repo.ui.pushbuffer()
6972 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
6975 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
6973 repo.ui.popbuffer()
6976 repo.ui.popbuffer()
6974 return source, sbranch, other, commoninc, commoninc[1]
6977 return source, sbranch, other, commoninc, commoninc[1]
6975
6978
6976 if needsincoming:
6979 if needsincoming:
6977 source, sbranch, sother, commoninc, incoming = getincoming()
6980 source, sbranch, sother, commoninc, incoming = getincoming()
6978 else:
6981 else:
6979 source = sbranch = sother = commoninc = incoming = None
6982 source = sbranch = sother = commoninc = incoming = None
6980
6983
6981 def getoutgoing():
6984 def getoutgoing():
6982 dest, branches = hg.parseurl(ui.expandpath(b'default-push', b'default'))
6985 dest, branches = hg.parseurl(ui.expandpath(b'default-push', b'default'))
6983 dbranch = branches[0]
6986 dbranch = branches[0]
6984 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
6987 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
6985 if source != dest:
6988 if source != dest:
6986 try:
6989 try:
6987 dother = hg.peer(repo, {}, dest)
6990 dother = hg.peer(repo, {}, dest)
6988 except error.RepoError:
6991 except error.RepoError:
6989 if opts.get(b'remote'):
6992 if opts.get(b'remote'):
6990 raise
6993 raise
6991 return dest, dbranch, None, None
6994 return dest, dbranch, None, None
6992 ui.debug(b'comparing with %s\n' % util.hidepassword(dest))
6995 ui.debug(b'comparing with %s\n' % util.hidepassword(dest))
6993 elif sother is None:
6996 elif sother is None:
6994 # there is no explicit destination peer, but source one is invalid
6997 # there is no explicit destination peer, but source one is invalid
6995 return dest, dbranch, None, None
6998 return dest, dbranch, None, None
6996 else:
6999 else:
6997 dother = sother
7000 dother = sother
6998 if source != dest or (sbranch is not None and sbranch != dbranch):
7001 if source != dest or (sbranch is not None and sbranch != dbranch):
6999 common = None
7002 common = None
7000 else:
7003 else:
7001 common = commoninc
7004 common = commoninc
7002 if revs:
7005 if revs:
7003 revs = [repo.lookup(rev) for rev in revs]
7006 revs = [repo.lookup(rev) for rev in revs]
7004 repo.ui.pushbuffer()
7007 repo.ui.pushbuffer()
7005 outgoing = discovery.findcommonoutgoing(
7008 outgoing = discovery.findcommonoutgoing(
7006 repo, dother, onlyheads=revs, commoninc=common
7009 repo, dother, onlyheads=revs, commoninc=common
7007 )
7010 )
7008 repo.ui.popbuffer()
7011 repo.ui.popbuffer()
7009 return dest, dbranch, dother, outgoing
7012 return dest, dbranch, dother, outgoing
7010
7013
7011 if needsoutgoing:
7014 if needsoutgoing:
7012 dest, dbranch, dother, outgoing = getoutgoing()
7015 dest, dbranch, dother, outgoing = getoutgoing()
7013 else:
7016 else:
7014 dest = dbranch = dother = outgoing = None
7017 dest = dbranch = dother = outgoing = None
7015
7018
7016 if opts.get(b'remote'):
7019 if opts.get(b'remote'):
7017 t = []
7020 t = []
7018 if incoming:
7021 if incoming:
7019 t.append(_(b'1 or more incoming'))
7022 t.append(_(b'1 or more incoming'))
7020 o = outgoing.missing
7023 o = outgoing.missing
7021 if o:
7024 if o:
7022 t.append(_(b'%d outgoing') % len(o))
7025 t.append(_(b'%d outgoing') % len(o))
7023 other = dother or sother
7026 other = dother or sother
7024 if b'bookmarks' in other.listkeys(b'namespaces'):
7027 if b'bookmarks' in other.listkeys(b'namespaces'):
7025 counts = bookmarks.summary(repo, other)
7028 counts = bookmarks.summary(repo, other)
7026 if counts[0] > 0:
7029 if counts[0] > 0:
7027 t.append(_(b'%d incoming bookmarks') % counts[0])
7030 t.append(_(b'%d incoming bookmarks') % counts[0])
7028 if counts[1] > 0:
7031 if counts[1] > 0:
7029 t.append(_(b'%d outgoing bookmarks') % counts[1])
7032 t.append(_(b'%d outgoing bookmarks') % counts[1])
7030
7033
7031 if t:
7034 if t:
7032 # i18n: column positioning for "hg summary"
7035 # i18n: column positioning for "hg summary"
7033 ui.write(_(b'remote: %s\n') % (b', '.join(t)))
7036 ui.write(_(b'remote: %s\n') % (b', '.join(t)))
7034 else:
7037 else:
7035 # i18n: column positioning for "hg summary"
7038 # i18n: column positioning for "hg summary"
7036 ui.status(_(b'remote: (synced)\n'))
7039 ui.status(_(b'remote: (synced)\n'))
7037
7040
7038 cmdutil.summaryremotehooks(
7041 cmdutil.summaryremotehooks(
7039 ui,
7042 ui,
7040 repo,
7043 repo,
7041 opts,
7044 opts,
7042 (
7045 (
7043 (source, sbranch, sother, commoninc),
7046 (source, sbranch, sother, commoninc),
7044 (dest, dbranch, dother, outgoing),
7047 (dest, dbranch, dother, outgoing),
7045 ),
7048 ),
7046 )
7049 )
7047
7050
7048
7051
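The --remote path of summary above is a thin wrapper around hg.parseurl, hg.peer and the discovery helpers. A hedged sketch, assuming an existing ui and localrepo object (the helper name pending_counts and its return shape are invented here), of the same calls used standalone:

    from mercurial import discovery, hg

    def pending_counts(ui, repo):
        # peer for the configured default path, as in getincoming() above
        source, branches = hg.parseurl(ui.expandpath(b'default'))
        other = hg.peer(repo, {}, source)
        # common changesets plus whether any remote heads are missing locally
        commoninc = discovery.findcommonincoming(repo, other)
        incoming = bool(commoninc[1])
        # changesets the remote is missing from us
        outgoing = discovery.findcommonoutgoing(repo, other, commoninc=commoninc)
        return incoming, len(outgoing.missing)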
7049 @command(
7052 @command(
7050 b'tag',
7053 b'tag',
7051 [
7054 [
7052 (b'f', b'force', None, _(b'force tag')),
7055 (b'f', b'force', None, _(b'force tag')),
7053 (b'l', b'local', None, _(b'make the tag local')),
7056 (b'l', b'local', None, _(b'make the tag local')),
7054 (b'r', b'rev', b'', _(b'revision to tag'), _(b'REV')),
7057 (b'r', b'rev', b'', _(b'revision to tag'), _(b'REV')),
7055 (b'', b'remove', None, _(b'remove a tag')),
7058 (b'', b'remove', None, _(b'remove a tag')),
7056 # -l/--local is already there, commitopts cannot be used
7059 # -l/--local is already there, commitopts cannot be used
7057 (b'e', b'edit', None, _(b'invoke editor on commit messages')),
7060 (b'e', b'edit', None, _(b'invoke editor on commit messages')),
7058 (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
7061 (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
7059 ]
7062 ]
7060 + commitopts2,
7063 + commitopts2,
7061 _(b'[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'),
7064 _(b'[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'),
7062 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
7065 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
7063 )
7066 )
7064 def tag(ui, repo, name1, *names, **opts):
7067 def tag(ui, repo, name1, *names, **opts):
7065 """add one or more tags for the current or given revision
7068 """add one or more tags for the current or given revision
7066
7069
7067 Name a particular revision using <name>.
7070 Name a particular revision using <name>.
7068
7071
7069 Tags are used to name particular revisions of the repository and are
7072 Tags are used to name particular revisions of the repository and are
 7070	    very useful for comparing different revisions, going back to significant
 7073	    very useful for comparing different revisions, going back to significant
 7071	    earlier versions, or marking branch points as releases. Changing
 7074	    earlier versions, or marking branch points as releases. Changing
7072 an existing tag is normally disallowed; use -f/--force to override.
7075 an existing tag is normally disallowed; use -f/--force to override.
7073
7076
7074 If no revision is given, the parent of the working directory is
7077 If no revision is given, the parent of the working directory is
7075 used.
7078 used.
7076
7079
7077 To facilitate version control, distribution, and merging of tags,
7080 To facilitate version control, distribution, and merging of tags,
7078 they are stored as a file named ".hgtags" which is managed similarly
7081 they are stored as a file named ".hgtags" which is managed similarly
7079 to other project files and can be hand-edited if necessary. This
7082 to other project files and can be hand-edited if necessary. This
7080 also means that tagging creates a new commit. The file
7083 also means that tagging creates a new commit. The file
7081 ".hg/localtags" is used for local tags (not shared among
7084 ".hg/localtags" is used for local tags (not shared among
7082 repositories).
7085 repositories).
7083
7086
7084 Tag commits are usually made at the head of a branch. If the parent
7087 Tag commits are usually made at the head of a branch. If the parent
7085 of the working directory is not a branch head, :hg:`tag` aborts; use
7088 of the working directory is not a branch head, :hg:`tag` aborts; use
7086 -f/--force to force the tag commit to be based on a non-head
7089 -f/--force to force the tag commit to be based on a non-head
7087 changeset.
7090 changeset.
7088
7091
7089 See :hg:`help dates` for a list of formats valid for -d/--date.
7092 See :hg:`help dates` for a list of formats valid for -d/--date.
7090
7093
7091 Since tag names have priority over branch names during revision
7094 Since tag names have priority over branch names during revision
7092 lookup, using an existing branch name as a tag name is discouraged.
7095 lookup, using an existing branch name as a tag name is discouraged.
7093
7096
7094 Returns 0 on success.
7097 Returns 0 on success.
7095 """
7098 """
7096 cmdutil.check_incompatible_arguments(opts, 'remove', ['rev'])
7099 cmdutil.check_incompatible_arguments(opts, 'remove', ['rev'])
7097 opts = pycompat.byteskwargs(opts)
7100 opts = pycompat.byteskwargs(opts)
7098 with repo.wlock(), repo.lock():
7101 with repo.wlock(), repo.lock():
7099 rev_ = b"."
7102 rev_ = b"."
7100 names = [t.strip() for t in (name1,) + names]
7103 names = [t.strip() for t in (name1,) + names]
7101 if len(names) != len(set(names)):
7104 if len(names) != len(set(names)):
7102 raise error.Abort(_(b'tag names must be unique'))
7105 raise error.Abort(_(b'tag names must be unique'))
7103 for n in names:
7106 for n in names:
7104 scmutil.checknewlabel(repo, n, b'tag')
7107 scmutil.checknewlabel(repo, n, b'tag')
7105 if not n:
7108 if not n:
7106 raise error.Abort(
7109 raise error.Abort(
7107 _(b'tag names cannot consist entirely of whitespace')
7110 _(b'tag names cannot consist entirely of whitespace')
7108 )
7111 )
7109 if opts.get(b'rev'):
7112 if opts.get(b'rev'):
7110 rev_ = opts[b'rev']
7113 rev_ = opts[b'rev']
7111 message = opts.get(b'message')
7114 message = opts.get(b'message')
7112 if opts.get(b'remove'):
7115 if opts.get(b'remove'):
7113 if opts.get(b'local'):
7116 if opts.get(b'local'):
7114 expectedtype = b'local'
7117 expectedtype = b'local'
7115 else:
7118 else:
7116 expectedtype = b'global'
7119 expectedtype = b'global'
7117
7120
7118 for n in names:
7121 for n in names:
7119 if repo.tagtype(n) == b'global':
7122 if repo.tagtype(n) == b'global':
7120 alltags = tagsmod.findglobaltags(ui, repo)
7123 alltags = tagsmod.findglobaltags(ui, repo)
7121 if alltags[n][0] == nullid:
7124 if alltags[n][0] == nullid:
7122 raise error.Abort(_(b"tag '%s' is already removed") % n)
7125 raise error.Abort(_(b"tag '%s' is already removed") % n)
7123 if not repo.tagtype(n):
7126 if not repo.tagtype(n):
7124 raise error.Abort(_(b"tag '%s' does not exist") % n)
7127 raise error.Abort(_(b"tag '%s' does not exist") % n)
7125 if repo.tagtype(n) != expectedtype:
7128 if repo.tagtype(n) != expectedtype:
7126 if expectedtype == b'global':
7129 if expectedtype == b'global':
7127 raise error.Abort(
7130 raise error.Abort(
7128 _(b"tag '%s' is not a global tag") % n
7131 _(b"tag '%s' is not a global tag") % n
7129 )
7132 )
7130 else:
7133 else:
7131 raise error.Abort(_(b"tag '%s' is not a local tag") % n)
7134 raise error.Abort(_(b"tag '%s' is not a local tag") % n)
7132 rev_ = b'null'
7135 rev_ = b'null'
7133 if not message:
7136 if not message:
7134 # we don't translate commit messages
7137 # we don't translate commit messages
7135 message = b'Removed tag %s' % b', '.join(names)
7138 message = b'Removed tag %s' % b', '.join(names)
7136 elif not opts.get(b'force'):
7139 elif not opts.get(b'force'):
7137 for n in names:
7140 for n in names:
7138 if n in repo.tags():
7141 if n in repo.tags():
7139 raise error.Abort(
7142 raise error.Abort(
7140 _(b"tag '%s' already exists (use -f to force)") % n
7143 _(b"tag '%s' already exists (use -f to force)") % n
7141 )
7144 )
7142 if not opts.get(b'local'):
7145 if not opts.get(b'local'):
7143 p1, p2 = repo.dirstate.parents()
7146 p1, p2 = repo.dirstate.parents()
7144 if p2 != nullid:
7147 if p2 != nullid:
7145 raise error.Abort(_(b'uncommitted merge'))
7148 raise error.Abort(_(b'uncommitted merge'))
7146 bheads = repo.branchheads()
7149 bheads = repo.branchheads()
7147 if not opts.get(b'force') and bheads and p1 not in bheads:
7150 if not opts.get(b'force') and bheads and p1 not in bheads:
7148 raise error.Abort(
7151 raise error.Abort(
7149 _(
7152 _(
7150 b'working directory is not at a branch head '
7153 b'working directory is not at a branch head '
7151 b'(use -f to force)'
7154 b'(use -f to force)'
7152 )
7155 )
7153 )
7156 )
7154 node = scmutil.revsingle(repo, rev_).node()
7157 node = scmutil.revsingle(repo, rev_).node()
7155
7158
7156 if not message:
7159 if not message:
7157 # we don't translate commit messages
7160 # we don't translate commit messages
7158 message = b'Added tag %s for changeset %s' % (
7161 message = b'Added tag %s for changeset %s' % (
7159 b', '.join(names),
7162 b', '.join(names),
7160 short(node),
7163 short(node),
7161 )
7164 )
7162
7165
7163 date = opts.get(b'date')
7166 date = opts.get(b'date')
7164 if date:
7167 if date:
7165 date = dateutil.parsedate(date)
7168 date = dateutil.parsedate(date)
7166
7169
7167 if opts.get(b'remove'):
7170 if opts.get(b'remove'):
7168 editform = b'tag.remove'
7171 editform = b'tag.remove'
7169 else:
7172 else:
7170 editform = b'tag.add'
7173 editform = b'tag.add'
7171 editor = cmdutil.getcommiteditor(
7174 editor = cmdutil.getcommiteditor(
7172 editform=editform, **pycompat.strkwargs(opts)
7175 editform=editform, **pycompat.strkwargs(opts)
7173 )
7176 )
7174
7177
7175 # don't allow tagging the null rev
7178 # don't allow tagging the null rev
7176 if (
7179 if (
7177 not opts.get(b'remove')
7180 not opts.get(b'remove')
7178 and scmutil.revsingle(repo, rev_).rev() == nullrev
7181 and scmutil.revsingle(repo, rev_).rev() == nullrev
7179 ):
7182 ):
7180 raise error.Abort(_(b"cannot tag null revision"))
7183 raise error.Abort(_(b"cannot tag null revision"))
7181
7184
7182 tagsmod.tag(
7185 tagsmod.tag(
7183 repo,
7186 repo,
7184 names,
7187 names,
7185 node,
7188 node,
7186 message,
7189 message,
7187 opts.get(b'local'),
7190 opts.get(b'local'),
7188 opts.get(b'user'),
7191 opts.get(b'user'),
7189 date,
7192 date,
7190 editor=editor,
7193 editor=editor,
7191 )
7194 )
7192
7195
7193
7196
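For illustration only, a minimal sketch of creating a local tag through the same tagsmod.tag entry point used above; repo is assumed to be an open localrepo and add_local_tag is an invented name:

    from mercurial import scmutil
    from mercurial import tags as tagsmod

    def add_local_tag(repo, name, rev=b'.'):
        node = scmutil.revsingle(repo, rev).node()
        with repo.wlock(), repo.lock():
            # local=True writes .hg/localtags instead of committing to .hgtags
            tagsmod.tag(
                repo,
                [name],
                node,
                b'Added tag %s' % name,  # message (no commit is made for local tags)
                True,   # local
                None,   # user: fall back to the configured username
                None,   # date: current date
            )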
7194 @command(
7197 @command(
7195 b'tags',
7198 b'tags',
7196 formatteropts,
7199 formatteropts,
7197 b'',
7200 b'',
7198 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
7201 helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
7199 intents={INTENT_READONLY},
7202 intents={INTENT_READONLY},
7200 )
7203 )
7201 def tags(ui, repo, **opts):
7204 def tags(ui, repo, **opts):
7202 """list repository tags
7205 """list repository tags
7203
7206
7204 This lists both regular and local tags. When the -v/--verbose
7207 This lists both regular and local tags. When the -v/--verbose
7205 switch is used, a third column "local" is printed for local tags.
7208 switch is used, a third column "local" is printed for local tags.
7206 When the -q/--quiet switch is used, only the tag name is printed.
7209 When the -q/--quiet switch is used, only the tag name is printed.
7207
7210
7208 .. container:: verbose
7211 .. container:: verbose
7209
7212
7210 Template:
7213 Template:
7211
7214
7212 The following keywords are supported in addition to the common template
7215 The following keywords are supported in addition to the common template
7213 keywords and functions such as ``{tag}``. See also
7216 keywords and functions such as ``{tag}``. See also
7214 :hg:`help templates`.
7217 :hg:`help templates`.
7215
7218
7216 :type: String. ``local`` for local tags.
7219 :type: String. ``local`` for local tags.
7217
7220
7218 Returns 0 on success.
7221 Returns 0 on success.
7219 """
7222 """
7220
7223
7221 opts = pycompat.byteskwargs(opts)
7224 opts = pycompat.byteskwargs(opts)
7222 ui.pager(b'tags')
7225 ui.pager(b'tags')
7223 fm = ui.formatter(b'tags', opts)
7226 fm = ui.formatter(b'tags', opts)
7224 hexfunc = fm.hexfunc
7227 hexfunc = fm.hexfunc
7225
7228
7226 for t, n in reversed(repo.tagslist()):
7229 for t, n in reversed(repo.tagslist()):
7227 hn = hexfunc(n)
7230 hn = hexfunc(n)
7228 label = b'tags.normal'
7231 label = b'tags.normal'
7229 tagtype = b''
7232 tagtype = b''
7230 if repo.tagtype(t) == b'local':
7233 if repo.tagtype(t) == b'local':
7231 label = b'tags.local'
7234 label = b'tags.local'
7232 tagtype = b'local'
7235 tagtype = b'local'
7233
7236
7234 fm.startitem()
7237 fm.startitem()
7235 fm.context(repo=repo)
7238 fm.context(repo=repo)
7236 fm.write(b'tag', b'%s', t, label=label)
7239 fm.write(b'tag', b'%s', t, label=label)
7237 fmt = b" " * (30 - encoding.colwidth(t)) + b' %5d:%s'
7240 fmt = b" " * (30 - encoding.colwidth(t)) + b' %5d:%s'
7238 fm.condwrite(
7241 fm.condwrite(
7239 not ui.quiet,
7242 not ui.quiet,
7240 b'rev node',
7243 b'rev node',
7241 fmt,
7244 fmt,
7242 repo.changelog.rev(n),
7245 repo.changelog.rev(n),
7243 hn,
7246 hn,
7244 label=label,
7247 label=label,
7245 )
7248 )
7246 fm.condwrite(
7249 fm.condwrite(
7247 ui.verbose and tagtype, b'type', b' %s', tagtype, label=label
7250 ui.verbose and tagtype, b'type', b' %s', tagtype, label=label
7248 )
7251 )
7249 fm.plain(b'\n')
7252 fm.plain(b'\n')
7250 fm.end()
7253 fm.end()
7251
7254
7252
7255
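A small sketch of the underlying call driving the command above; repo.tagslist() is read directly, without the formatter, and newest_tags is a made-up helper name:

    def newest_tags(repo, limit=5):
        # repo.tagslist() yields (name, node) pairs; reversed() puts "tip" and
        # the most recently added tags first, as in the command above.
        return [name for name, node in reversed(repo.tagslist())][:limit]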
7253 @command(
7256 @command(
7254 b'tip',
7257 b'tip',
7255 [
7258 [
7256 (b'p', b'patch', None, _(b'show patch')),
7259 (b'p', b'patch', None, _(b'show patch')),
7257 (b'g', b'git', None, _(b'use git extended diff format')),
7260 (b'g', b'git', None, _(b'use git extended diff format')),
7258 ]
7261 ]
7259 + templateopts,
7262 + templateopts,
7260 _(b'[-p] [-g]'),
7263 _(b'[-p] [-g]'),
7261 helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
7264 helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
7262 )
7265 )
7263 def tip(ui, repo, **opts):
7266 def tip(ui, repo, **opts):
7264 """show the tip revision (DEPRECATED)
7267 """show the tip revision (DEPRECATED)
7265
7268
7266 The tip revision (usually just called the tip) is the changeset
7269 The tip revision (usually just called the tip) is the changeset
7267 most recently added to the repository (and therefore the most
7270 most recently added to the repository (and therefore the most
7268 recently changed head).
7271 recently changed head).
7269
7272
7270 If you have just made a commit, that commit will be the tip. If
7273 If you have just made a commit, that commit will be the tip. If
7271 you have just pulled changes from another repository, the tip of
7274 you have just pulled changes from another repository, the tip of
7272 that repository becomes the current tip. The "tip" tag is special
7275 that repository becomes the current tip. The "tip" tag is special
7273 and cannot be renamed or assigned to a different changeset.
7276 and cannot be renamed or assigned to a different changeset.
7274
7277
7275 This command is deprecated, please use :hg:`heads` instead.
7278 This command is deprecated, please use :hg:`heads` instead.
7276
7279
7277 Returns 0 on success.
7280 Returns 0 on success.
7278 """
7281 """
7279 opts = pycompat.byteskwargs(opts)
7282 opts = pycompat.byteskwargs(opts)
7280 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
7283 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
7281 displayer.show(repo[b'tip'])
7284 displayer.show(repo[b'tip'])
7282 displayer.close()
7285 displayer.close()
7283
7286
7284
7287
7285 @command(
7288 @command(
7286 b'unbundle',
7289 b'unbundle',
7287 [
7290 [
7288 (
7291 (
7289 b'u',
7292 b'u',
7290 b'update',
7293 b'update',
7291 None,
7294 None,
7292 _(b'update to new branch head if changesets were unbundled'),
7295 _(b'update to new branch head if changesets were unbundled'),
7293 )
7296 )
7294 ],
7297 ],
7295 _(b'[-u] FILE...'),
7298 _(b'[-u] FILE...'),
7296 helpcategory=command.CATEGORY_IMPORT_EXPORT,
7299 helpcategory=command.CATEGORY_IMPORT_EXPORT,
7297 )
7300 )
7298 def unbundle(ui, repo, fname1, *fnames, **opts):
7301 def unbundle(ui, repo, fname1, *fnames, **opts):
7299 """apply one or more bundle files
7302 """apply one or more bundle files
7300
7303
7301 Apply one or more bundle files generated by :hg:`bundle`.
7304 Apply one or more bundle files generated by :hg:`bundle`.
7302
7305
7303 Returns 0 on success, 1 if an update has unresolved files.
7306 Returns 0 on success, 1 if an update has unresolved files.
7304 """
7307 """
7305 fnames = (fname1,) + fnames
7308 fnames = (fname1,) + fnames
7306
7309
7307 with repo.lock():
7310 with repo.lock():
7308 for fname in fnames:
7311 for fname in fnames:
7309 f = hg.openpath(ui, fname)
7312 f = hg.openpath(ui, fname)
7310 gen = exchange.readbundle(ui, f, fname)
7313 gen = exchange.readbundle(ui, f, fname)
7311 if isinstance(gen, streamclone.streamcloneapplier):
7314 if isinstance(gen, streamclone.streamcloneapplier):
7312 raise error.Abort(
7315 raise error.Abort(
7313 _(
7316 _(
7314 b'packed bundles cannot be applied with '
7317 b'packed bundles cannot be applied with '
7315 b'"hg unbundle"'
7318 b'"hg unbundle"'
7316 ),
7319 ),
7317 hint=_(b'use "hg debugapplystreamclonebundle"'),
7320 hint=_(b'use "hg debugapplystreamclonebundle"'),
7318 )
7321 )
7319 url = b'bundle:' + fname
7322 url = b'bundle:' + fname
7320 try:
7323 try:
7321 txnname = b'unbundle'
7324 txnname = b'unbundle'
7322 if not isinstance(gen, bundle2.unbundle20):
7325 if not isinstance(gen, bundle2.unbundle20):
7323 txnname = b'unbundle\n%s' % util.hidepassword(url)
7326 txnname = b'unbundle\n%s' % util.hidepassword(url)
7324 with repo.transaction(txnname) as tr:
7327 with repo.transaction(txnname) as tr:
7325 op = bundle2.applybundle(
7328 op = bundle2.applybundle(
7326 repo, gen, tr, source=b'unbundle', url=url
7329 repo, gen, tr, source=b'unbundle', url=url
7327 )
7330 )
7328 except error.BundleUnknownFeatureError as exc:
7331 except error.BundleUnknownFeatureError as exc:
7329 raise error.Abort(
7332 raise error.Abort(
7330 _(b'%s: unknown bundle feature, %s') % (fname, exc),
7333 _(b'%s: unknown bundle feature, %s') % (fname, exc),
7331 hint=_(
7334 hint=_(
7332 b"see https://mercurial-scm.org/"
7335 b"see https://mercurial-scm.org/"
7333 b"wiki/BundleFeature for more "
7336 b"wiki/BundleFeature for more "
7334 b"information"
7337 b"information"
7335 ),
7338 ),
7336 )
7339 )
7337 modheads = bundle2.combinechangegroupresults(op)
7340 modheads = bundle2.combinechangegroupresults(op)
7338
7341
7339 return postincoming(ui, repo, modheads, opts.get('update'), None, None)
7342 return postincoming(ui, repo, modheads, opts.get('update'), None, None)
7340
7343
7341
7344
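A hedged sketch of applying a single bundle file with the same calls the command uses above; apply_one_bundle is an invented name, and the stream-clone and unknown-feature error handling shown above is deliberately omitted:

    from mercurial import bundle2, exchange, hg

    def apply_one_bundle(ui, repo, fname):
        f = hg.openpath(ui, fname)
        gen = exchange.readbundle(ui, f, fname)
        url = b'bundle:' + fname
        with repo.lock(), repo.transaction(b'unbundle') as tr:
            op = bundle2.applybundle(repo, gen, tr, source=b'unbundle', url=url)
        # heads added/removed, as consumed by postincoming() above
        return bundle2.combinechangegroupresults(op)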
7342 @command(
7345 @command(
7343 b'unshelve',
7346 b'unshelve',
7344 [
7347 [
7345 (b'a', b'abort', None, _(b'abort an incomplete unshelve operation')),
7348 (b'a', b'abort', None, _(b'abort an incomplete unshelve operation')),
7346 (
7349 (
7347 b'c',
7350 b'c',
7348 b'continue',
7351 b'continue',
7349 None,
7352 None,
7350 _(b'continue an incomplete unshelve operation'),
7353 _(b'continue an incomplete unshelve operation'),
7351 ),
7354 ),
7352 (b'i', b'interactive', None, _(b'use interactive mode (EXPERIMENTAL)')),
7355 (b'i', b'interactive', None, _(b'use interactive mode (EXPERIMENTAL)')),
7353 (b'k', b'keep', None, _(b'keep shelve after unshelving')),
7356 (b'k', b'keep', None, _(b'keep shelve after unshelving')),
7354 (
7357 (
7355 b'n',
7358 b'n',
7356 b'name',
7359 b'name',
7357 b'',
7360 b'',
7358 _(b'restore shelved change with given name'),
7361 _(b'restore shelved change with given name'),
7359 _(b'NAME'),
7362 _(b'NAME'),
7360 ),
7363 ),
7361 (b't', b'tool', b'', _(b'specify merge tool')),
7364 (b't', b'tool', b'', _(b'specify merge tool')),
7362 (
7365 (
7363 b'',
7366 b'',
7364 b'date',
7367 b'date',
7365 b'',
7368 b'',
7366 _(b'set date for temporary commits (DEPRECATED)'),
7369 _(b'set date for temporary commits (DEPRECATED)'),
7367 _(b'DATE'),
7370 _(b'DATE'),
7368 ),
7371 ),
7369 ],
7372 ],
7370 _(b'hg unshelve [OPTION]... [[-n] SHELVED]'),
7373 _(b'hg unshelve [OPTION]... [[-n] SHELVED]'),
7371 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
7374 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
7372 )
7375 )
7373 def unshelve(ui, repo, *shelved, **opts):
7376 def unshelve(ui, repo, *shelved, **opts):
7374 """restore a shelved change to the working directory
7377 """restore a shelved change to the working directory
7375
7378
7376 This command accepts an optional name of a shelved change to
7379 This command accepts an optional name of a shelved change to
7377 restore. If none is given, the most recent shelved change is used.
7380 restore. If none is given, the most recent shelved change is used.
7378
7381
7379 If a shelved change is applied successfully, the bundle that
7382 If a shelved change is applied successfully, the bundle that
7380 contains the shelved changes is moved to a backup location
7383 contains the shelved changes is moved to a backup location
7381 (.hg/shelve-backup).
7384 (.hg/shelve-backup).
7382
7385
7383 Since you can restore a shelved change on top of an arbitrary
7386 Since you can restore a shelved change on top of an arbitrary
7384 commit, it is possible that unshelving will result in a conflict
7387 commit, it is possible that unshelving will result in a conflict
7385 between your changes and the commits you are unshelving onto. If
7388 between your changes and the commits you are unshelving onto. If
7386 this occurs, you must resolve the conflict, then use
7389 this occurs, you must resolve the conflict, then use
7387 ``--continue`` to complete the unshelve operation. (The bundle
7390 ``--continue`` to complete the unshelve operation. (The bundle
7388 will not be moved until you successfully complete the unshelve.)
7391 will not be moved until you successfully complete the unshelve.)
7389
7392
7390 (Alternatively, you can use ``--abort`` to abandon an unshelve
7393 (Alternatively, you can use ``--abort`` to abandon an unshelve
7391 that causes a conflict. This reverts the unshelved changes, and
7394 that causes a conflict. This reverts the unshelved changes, and
7392 leaves the bundle in place.)
7395 leaves the bundle in place.)
7393
7396
 7394	    If a bare shelved change (one made without the interactive, include,
 7397	    If a bare shelved change (one made without the interactive, include,
 7395	    and exclude options) was done on a newly created branch, unshelving
 7398	    and exclude options) was done on a newly created branch, unshelving
 7396	    restores that branch information to the working directory.
 7399	    restores that branch information to the working directory.
7397
7400
7398 After a successful unshelve, the shelved changes are stored in a
7401 After a successful unshelve, the shelved changes are stored in a
7399 backup directory. Only the N most recent backups are kept. N
7402 backup directory. Only the N most recent backups are kept. N
7400 defaults to 10 but can be overridden using the ``shelve.maxbackups``
7403 defaults to 10 but can be overridden using the ``shelve.maxbackups``
7401 configuration option.
7404 configuration option.
7402
7405
7403 .. container:: verbose
7406 .. container:: verbose
7404
7407
 7405	      The timestamp in seconds is used to decide the order of backups. More
 7408	      The timestamp in seconds is used to decide the order of backups. More
 7406	      than ``maxbackups`` backups may be kept when identical timestamps
 7409	      than ``maxbackups`` backups may be kept when identical timestamps
 7407	      prevent deciding their exact order, for safety.
 7410	      prevent deciding their exact order, for safety.
7408
7411
 7409	      Selected changes can be unshelved with the ``--interactive`` flag.
 7412	      Selected changes can be unshelved with the ``--interactive`` flag.
7410 The working directory is updated with the selected changes, and
7413 The working directory is updated with the selected changes, and
7411 only the unselected changes remain shelved.
7414 only the unselected changes remain shelved.
 7412	      Note: The whole shelve is applied to the working directory first before
 7415	      Note: The whole shelve is applied to the working directory first before
 7413	      running interactively, so this will bring up all the conflicts between the
 7416	      running interactively, so this will bring up all the conflicts between the
 7414	      working directory and the shelve, irrespective of which changes will be
 7417	      working directory and the shelve, irrespective of which changes will be
 7415	      unshelved.
 7418	      unshelved.
7416 """
7419 """
7417 with repo.wlock():
7420 with repo.wlock():
7418 return shelvemod.unshelvecmd(ui, repo, *shelved, **opts)
7421 return shelvemod.unshelvecmd(ui, repo, *shelved, **opts)
7419
7422
7420
7423
7421 statemod.addunfinished(
7424 statemod.addunfinished(
7422 b'unshelve',
7425 b'unshelve',
7423 fname=b'shelvedstate',
7426 fname=b'shelvedstate',
7424 continueflag=True,
7427 continueflag=True,
7425 abortfunc=shelvemod.hgabortunshelve,
7428 abortfunc=shelvemod.hgabortunshelve,
7426 continuefunc=shelvemod.hgcontinueunshelve,
7429 continuefunc=shelvemod.hgcontinueunshelve,
7427 cmdmsg=_(b'unshelve already in progress'),
7430 cmdmsg=_(b'unshelve already in progress'),
7428 )
7431 )
7429
7432
7430
7433
7431 @command(
7434 @command(
7432 b'update|up|checkout|co',
7435 b'update|up|checkout|co',
7433 [
7436 [
7434 (b'C', b'clean', None, _(b'discard uncommitted changes (no backup)')),
7437 (b'C', b'clean', None, _(b'discard uncommitted changes (no backup)')),
7435 (b'c', b'check', None, _(b'require clean working directory')),
7438 (b'c', b'check', None, _(b'require clean working directory')),
7436 (b'm', b'merge', None, _(b'merge uncommitted changes')),
7439 (b'm', b'merge', None, _(b'merge uncommitted changes')),
7437 (b'd', b'date', b'', _(b'tipmost revision matching date'), _(b'DATE')),
7440 (b'd', b'date', b'', _(b'tipmost revision matching date'), _(b'DATE')),
7438 (b'r', b'rev', b'', _(b'revision'), _(b'REV')),
7441 (b'r', b'rev', b'', _(b'revision'), _(b'REV')),
7439 ]
7442 ]
7440 + mergetoolopts,
7443 + mergetoolopts,
7441 _(b'[-C|-c|-m] [-d DATE] [[-r] REV]'),
7444 _(b'[-C|-c|-m] [-d DATE] [[-r] REV]'),
7442 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
7445 helpcategory=command.CATEGORY_WORKING_DIRECTORY,
7443 helpbasic=True,
7446 helpbasic=True,
7444 )
7447 )
7445 def update(ui, repo, node=None, **opts):
7448 def update(ui, repo, node=None, **opts):
7446 """update working directory (or switch revisions)
7449 """update working directory (or switch revisions)
7447
7450
7448 Update the repository's working directory to the specified
7451 Update the repository's working directory to the specified
7449 changeset. If no changeset is specified, update to the tip of the
7452 changeset. If no changeset is specified, update to the tip of the
7450 current named branch and move the active bookmark (see :hg:`help
7453 current named branch and move the active bookmark (see :hg:`help
7451 bookmarks`).
7454 bookmarks`).
7452
7455
7453 Update sets the working directory's parent revision to the specified
7456 Update sets the working directory's parent revision to the specified
7454 changeset (see :hg:`help parents`).
7457 changeset (see :hg:`help parents`).
7455
7458
7456 If the changeset is not a descendant or ancestor of the working
7459 If the changeset is not a descendant or ancestor of the working
7457 directory's parent and there are uncommitted changes, the update is
7460 directory's parent and there are uncommitted changes, the update is
7458 aborted. With the -c/--check option, the working directory is checked
7461 aborted. With the -c/--check option, the working directory is checked
7459 for uncommitted changes; if none are found, the working directory is
7462 for uncommitted changes; if none are found, the working directory is
7460 updated to the specified changeset.
7463 updated to the specified changeset.
7461
7464
7462 .. container:: verbose
7465 .. container:: verbose
7463
7466
7464 The -C/--clean, -c/--check, and -m/--merge options control what
7467 The -C/--clean, -c/--check, and -m/--merge options control what
7465 happens if the working directory contains uncommitted changes.
7468 happens if the working directory contains uncommitted changes.
 7466	      At most one of them can be specified.
 7469	      At most one of them can be specified.
7467
7470
7468 1. If no option is specified, and if
7471 1. If no option is specified, and if
7469 the requested changeset is an ancestor or descendant of
7472 the requested changeset is an ancestor or descendant of
7470 the working directory's parent, the uncommitted changes
7473 the working directory's parent, the uncommitted changes
7471 are merged into the requested changeset and the merged
7474 are merged into the requested changeset and the merged
7472 result is left uncommitted. If the requested changeset is
7475 result is left uncommitted. If the requested changeset is
7473 not an ancestor or descendant (that is, it is on another
7476 not an ancestor or descendant (that is, it is on another
7474 branch), the update is aborted and the uncommitted changes
7477 branch), the update is aborted and the uncommitted changes
7475 are preserved.
7478 are preserved.
7476
7479
7477 2. With the -m/--merge option, the update is allowed even if the
7480 2. With the -m/--merge option, the update is allowed even if the
7478 requested changeset is not an ancestor or descendant of
7481 requested changeset is not an ancestor or descendant of
7479 the working directory's parent.
7482 the working directory's parent.
7480
7483
7481 3. With the -c/--check option, the update is aborted and the
7484 3. With the -c/--check option, the update is aborted and the
7482 uncommitted changes are preserved.
7485 uncommitted changes are preserved.
7483
7486
7484 4. With the -C/--clean option, uncommitted changes are discarded and
7487 4. With the -C/--clean option, uncommitted changes are discarded and
7485 the working directory is updated to the requested changeset.
7488 the working directory is updated to the requested changeset.
7486
7489
7487 To cancel an uncommitted merge (and lose your changes), use
7490 To cancel an uncommitted merge (and lose your changes), use
7488 :hg:`merge --abort`.
7491 :hg:`merge --abort`.
7489
7492
7490 Use null as the changeset to remove the working directory (like
7493 Use null as the changeset to remove the working directory (like
7491 :hg:`clone -U`).
7494 :hg:`clone -U`).
7492
7495
7493 If you want to revert just one file to an older revision, use
7496 If you want to revert just one file to an older revision, use
7494 :hg:`revert [-r REV] NAME`.
7497 :hg:`revert [-r REV] NAME`.
7495
7498
7496 See :hg:`help dates` for a list of formats valid for -d/--date.
7499 See :hg:`help dates` for a list of formats valid for -d/--date.
7497
7500
7498 Returns 0 on success, 1 if there are unresolved files.
7501 Returns 0 on success, 1 if there are unresolved files.
7499 """
7502 """
7500 cmdutil.check_at_most_one_arg(opts, 'clean', 'check', 'merge')
7503 cmdutil.check_at_most_one_arg(opts, 'clean', 'check', 'merge')
7501 rev = opts.get('rev')
7504 rev = opts.get('rev')
7502 date = opts.get('date')
7505 date = opts.get('date')
7503 clean = opts.get('clean')
7506 clean = opts.get('clean')
7504 check = opts.get('check')
7507 check = opts.get('check')
7505 merge = opts.get('merge')
7508 merge = opts.get('merge')
7506 if rev and node:
7509 if rev and node:
7507 raise error.Abort(_(b"please specify just one revision"))
7510 raise error.Abort(_(b"please specify just one revision"))
7508
7511
7509 if ui.configbool(b'commands', b'update.requiredest'):
7512 if ui.configbool(b'commands', b'update.requiredest'):
7510 if not node and not rev and not date:
7513 if not node and not rev and not date:
7511 raise error.Abort(
7514 raise error.Abort(
7512 _(b'you must specify a destination'),
7515 _(b'you must specify a destination'),
7513 hint=_(b'for example: hg update ".::"'),
7516 hint=_(b'for example: hg update ".::"'),
7514 )
7517 )
7515
7518
7516 if rev is None or rev == b'':
7519 if rev is None or rev == b'':
7517 rev = node
7520 rev = node
7518
7521
7519 if date and rev is not None:
7522 if date and rev is not None:
7520 raise error.Abort(_(b"you can't specify a revision and a date"))
7523 raise error.Abort(_(b"you can't specify a revision and a date"))
7521
7524
7522 updatecheck = None
7525 updatecheck = None
7523 if check:
7526 if check:
7524 updatecheck = b'abort'
7527 updatecheck = b'abort'
7525 elif merge:
7528 elif merge:
7526 updatecheck = b'none'
7529 updatecheck = b'none'
7527
7530
7528 with repo.wlock():
7531 with repo.wlock():
7529 cmdutil.clearunfinished(repo)
7532 cmdutil.clearunfinished(repo)
7530 if date:
7533 if date:
7531 rev = cmdutil.finddate(ui, repo, date)
7534 rev = cmdutil.finddate(ui, repo, date)
7532
7535
7533 # if we defined a bookmark, we have to remember the original name
7536 # if we defined a bookmark, we have to remember the original name
7534 brev = rev
7537 brev = rev
7535 if rev:
7538 if rev:
7536 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
7539 repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
7537 ctx = scmutil.revsingle(repo, rev, default=None)
7540 ctx = scmutil.revsingle(repo, rev, default=None)
7538 rev = ctx.rev()
7541 rev = ctx.rev()
7539 hidden = ctx.hidden()
7542 hidden = ctx.hidden()
7540 overrides = {(b'ui', b'forcemerge'): opts.get('tool', b'')}
7543 overrides = {(b'ui', b'forcemerge'): opts.get('tool', b'')}
7541 with ui.configoverride(overrides, b'update'):
7544 with ui.configoverride(overrides, b'update'):
7542 ret = hg.updatetotally(
7545 ret = hg.updatetotally(
7543 ui, repo, rev, brev, clean=clean, updatecheck=updatecheck
7546 ui, repo, rev, brev, clean=clean, updatecheck=updatecheck
7544 )
7547 )
7545 if hidden:
7548 if hidden:
7546 ctxstr = ctx.hex()[:12]
7549 ctxstr = ctx.hex()[:12]
7547 ui.warn(_(b"updated to hidden changeset %s\n") % ctxstr)
7550 ui.warn(_(b"updated to hidden changeset %s\n") % ctxstr)
7548
7551
7549 if ctx.obsolete():
7552 if ctx.obsolete():
7550 obsfatemsg = obsutil._getfilteredreason(repo, ctxstr, ctx)
7553 obsfatemsg = obsutil._getfilteredreason(repo, ctxstr, ctx)
7551 ui.warn(b"(%s)\n" % obsfatemsg)
7554 ui.warn(b"(%s)\n" % obsfatemsg)
7552 return ret
7555 return ret
7553
7556
7554
7557
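As a rough illustration, not a replacement for the command above, the core of an update reduces to revsingle plus hg.updatetotally under the wlock; update_to is an invented helper and the date, bookmark and hidden-changeset handling is skipped:

    from mercurial import hg, scmutil

    def update_to(ui, repo, rev=b'.'):
        with repo.wlock():
            ctx = scmutil.revsingle(repo, rev, default=None)
            # updatecheck=b'abort' mirrors the -c/--check behaviour described above
            return hg.updatetotally(
                ui, repo, ctx.rev(), rev, clean=False, updatecheck=b'abort'
            )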
7555 @command(
7558 @command(
7556 b'verify',
7559 b'verify',
7557 [(b'', b'full', False, b'perform more checks (EXPERIMENTAL)')],
7560 [(b'', b'full', False, b'perform more checks (EXPERIMENTAL)')],
7558 helpcategory=command.CATEGORY_MAINTENANCE,
7561 helpcategory=command.CATEGORY_MAINTENANCE,
7559 )
7562 )
7560 def verify(ui, repo, **opts):
7563 def verify(ui, repo, **opts):
7561 """verify the integrity of the repository
7564 """verify the integrity of the repository
7562
7565
7563 Verify the integrity of the current repository.
7566 Verify the integrity of the current repository.
7564
7567
7565 This will perform an extensive check of the repository's
7568 This will perform an extensive check of the repository's
7566 integrity, validating the hashes and checksums of each entry in
7569 integrity, validating the hashes and checksums of each entry in
7567 the changelog, manifest, and tracked files, as well as the
7570 the changelog, manifest, and tracked files, as well as the
7568 integrity of their crosslinks and indices.
7571 integrity of their crosslinks and indices.
7569
7572
7570 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
7573 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
7571 for more information about recovery from corruption of the
7574 for more information about recovery from corruption of the
7572 repository.
7575 repository.
7573
7576
7574 Returns 0 on success, 1 if errors are encountered.
7577 Returns 0 on success, 1 if errors are encountered.
7575 """
7578 """
7576 opts = pycompat.byteskwargs(opts)
7579 opts = pycompat.byteskwargs(opts)
7577
7580
7578 level = None
7581 level = None
7579 if opts[b'full']:
7582 if opts[b'full']:
7580 level = verifymod.VERIFY_FULL
7583 level = verifymod.VERIFY_FULL
7581 return hg.verify(repo, level)
7584 return hg.verify(repo, level)
7582
7585
7583
7586
7584 @command(
7587 @command(
7585 b'version',
7588 b'version',
7586 [] + formatteropts,
7589 [] + formatteropts,
7587 helpcategory=command.CATEGORY_HELP,
7590 helpcategory=command.CATEGORY_HELP,
7588 norepo=True,
7591 norepo=True,
7589 intents={INTENT_READONLY},
7592 intents={INTENT_READONLY},
7590 )
7593 )
7591 def version_(ui, **opts):
7594 def version_(ui, **opts):
7592 """output version and copyright information
7595 """output version and copyright information
7593
7596
7594 .. container:: verbose
7597 .. container:: verbose
7595
7598
7596 Template:
7599 Template:
7597
7600
7598 The following keywords are supported. See also :hg:`help templates`.
7601 The following keywords are supported. See also :hg:`help templates`.
7599
7602
7600 :extensions: List of extensions.
7603 :extensions: List of extensions.
7601 :ver: String. Version number.
7604 :ver: String. Version number.
7602
7605
7603 And each entry of ``{extensions}`` provides the following sub-keywords
7606 And each entry of ``{extensions}`` provides the following sub-keywords
7604 in addition to ``{ver}``.
7607 in addition to ``{ver}``.
7605
7608
7606 :bundled: Boolean. True if included in the release.
7609 :bundled: Boolean. True if included in the release.
7607 :name: String. Extension name.
7610 :name: String. Extension name.
7608 """
7611 """
7609 opts = pycompat.byteskwargs(opts)
7612 opts = pycompat.byteskwargs(opts)
7610 if ui.verbose:
7613 if ui.verbose:
7611 ui.pager(b'version')
7614 ui.pager(b'version')
7612 fm = ui.formatter(b"version", opts)
7615 fm = ui.formatter(b"version", opts)
7613 fm.startitem()
7616 fm.startitem()
7614 fm.write(
7617 fm.write(
7615 b"ver", _(b"Mercurial Distributed SCM (version %s)\n"), util.version()
7618 b"ver", _(b"Mercurial Distributed SCM (version %s)\n"), util.version()
7616 )
7619 )
7617 license = _(
7620 license = _(
7618 b"(see https://mercurial-scm.org for more information)\n"
7621 b"(see https://mercurial-scm.org for more information)\n"
7619 b"\nCopyright (C) 2005-2020 Matt Mackall and others\n"
7622 b"\nCopyright (C) 2005-2020 Matt Mackall and others\n"
7620 b"This is free software; see the source for copying conditions. "
7623 b"This is free software; see the source for copying conditions. "
7621 b"There is NO\nwarranty; "
7624 b"There is NO\nwarranty; "
7622 b"not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
7625 b"not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
7623 )
7626 )
7624 if not ui.quiet:
7627 if not ui.quiet:
7625 fm.plain(license)
7628 fm.plain(license)
7626
7629
7627 if ui.verbose:
7630 if ui.verbose:
7628 fm.plain(_(b"\nEnabled extensions:\n\n"))
7631 fm.plain(_(b"\nEnabled extensions:\n\n"))
7629 # format names and versions into columns
7632 # format names and versions into columns
7630 names = []
7633 names = []
7631 vers = []
7634 vers = []
7632 isinternals = []
7635 isinternals = []
7633 for name, module in sorted(extensions.extensions()):
7636 for name, module in sorted(extensions.extensions()):
7634 names.append(name)
7637 names.append(name)
7635 vers.append(extensions.moduleversion(module) or None)
7638 vers.append(extensions.moduleversion(module) or None)
7636 isinternals.append(extensions.ismoduleinternal(module))
7639 isinternals.append(extensions.ismoduleinternal(module))
7637 fn = fm.nested(b"extensions", tmpl=b'{name}\n')
7640 fn = fm.nested(b"extensions", tmpl=b'{name}\n')
7638 if names:
7641 if names:
7639 namefmt = b" %%-%ds " % max(len(n) for n in names)
7642 namefmt = b" %%-%ds " % max(len(n) for n in names)
7640 places = [_(b"external"), _(b"internal")]
7643 places = [_(b"external"), _(b"internal")]
7641 for n, v, p in zip(names, vers, isinternals):
7644 for n, v, p in zip(names, vers, isinternals):
7642 fn.startitem()
7645 fn.startitem()
7643 fn.condwrite(ui.verbose, b"name", namefmt, n)
7646 fn.condwrite(ui.verbose, b"name", namefmt, n)
7644 if ui.verbose:
7647 if ui.verbose:
7645 fn.plain(b"%s " % places[p])
7648 fn.plain(b"%s " % places[p])
7646 fn.data(bundled=p)
7649 fn.data(bundled=p)
7647 fn.condwrite(ui.verbose and v, b"ver", b"%s", v)
7650 fn.condwrite(ui.verbose and v, b"ver", b"%s", v)
7648 if ui.verbose:
7651 if ui.verbose:
7649 fn.plain(b"\n")
7652 fn.plain(b"\n")
7650 fn.end()
7653 fn.end()
7651 fm.end()
7654 fm.end()
7652
7655
7653
7656
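As an illustrative invocation only (not part of this changeset), the template keywords documented for the version command above can be combined like so:

  hg version -v -T '{ver}\n{extensions % "{name} {ver}\n"}'

which prints the Mercurial version number followed by one line per enabled extension; --verbose is needed because, as the code above shows, the extension list is only gathered in verbose mode.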
7654 def loadcmdtable(ui, name, cmdtable):
7657 def loadcmdtable(ui, name, cmdtable):
7655 """Load command functions from specified cmdtable
7658 """Load command functions from specified cmdtable
7656 """
7659 """
7657 overrides = [cmd for cmd in cmdtable if cmd in table]
7660 overrides = [cmd for cmd in cmdtable if cmd in table]
7658 if overrides:
7661 if overrides:
7659 ui.warn(
7662 ui.warn(
7660 _(b"extension '%s' overrides commands: %s\n")
7663 _(b"extension '%s' overrides commands: %s\n")
7661 % (name, b" ".join(overrides))
7664 % (name, b" ".join(overrides))
7662 )
7665 )
7663 table.update(cmdtable)
7666 table.update(cmdtable)
@@ -1,3157 +1,2752 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import weakref
11 import weakref
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 hex,
15 hex,
16 nullid,
16 nullid,
17 nullrev,
17 nullrev,
18 )
18 )
19 from .thirdparty import attr
20 from . import (
19 from . import (
21 bookmarks as bookmod,
20 bookmarks as bookmod,
22 bundle2,
21 bundle2,
22 bundlecaches,
23 changegroup,
23 changegroup,
24 discovery,
24 discovery,
25 error,
25 error,
26 exchangev2,
26 exchangev2,
27 lock as lockmod,
27 lock as lockmod,
28 logexchange,
28 logexchange,
29 narrowspec,
29 narrowspec,
30 obsolete,
30 obsolete,
31 obsutil,
31 obsutil,
32 phases,
32 phases,
33 pushkey,
33 pushkey,
34 pycompat,
34 pycompat,
35 requirements,
35 requirements,
36 scmutil,
36 scmutil,
37 sslutil,
38 streamclone,
37 streamclone,
39 url as urlmod,
38 url as urlmod,
40 util,
39 util,
41 wireprototypes,
40 wireprototypes,
42 )
41 )
43 from .utils import (
42 from .utils import (
44 hashutil,
43 hashutil,
45 stringutil,
44 stringutil,
46 )
45 )
47
46
48 urlerr = util.urlerr
47 urlerr = util.urlerr
49 urlreq = util.urlreq
48 urlreq = util.urlreq
50
49
51 _NARROWACL_SECTION = b'narrowacl'
50 _NARROWACL_SECTION = b'narrowacl'
52
51
53 # Maps bundle version human names to changegroup versions.
54 _bundlespeccgversions = {
55 b'v1': b'01',
56 b'v2': b'02',
57 b'packed1': b's1',
58 b'bundle2': b'02', # legacy
59 }
60
61 # Maps bundle version with content opts to choose which part to bundle
62 _bundlespeccontentopts = {
63 b'v1': {
64 b'changegroup': True,
65 b'cg.version': b'01',
66 b'obsolescence': False,
67 b'phases': False,
68 b'tagsfnodescache': False,
69 b'revbranchcache': False,
70 },
71 b'v2': {
72 b'changegroup': True,
73 b'cg.version': b'02',
74 b'obsolescence': False,
75 b'phases': False,
76 b'tagsfnodescache': True,
77 b'revbranchcache': True,
78 },
79 b'packed1': {b'cg.version': b's1'},
80 }
81 _bundlespeccontentopts[b'bundle2'] = _bundlespeccontentopts[b'v2']
82
83 _bundlespecvariants = {
84 b"streamv2": {
85 b"changegroup": False,
86 b"streamv2": True,
87 b"tagsfnodescache": False,
88 b"revbranchcache": False,
89 }
90 }
91
92 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
93 _bundlespecv1compengines = {b'gzip', b'bzip2', b'none'}
94
95
96 @attr.s
97 class bundlespec(object):
98 compression = attr.ib()
99 wirecompression = attr.ib()
100 version = attr.ib()
101 wireversion = attr.ib()
102 params = attr.ib()
103 contentopts = attr.ib()
104
105
106 def parsebundlespec(repo, spec, strict=True):
107 """Parse a bundle string specification into parts.
108
109 Bundle specifications denote a well-defined bundle/exchange format.
110 The content of a given specification should not change over time in
111 order to ensure that bundles produced by a newer version of Mercurial are
112 readable from an older version.
113
114 The string currently has the form:
115
116 <compression>-<type>[;<parameter0>[;<parameter1>]]
117
118 Where <compression> is one of the supported compression formats
119 and <type> is (currently) a version string. A ";" can follow the type and
120 all text afterwards is interpreted as URI encoded, ";" delimited key=value
121 pairs.
122
123 If ``strict`` is True (the default) <compression> is required. Otherwise,
124 it is optional.
125
126 Returns a bundlespec object of (compression, version, parameters).
127 Compression will be ``None`` if not in strict mode and a compression isn't
128 defined.
129
130 An ``InvalidBundleSpecification`` is raised when the specification is
131 not syntactically well formed.
132
133 An ``UnsupportedBundleSpecification`` is raised when the compression or
134 bundle type/version is not recognized.
135
136 Note: this function will likely eventually return a more complex data
137 structure, including bundle2 part information.
138 """
139
140 def parseparams(s):
141 if b';' not in s:
142 return s, {}
143
144 params = {}
145 version, paramstr = s.split(b';', 1)
146
147 for p in paramstr.split(b';'):
148 if b'=' not in p:
149 raise error.InvalidBundleSpecification(
150 _(
151 b'invalid bundle specification: '
152 b'missing "=" in parameter: %s'
153 )
154 % p
155 )
156
157 key, value = p.split(b'=', 1)
158 key = urlreq.unquote(key)
159 value = urlreq.unquote(value)
160 params[key] = value
161
162 return version, params
163
164 if strict and b'-' not in spec:
165 raise error.InvalidBundleSpecification(
166 _(
167 b'invalid bundle specification; '
168 b'must be prefixed with compression: %s'
169 )
170 % spec
171 )
172
173 if b'-' in spec:
174 compression, version = spec.split(b'-', 1)
175
176 if compression not in util.compengines.supportedbundlenames:
177 raise error.UnsupportedBundleSpecification(
178 _(b'%s compression is not supported') % compression
179 )
180
181 version, params = parseparams(version)
182
183 if version not in _bundlespeccgversions:
184 raise error.UnsupportedBundleSpecification(
185 _(b'%s is not a recognized bundle version') % version
186 )
187 else:
188 # Value could be just the compression or just the version, in which
189 # case some defaults are assumed (but only when not in strict mode).
190 assert not strict
191
192 spec, params = parseparams(spec)
193
194 if spec in util.compengines.supportedbundlenames:
195 compression = spec
196 version = b'v1'
197 # Generaldelta repos require v2.
198 if b'generaldelta' in repo.requirements:
199 version = b'v2'
200 # Modern compression engines require v2.
201 if compression not in _bundlespecv1compengines:
202 version = b'v2'
203 elif spec in _bundlespeccgversions:
204 if spec == b'packed1':
205 compression = b'none'
206 else:
207 compression = b'bzip2'
208 version = spec
209 else:
210 raise error.UnsupportedBundleSpecification(
211 _(b'%s is not a recognized bundle specification') % spec
212 )
213
214 # Bundle version 1 only supports a known set of compression engines.
215 if version == b'v1' and compression not in _bundlespecv1compengines:
216 raise error.UnsupportedBundleSpecification(
217 _(b'compression engine %s is not supported on v1 bundles')
218 % compression
219 )
220
221 # The specification for packed1 can optionally declare the data formats
222 # required to apply it. If we see this metadata, compare against what the
223 # repo supports and error if the bundle isn't compatible.
224 if version == b'packed1' and b'requirements' in params:
225 requirements = set(params[b'requirements'].split(b','))
226 missingreqs = requirements - repo.supportedformats
227 if missingreqs:
228 raise error.UnsupportedBundleSpecification(
229 _(b'missing support for repository features: %s')
230 % b', '.join(sorted(missingreqs))
231 )
232
233 # Compute contentopts based on the version
234 contentopts = _bundlespeccontentopts.get(version, {}).copy()
235
236 # Process the variants
237 if b"stream" in params and params[b"stream"] == b"v2":
238 variant = _bundlespecvariants[b"streamv2"]
239 contentopts.update(variant)
240
241 engine = util.compengines.forbundlename(compression)
242 compression, wirecompression = engine.bundletype()
243 wireversion = _bundlespeccgversions[version]
244
245 return bundlespec(
246 compression, wirecompression, version, wireversion, params, contentopts
247 )
248
249
52
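The parsebundlespec helper removed above (this changeset moves it into the new bundlecaches module) documents the <compression>-<type>[;<parameter0>[;<parameter1>]] grammar. Below is a minimal, self-contained sketch of just that grammar, leaving out the strict-mode handling and the compatibility checks the real function performs; the helper name is made up for illustration.

from urllib.parse import unquote

def parse_spec_sketch(spec):
    # split "<compression>-<type>[;key=value[;key=value]]" into its pieces;
    # a simplified illustration, not the Mercurial API
    compression, rest = spec.split('-', 1)
    version, _, paramstr = rest.partition(';')
    params = {}
    for pair in paramstr.split(';') if paramstr else []:
        key, _, value = pair.partition('=')
        params[unquote(key)] = unquote(value)
    return compression, version, params

# parse_spec_sketch('zstd-v2;obsolescence=true')
# -> ('zstd', 'v2', {'obsolescence': 'true'})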
250 def readbundle(ui, fh, fname, vfs=None):
53 def readbundle(ui, fh, fname, vfs=None):
251 header = changegroup.readexactly(fh, 4)
54 header = changegroup.readexactly(fh, 4)
252
55
253 alg = None
56 alg = None
254 if not fname:
57 if not fname:
255 fname = b"stream"
58 fname = b"stream"
256 if not header.startswith(b'HG') and header.startswith(b'\0'):
59 if not header.startswith(b'HG') and header.startswith(b'\0'):
257 fh = changegroup.headerlessfixup(fh, header)
60 fh = changegroup.headerlessfixup(fh, header)
258 header = b"HG10"
61 header = b"HG10"
259 alg = b'UN'
62 alg = b'UN'
260 elif vfs:
63 elif vfs:
261 fname = vfs.join(fname)
64 fname = vfs.join(fname)
262
65
263 magic, version = header[0:2], header[2:4]
66 magic, version = header[0:2], header[2:4]
264
67
265 if magic != b'HG':
68 if magic != b'HG':
266 raise error.Abort(_(b'%s: not a Mercurial bundle') % fname)
69 raise error.Abort(_(b'%s: not a Mercurial bundle') % fname)
267 if version == b'10':
70 if version == b'10':
268 if alg is None:
71 if alg is None:
269 alg = changegroup.readexactly(fh, 2)
72 alg = changegroup.readexactly(fh, 2)
270 return changegroup.cg1unpacker(fh, alg)
73 return changegroup.cg1unpacker(fh, alg)
271 elif version.startswith(b'2'):
74 elif version.startswith(b'2'):
272 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
75 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
273 elif version == b'S1':
76 elif version == b'S1':
274 return streamclone.streamcloneapplier(fh)
77 return streamclone.streamcloneapplier(fh)
275 else:
78 else:
276 raise error.Abort(
79 raise error.Abort(
277 _(b'%s: unknown bundle version %s') % (fname, version)
80 _(b'%s: unknown bundle version %s') % (fname, version)
278 )
81 )
279
82
280
83
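readbundle above dispatches on the first four bytes of the stream. Here is a small sketch of just that header check; the mapping of prefixes to formats is taken from the branches above, while the helper name and return strings are purely illustrative.

def identify_bundle_header(header):
    # header: the first 4 bytes read from a bundle stream
    if not header.startswith(b'HG'):
        if header.startswith(b'\0'):
            return 'headerless changegroup, fixed up as HG10UN'
        raise ValueError('not a Mercurial bundle')
    version = header[2:4]
    if version == b'10':
        return 'bundle1 (changegroup v1; compression identifier follows)'
    if version.startswith(b'2'):
        return 'bundle2'
    if version == b'S1':
        return 'stream clone bundle'
    raise ValueError('unknown bundle version %r' % version)

# identify_bundle_header(b'HG20') -> 'bundle2'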
281 def getbundlespec(ui, fh):
84 def getbundlespec(ui, fh):
282 """Infer the bundlespec from a bundle file handle.
85 """Infer the bundlespec from a bundle file handle.
283
86
284 The input file handle is seeked and the original seek position is not
87 The input file handle is seeked and the original seek position is not
285 restored.
88 restored.
286 """
89 """
287
90
288 def speccompression(alg):
91 def speccompression(alg):
289 try:
92 try:
290 return util.compengines.forbundletype(alg).bundletype()[0]
93 return util.compengines.forbundletype(alg).bundletype()[0]
291 except KeyError:
94 except KeyError:
292 return None
95 return None
293
96
294 b = readbundle(ui, fh, None)
97 b = readbundle(ui, fh, None)
295 if isinstance(b, changegroup.cg1unpacker):
98 if isinstance(b, changegroup.cg1unpacker):
296 alg = b._type
99 alg = b._type
297 if alg == b'_truncatedBZ':
100 if alg == b'_truncatedBZ':
298 alg = b'BZ'
101 alg = b'BZ'
299 comp = speccompression(alg)
102 comp = speccompression(alg)
300 if not comp:
103 if not comp:
301 raise error.Abort(_(b'unknown compression algorithm: %s') % alg)
104 raise error.Abort(_(b'unknown compression algorithm: %s') % alg)
302 return b'%s-v1' % comp
105 return b'%s-v1' % comp
303 elif isinstance(b, bundle2.unbundle20):
106 elif isinstance(b, bundle2.unbundle20):
304 if b'Compression' in b.params:
107 if b'Compression' in b.params:
305 comp = speccompression(b.params[b'Compression'])
108 comp = speccompression(b.params[b'Compression'])
306 if not comp:
109 if not comp:
307 raise error.Abort(
110 raise error.Abort(
308 _(b'unknown compression algorithm: %s') % comp
111 _(b'unknown compression algorithm: %s') % comp
309 )
112 )
310 else:
113 else:
311 comp = b'none'
114 comp = b'none'
312
115
313 version = None
116 version = None
314 for part in b.iterparts():
117 for part in b.iterparts():
315 if part.type == b'changegroup':
118 if part.type == b'changegroup':
316 version = part.params[b'version']
119 version = part.params[b'version']
317 if version in (b'01', b'02'):
120 if version in (b'01', b'02'):
318 version = b'v2'
121 version = b'v2'
319 else:
122 else:
320 raise error.Abort(
123 raise error.Abort(
321 _(
124 _(
322 b'changegroup version %s does not have '
125 b'changegroup version %s does not have '
323 b'a known bundlespec'
126 b'a known bundlespec'
324 )
127 )
325 % version,
128 % version,
326 hint=_(b'try upgrading your Mercurial client'),
129 hint=_(b'try upgrading your Mercurial client'),
327 )
130 )
328 elif part.type == b'stream2' and version is None:
131 elif part.type == b'stream2' and version is None:
329 # A stream2 part requires to be part of a v2 bundle
132 # A stream2 part requires to be part of a v2 bundle
330 requirements = urlreq.unquote(part.params[b'requirements'])
133 requirements = urlreq.unquote(part.params[b'requirements'])
331 splitted = requirements.split()
134 splitted = requirements.split()
332 params = bundle2._formatrequirementsparams(splitted)
135 params = bundle2._formatrequirementsparams(splitted)
333 return b'none-v2;stream=v2;%s' % params
136 return b'none-v2;stream=v2;%s' % params
334
137
335 if not version:
138 if not version:
336 raise error.Abort(
139 raise error.Abort(
337 _(b'could not identify changegroup version in bundle')
140 _(b'could not identify changegroup version in bundle')
338 )
141 )
339
142
340 return b'%s-%s' % (comp, version)
143 return b'%s-%s' % (comp, version)
341 elif isinstance(b, streamclone.streamcloneapplier):
144 elif isinstance(b, streamclone.streamcloneapplier):
342 requirements = streamclone.readbundle1header(fh)[2]
145 requirements = streamclone.readbundle1header(fh)[2]
343 formatted = bundle2._formatrequirementsparams(requirements)
146 formatted = bundle2._formatrequirementsparams(requirements)
344 return b'none-packed1;%s' % formatted
147 return b'none-packed1;%s' % formatted
345 else:
148 else:
346 raise error.Abort(_(b'unknown bundle type: %s') % b)
149 raise error.Abort(_(b'unknown bundle type: %s') % b)
347
150
348
151
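A hedged usage sketch for getbundlespec: given a bundle file on disk, it returns the spec string describing that bundle. The file name and the ui object are hypothetical; only the function itself comes from the code above.

# assumes a ui object is already available as `u`
# and that 'changesets.hg' was created with `hg bundle`
with open('changesets.hg', 'rb') as fh:
    spec = getbundlespec(u, fh)
print(spec)  # e.g. b'zstd-v2' or b'none-packed1;requirements=...'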
349 def _computeoutgoing(repo, heads, common):
152 def _computeoutgoing(repo, heads, common):
350 """Computes which revs are outgoing given a set of common
153 """Computes which revs are outgoing given a set of common
351 and a set of heads.
154 and a set of heads.
352
155
353 This is a separate function so extensions can have access to
156 This is a separate function so extensions can have access to
354 the logic.
157 the logic.
355
158
356 Returns a discovery.outgoing object.
159 Returns a discovery.outgoing object.
357 """
160 """
358 cl = repo.changelog
161 cl = repo.changelog
359 if common:
162 if common:
360 hasnode = cl.hasnode
163 hasnode = cl.hasnode
361 common = [n for n in common if hasnode(n)]
164 common = [n for n in common if hasnode(n)]
362 else:
165 else:
363 common = [nullid]
166 common = [nullid]
364 if not heads:
167 if not heads:
365 heads = cl.heads()
168 heads = cl.heads()
366 return discovery.outgoing(repo, common, heads)
169 return discovery.outgoing(repo, common, heads)
367
170
368
171
369 def _checkpublish(pushop):
172 def _checkpublish(pushop):
370 repo = pushop.repo
173 repo = pushop.repo
371 ui = repo.ui
174 ui = repo.ui
372 behavior = ui.config(b'experimental', b'auto-publish')
175 behavior = ui.config(b'experimental', b'auto-publish')
373 if pushop.publish or behavior not in (b'warn', b'confirm', b'abort'):
176 if pushop.publish or behavior not in (b'warn', b'confirm', b'abort'):
374 return
177 return
375 remotephases = listkeys(pushop.remote, b'phases')
178 remotephases = listkeys(pushop.remote, b'phases')
376 if not remotephases.get(b'publishing', False):
179 if not remotephases.get(b'publishing', False):
377 return
180 return
378
181
379 if pushop.revs is None:
182 if pushop.revs is None:
380 published = repo.filtered(b'served').revs(b'not public()')
183 published = repo.filtered(b'served').revs(b'not public()')
381 else:
184 else:
382 published = repo.revs(b'::%ln - public()', pushop.revs)
185 published = repo.revs(b'::%ln - public()', pushop.revs)
383 if published:
186 if published:
384 if behavior == b'warn':
187 if behavior == b'warn':
385 ui.warn(
188 ui.warn(
386 _(b'%i changesets about to be published\n') % len(published)
189 _(b'%i changesets about to be published\n') % len(published)
387 )
190 )
388 elif behavior == b'confirm':
191 elif behavior == b'confirm':
389 if ui.promptchoice(
192 if ui.promptchoice(
390 _(b'push and publish %i changesets (yn)?$$ &Yes $$ &No')
193 _(b'push and publish %i changesets (yn)?$$ &Yes $$ &No')
391 % len(published)
194 % len(published)
392 ):
195 ):
393 raise error.Abort(_(b'user quit'))
196 raise error.Abort(_(b'user quit'))
394 elif behavior == b'abort':
197 elif behavior == b'abort':
395 msg = _(b'push would publish %i changesets') % len(published)
198 msg = _(b'push would publish %i changesets') % len(published)
396 hint = _(
199 hint = _(
397 b"use --publish or adjust 'experimental.auto-publish'"
200 b"use --publish or adjust 'experimental.auto-publish'"
398 b" config"
201 b" config"
399 )
202 )
400 raise error.Abort(msg, hint=hint)
203 raise error.Abort(msg, hint=hint)
401
204
402
205
403 def _forcebundle1(op):
206 def _forcebundle1(op):
404 """return true if a pull/push must use bundle1
207 """return true if a pull/push must use bundle1
405
208
406 This function is used to allow testing of the older bundle version"""
209 This function is used to allow testing of the older bundle version"""
407 ui = op.repo.ui
210 ui = op.repo.ui
408 # The goal of this config is to allow developers to choose the bundle
211 # The goal of this config is to allow developers to choose the bundle
409 # version used during exchange. This is especially handy during tests.
212 # version used during exchange. This is especially handy during tests.
410 # Value is a list of bundle versions to pick from; the highest version
213 # Value is a list of bundle versions to pick from; the highest version
411 # should be used.
214 # should be used.
412 #
215 #
413 # developer config: devel.legacy.exchange
216 # developer config: devel.legacy.exchange
414 exchange = ui.configlist(b'devel', b'legacy.exchange')
217 exchange = ui.configlist(b'devel', b'legacy.exchange')
415 forcebundle1 = b'bundle2' not in exchange and b'bundle1' in exchange
218 forcebundle1 = b'bundle2' not in exchange and b'bundle1' in exchange
416 return forcebundle1 or not op.remote.capable(b'bundle2')
219 return forcebundle1 or not op.remote.capable(b'bundle2')
417
220
418
221
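Restating the decision made by _forcebundle1 above as a standalone function, purely as a sketch with made-up argument names:

def force_bundle1_sketch(legacy_exchange, remote_has_bundle2):
    # legacy_exchange mirrors ui.configlist(b'devel', b'legacy.exchange')
    forced = b'bundle2' not in legacy_exchange and b'bundle1' in legacy_exchange
    return forced or not remote_has_bundle2

# force_bundle1_sketch([b'bundle1'], remote_has_bundle2=True) -> True
# force_bundle1_sketch([], remote_has_bundle2=True)           -> False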
419 class pushoperation(object):
222 class pushoperation(object):
420 """A object that represent a single push operation
223 """A object that represent a single push operation
421
224
422 Its purpose is to carry push related state and very common operations.
225 Its purpose is to carry push related state and very common operations.
423
226
424 A new pushoperation should be created at the beginning of each push and
227 A new pushoperation should be created at the beginning of each push and
425 discarded afterward.
228 discarded afterward.
426 """
229 """
427
230
428 def __init__(
231 def __init__(
429 self,
232 self,
430 repo,
233 repo,
431 remote,
234 remote,
432 force=False,
235 force=False,
433 revs=None,
236 revs=None,
434 newbranch=False,
237 newbranch=False,
435 bookmarks=(),
238 bookmarks=(),
436 publish=False,
239 publish=False,
437 pushvars=None,
240 pushvars=None,
438 ):
241 ):
439 # repo we push from
242 # repo we push from
440 self.repo = repo
243 self.repo = repo
441 self.ui = repo.ui
244 self.ui = repo.ui
442 # repo we push to
245 # repo we push to
443 self.remote = remote
246 self.remote = remote
444 # force option provided
247 # force option provided
445 self.force = force
248 self.force = force
446 # revs to be pushed (None is "all")
249 # revs to be pushed (None is "all")
447 self.revs = revs
250 self.revs = revs
448 # bookmark explicitly pushed
251 # bookmark explicitly pushed
449 self.bookmarks = bookmarks
252 self.bookmarks = bookmarks
450 # allow push of new branch
253 # allow push of new branch
451 self.newbranch = newbranch
254 self.newbranch = newbranch
452 # step already performed
255 # step already performed
453 # (used to check what steps have been already performed through bundle2)
256 # (used to check what steps have been already performed through bundle2)
454 self.stepsdone = set()
257 self.stepsdone = set()
455 # Integer version of the changegroup push result
258 # Integer version of the changegroup push result
456 # - None means nothing to push
259 # - None means nothing to push
457 # - 0 means HTTP error
260 # - 0 means HTTP error
458 # - 1 means we pushed and remote head count is unchanged *or*
261 # - 1 means we pushed and remote head count is unchanged *or*
459 # we have outgoing changesets but refused to push
262 # we have outgoing changesets but refused to push
460 # - other values as described by addchangegroup()
263 # - other values as described by addchangegroup()
461 self.cgresult = None
264 self.cgresult = None
462 # Boolean value for the bookmark push
265 # Boolean value for the bookmark push
463 self.bkresult = None
266 self.bkresult = None
464 # discover.outgoing object (contains common and outgoing data)
267 # discover.outgoing object (contains common and outgoing data)
465 self.outgoing = None
268 self.outgoing = None
466 # all remote topological heads before the push
269 # all remote topological heads before the push
467 self.remoteheads = None
270 self.remoteheads = None
468 # Details of the remote branch pre and post push
271 # Details of the remote branch pre and post push
469 #
272 #
470 # mapping: {'branch': ([remoteheads],
273 # mapping: {'branch': ([remoteheads],
471 # [newheads],
274 # [newheads],
472 # [unsyncedheads],
275 # [unsyncedheads],
473 # [discardedheads])}
276 # [discardedheads])}
474 # - branch: the branch name
277 # - branch: the branch name
475 # - remoteheads: the list of remote heads known locally
278 # - remoteheads: the list of remote heads known locally
476 # None if the branch is new
279 # None if the branch is new
477 # - newheads: the new remote heads (known locally) with outgoing pushed
280 # - newheads: the new remote heads (known locally) with outgoing pushed
478 # - unsyncedheads: the list of remote heads unknown locally.
281 # - unsyncedheads: the list of remote heads unknown locally.
479 # - discardedheads: the list of remote heads made obsolete by the push
282 # - discardedheads: the list of remote heads made obsolete by the push
480 self.pushbranchmap = None
283 self.pushbranchmap = None
481 # testable as a boolean indicating if any nodes are missing locally.
284 # testable as a boolean indicating if any nodes are missing locally.
482 self.incoming = None
285 self.incoming = None
483 # summary of the remote phase situation
286 # summary of the remote phase situation
484 self.remotephases = None
287 self.remotephases = None
485 # phases changes that must be pushed along side the changesets
288 # phases changes that must be pushed along side the changesets
486 self.outdatedphases = None
289 self.outdatedphases = None
487 # phases changes that must be pushed if changeset push fails
290 # phases changes that must be pushed if changeset push fails
488 self.fallbackoutdatedphases = None
291 self.fallbackoutdatedphases = None
489 # outgoing obsmarkers
292 # outgoing obsmarkers
490 self.outobsmarkers = set()
293 self.outobsmarkers = set()
491 # outgoing bookmarks, list of (bm, oldnode | '', newnode | '')
294 # outgoing bookmarks, list of (bm, oldnode | '', newnode | '')
492 self.outbookmarks = []
295 self.outbookmarks = []
493 # transaction manager
296 # transaction manager
494 self.trmanager = None
297 self.trmanager = None
495 # map { pushkey partid -> callback handling failure}
298 # map { pushkey partid -> callback handling failure}
496 # used to handle exception from mandatory pushkey part failure
299 # used to handle exception from mandatory pushkey part failure
497 self.pkfailcb = {}
300 self.pkfailcb = {}
498 # an iterable of pushvars or None
301 # an iterable of pushvars or None
499 self.pushvars = pushvars
302 self.pushvars = pushvars
500 # publish pushed changesets
303 # publish pushed changesets
501 self.publish = publish
304 self.publish = publish
502
305
503 @util.propertycache
306 @util.propertycache
504 def futureheads(self):
307 def futureheads(self):
505 """future remote heads if the changeset push succeeds"""
308 """future remote heads if the changeset push succeeds"""
506 return self.outgoing.ancestorsof
309 return self.outgoing.ancestorsof
507
310
508 @util.propertycache
311 @util.propertycache
509 def fallbackheads(self):
312 def fallbackheads(self):
510 """future remote heads if the changeset push fails"""
313 """future remote heads if the changeset push fails"""
511 if self.revs is None:
314 if self.revs is None:
512 # no targets to push, all common heads are relevant
315 # no targets to push, all common heads are relevant
513 return self.outgoing.commonheads
316 return self.outgoing.commonheads
514 unfi = self.repo.unfiltered()
317 unfi = self.repo.unfiltered()
515 # I want cheads = heads(::ancestorsof and ::commonheads)
318 # I want cheads = heads(::ancestorsof and ::commonheads)
516 # (ancestorsof is revs with secret changeset filtered out)
319 # (ancestorsof is revs with secret changeset filtered out)
517 #
320 #
518 # This can be expressed as:
321 # This can be expressed as:
519 # cheads = ( (ancestorsof and ::commonheads)
322 # cheads = ( (ancestorsof and ::commonheads)
520 # + (commonheads and ::ancestorsof))"
323 # + (commonheads and ::ancestorsof))"
521 # )
324 # )
522 #
325 #
523 # while trying to push we already computed the following:
326 # while trying to push we already computed the following:
524 # common = (::commonheads)
327 # common = (::commonheads)
525 # missing = ((commonheads::ancestorsof) - commonheads)
328 # missing = ((commonheads::ancestorsof) - commonheads)
526 #
329 #
527 # We can pick:
330 # We can pick:
528 # * ancestorsof part of common (::commonheads)
331 # * ancestorsof part of common (::commonheads)
529 common = self.outgoing.common
332 common = self.outgoing.common
530 rev = self.repo.changelog.index.rev
333 rev = self.repo.changelog.index.rev
531 cheads = [node for node in self.revs if rev(node) in common]
334 cheads = [node for node in self.revs if rev(node) in common]
532 # and
335 # and
533 # * commonheads parents on missing
336 # * commonheads parents on missing
534 revset = unfi.set(
337 revset = unfi.set(
535 b'%ln and parents(roots(%ln))',
338 b'%ln and parents(roots(%ln))',
536 self.outgoing.commonheads,
339 self.outgoing.commonheads,
537 self.outgoing.missing,
340 self.outgoing.missing,
538 )
341 )
539 cheads.extend(c.node() for c in revset)
342 cheads.extend(c.node() for c in revset)
540 return cheads
343 return cheads
541
344
542 @property
345 @property
543 def commonheads(self):
346 def commonheads(self):
544 """set of all common heads after changeset bundle push"""
347 """set of all common heads after changeset bundle push"""
545 if self.cgresult:
348 if self.cgresult:
546 return self.futureheads
349 return self.futureheads
547 else:
350 else:
548 return self.fallbackheads
351 return self.fallbackheads
549
352
550
353
551 # mapping of message used when pushing bookmark
354 # mapping of message used when pushing bookmark
552 bookmsgmap = {
355 bookmsgmap = {
553 b'update': (
356 b'update': (
554 _(b"updating bookmark %s\n"),
357 _(b"updating bookmark %s\n"),
555 _(b'updating bookmark %s failed!\n'),
358 _(b'updating bookmark %s failed!\n'),
556 ),
359 ),
557 b'export': (
360 b'export': (
558 _(b"exporting bookmark %s\n"),
361 _(b"exporting bookmark %s\n"),
559 _(b'exporting bookmark %s failed!\n'),
362 _(b'exporting bookmark %s failed!\n'),
560 ),
363 ),
561 b'delete': (
364 b'delete': (
562 _(b"deleting remote bookmark %s\n"),
365 _(b"deleting remote bookmark %s\n"),
563 _(b'deleting remote bookmark %s failed!\n'),
366 _(b'deleting remote bookmark %s failed!\n'),
564 ),
367 ),
565 }
368 }
566
369
567
370
568 def push(
371 def push(
569 repo,
372 repo,
570 remote,
373 remote,
571 force=False,
374 force=False,
572 revs=None,
375 revs=None,
573 newbranch=False,
376 newbranch=False,
574 bookmarks=(),
377 bookmarks=(),
575 publish=False,
378 publish=False,
576 opargs=None,
379 opargs=None,
577 ):
380 ):
578 '''Push outgoing changesets (limited by revs) from a local
381 '''Push outgoing changesets (limited by revs) from a local
579 repository to remote. Return an integer:
382 repository to remote. Return an integer:
580 - None means nothing to push
383 - None means nothing to push
581 - 0 means HTTP error
384 - 0 means HTTP error
582 - 1 means we pushed and remote head count is unchanged *or*
385 - 1 means we pushed and remote head count is unchanged *or*
583 we have outgoing changesets but refused to push
386 we have outgoing changesets but refused to push
584 - other values as described by addchangegroup()
387 - other values as described by addchangegroup()
585 '''
388 '''
586 if opargs is None:
389 if opargs is None:
587 opargs = {}
390 opargs = {}
588 pushop = pushoperation(
391 pushop = pushoperation(
589 repo,
392 repo,
590 remote,
393 remote,
591 force,
394 force,
592 revs,
395 revs,
593 newbranch,
396 newbranch,
594 bookmarks,
397 bookmarks,
595 publish,
398 publish,
596 **pycompat.strkwargs(opargs)
399 **pycompat.strkwargs(opargs)
597 )
400 )
598 if pushop.remote.local():
401 if pushop.remote.local():
599 missing = (
402 missing = (
600 set(pushop.repo.requirements) - pushop.remote.local().supported
403 set(pushop.repo.requirements) - pushop.remote.local().supported
601 )
404 )
602 if missing:
405 if missing:
603 msg = _(
406 msg = _(
604 b"required features are not"
407 b"required features are not"
605 b" supported in the destination:"
408 b" supported in the destination:"
606 b" %s"
409 b" %s"
607 ) % (b', '.join(sorted(missing)))
410 ) % (b', '.join(sorted(missing)))
608 raise error.Abort(msg)
411 raise error.Abort(msg)
609
412
610 if not pushop.remote.canpush():
413 if not pushop.remote.canpush():
611 raise error.Abort(_(b"destination does not support push"))
414 raise error.Abort(_(b"destination does not support push"))
612
415
613 if not pushop.remote.capable(b'unbundle'):
416 if not pushop.remote.capable(b'unbundle'):
614 raise error.Abort(
417 raise error.Abort(
615 _(
418 _(
616 b'cannot push: destination does not support the '
419 b'cannot push: destination does not support the '
617 b'unbundle wire protocol command'
420 b'unbundle wire protocol command'
618 )
421 )
619 )
422 )
620
423
621 # get lock as we might write phase data
424 # get lock as we might write phase data
622 wlock = lock = None
425 wlock = lock = None
623 try:
426 try:
624 # bundle2 push may receive a reply bundle touching bookmarks
427 # bundle2 push may receive a reply bundle touching bookmarks
625 # requiring the wlock. Take it now to ensure proper ordering.
428 # requiring the wlock. Take it now to ensure proper ordering.
626 maypushback = pushop.ui.configbool(b'experimental', b'bundle2.pushback')
429 maypushback = pushop.ui.configbool(b'experimental', b'bundle2.pushback')
627 if (
430 if (
628 (not _forcebundle1(pushop))
431 (not _forcebundle1(pushop))
629 and maypushback
432 and maypushback
630 and not bookmod.bookmarksinstore(repo)
433 and not bookmod.bookmarksinstore(repo)
631 ):
434 ):
632 wlock = pushop.repo.wlock()
435 wlock = pushop.repo.wlock()
633 lock = pushop.repo.lock()
436 lock = pushop.repo.lock()
634 pushop.trmanager = transactionmanager(
437 pushop.trmanager = transactionmanager(
635 pushop.repo, b'push-response', pushop.remote.url()
438 pushop.repo, b'push-response', pushop.remote.url()
636 )
439 )
637 except error.LockUnavailable as err:
440 except error.LockUnavailable as err:
638 # source repo cannot be locked.
441 # source repo cannot be locked.
639 # We do not abort the push, but just disable the local phase
442 # We do not abort the push, but just disable the local phase
640 # synchronisation.
443 # synchronisation.
641 msg = b'cannot lock source repository: %s\n' % stringutil.forcebytestr(
444 msg = b'cannot lock source repository: %s\n' % stringutil.forcebytestr(
642 err
445 err
643 )
446 )
644 pushop.ui.debug(msg)
447 pushop.ui.debug(msg)
645
448
646 with wlock or util.nullcontextmanager():
449 with wlock or util.nullcontextmanager():
647 with lock or util.nullcontextmanager():
450 with lock or util.nullcontextmanager():
648 with pushop.trmanager or util.nullcontextmanager():
451 with pushop.trmanager or util.nullcontextmanager():
649 pushop.repo.checkpush(pushop)
452 pushop.repo.checkpush(pushop)
650 _checkpublish(pushop)
453 _checkpublish(pushop)
651 _pushdiscovery(pushop)
454 _pushdiscovery(pushop)
652 if not pushop.force:
455 if not pushop.force:
653 _checksubrepostate(pushop)
456 _checksubrepostate(pushop)
654 if not _forcebundle1(pushop):
457 if not _forcebundle1(pushop):
655 _pushbundle2(pushop)
458 _pushbundle2(pushop)
656 _pushchangeset(pushop)
459 _pushchangeset(pushop)
657 _pushsyncphase(pushop)
460 _pushsyncphase(pushop)
658 _pushobsolete(pushop)
461 _pushobsolete(pushop)
659 _pushbookmark(pushop)
462 _pushbookmark(pushop)
660
463
661 if repo.ui.configbool(b'experimental', b'remotenames'):
464 if repo.ui.configbool(b'experimental', b'remotenames'):
662 logexchange.pullremotenames(repo, remote)
465 logexchange.pullremotenames(repo, remote)
663
466
664 return pushop
467 return pushop
665
468
666
469
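For orientation, a hedged sketch of driving push() programmatically and checking the result codes listed in its docstring; the repository path and URL are hypothetical.

from mercurial import exchange, hg, ui as uimod

u = uimod.ui.load()
repo = hg.repository(u, b'/path/to/local/repo')       # hypothetical path
other = hg.peer(u, {}, b'https://example.com/repo')   # hypothetical URL
pushop = exchange.push(repo, other)
if pushop.cgresult is None:
    u.status(b'nothing to push\n')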
667 # list of steps to perform discovery before push
470 # list of steps to perform discovery before push
668 pushdiscoveryorder = []
471 pushdiscoveryorder = []
669
472
670 # Mapping between step name and function
473 # Mapping between step name and function
671 #
474 #
672 # This exists to help extensions wrap steps if necessary
475 # This exists to help extensions wrap steps if necessary
673 pushdiscoverymapping = {}
476 pushdiscoverymapping = {}
674
477
675
478
676 def pushdiscovery(stepname):
479 def pushdiscovery(stepname):
677 """decorator for function performing discovery before push
480 """decorator for function performing discovery before push
678
481
679 The function is added to the step -> function mapping and appended to the
482 The function is added to the step -> function mapping and appended to the
680 list of steps. Beware that decorated function will be added in order (this
483 list of steps. Beware that decorated function will be added in order (this
681 may matter).
484 may matter).
682
485
683 You can only use this decorator for a new step; if you want to wrap a step
486 You can only use this decorator for a new step; if you want to wrap a step
684 from an extension, change the pushdiscoverymapping dictionary directly."""
487 from an extension, change the pushdiscoverymapping dictionary directly."""
685
488
686 def dec(func):
489 def dec(func):
687 assert stepname not in pushdiscoverymapping
490 assert stepname not in pushdiscoverymapping
688 pushdiscoverymapping[stepname] = func
491 pushdiscoverymapping[stepname] = func
689 pushdiscoveryorder.append(stepname)
492 pushdiscoveryorder.append(stepname)
690 return func
493 return func
691
494
692 return dec
495 return dec
693
496
694
497
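As the pushdiscovery docstring above notes, extensions can register extra discovery steps. A minimal sketch of such a registration in a third-party extension; the step name and function body are hypothetical.

from mercurial import exchange

@exchange.pushdiscovery(b'example-step')
def _pushdiscoveryexample(pushop):
    # runs after the built-in 'changeset', 'phase', 'obsmarker' and
    # 'bookmarks' steps, since steps execute in registration order
    pushop.ui.debug(b'example discovery step ran\n')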
695 def _pushdiscovery(pushop):
498 def _pushdiscovery(pushop):
696 """Run all discovery steps"""
499 """Run all discovery steps"""
697 for stepname in pushdiscoveryorder:
500 for stepname in pushdiscoveryorder:
698 step = pushdiscoverymapping[stepname]
501 step = pushdiscoverymapping[stepname]
699 step(pushop)
502 step(pushop)
700
503
701
504
702 def _checksubrepostate(pushop):
505 def _checksubrepostate(pushop):
703 """Ensure all outgoing referenced subrepo revisions are present locally"""
506 """Ensure all outgoing referenced subrepo revisions are present locally"""
704 for n in pushop.outgoing.missing:
507 for n in pushop.outgoing.missing:
705 ctx = pushop.repo[n]
508 ctx = pushop.repo[n]
706
509
707 if b'.hgsub' in ctx.manifest() and b'.hgsubstate' in ctx.files():
510 if b'.hgsub' in ctx.manifest() and b'.hgsubstate' in ctx.files():
708 for subpath in sorted(ctx.substate):
511 for subpath in sorted(ctx.substate):
709 sub = ctx.sub(subpath)
512 sub = ctx.sub(subpath)
710 sub.verify(onpush=True)
513 sub.verify(onpush=True)
711
514
712
515
713 @pushdiscovery(b'changeset')
516 @pushdiscovery(b'changeset')
714 def _pushdiscoverychangeset(pushop):
517 def _pushdiscoverychangeset(pushop):
715 """discover the changeset that need to be pushed"""
518 """discover the changeset that need to be pushed"""
716 fci = discovery.findcommonincoming
519 fci = discovery.findcommonincoming
717 if pushop.revs:
520 if pushop.revs:
718 commoninc = fci(
521 commoninc = fci(
719 pushop.repo,
522 pushop.repo,
720 pushop.remote,
523 pushop.remote,
721 force=pushop.force,
524 force=pushop.force,
722 ancestorsof=pushop.revs,
525 ancestorsof=pushop.revs,
723 )
526 )
724 else:
527 else:
725 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
528 commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
726 common, inc, remoteheads = commoninc
529 common, inc, remoteheads = commoninc
727 fco = discovery.findcommonoutgoing
530 fco = discovery.findcommonoutgoing
728 outgoing = fco(
531 outgoing = fco(
729 pushop.repo,
532 pushop.repo,
730 pushop.remote,
533 pushop.remote,
731 onlyheads=pushop.revs,
534 onlyheads=pushop.revs,
732 commoninc=commoninc,
535 commoninc=commoninc,
733 force=pushop.force,
536 force=pushop.force,
734 )
537 )
735 pushop.outgoing = outgoing
538 pushop.outgoing = outgoing
736 pushop.remoteheads = remoteheads
539 pushop.remoteheads = remoteheads
737 pushop.incoming = inc
540 pushop.incoming = inc
738
541
739
542
740 @pushdiscovery(b'phase')
543 @pushdiscovery(b'phase')
741 def _pushdiscoveryphase(pushop):
544 def _pushdiscoveryphase(pushop):
742 """discover the phase that needs to be pushed
545 """discover the phase that needs to be pushed
743
546
744 (computed for both success and failure case for changesets push)"""
547 (computed for both success and failure case for changesets push)"""
745 outgoing = pushop.outgoing
548 outgoing = pushop.outgoing
746 unfi = pushop.repo.unfiltered()
549 unfi = pushop.repo.unfiltered()
747 remotephases = listkeys(pushop.remote, b'phases')
550 remotephases = listkeys(pushop.remote, b'phases')
748
551
749 if (
552 if (
750 pushop.ui.configbool(b'ui', b'_usedassubrepo')
553 pushop.ui.configbool(b'ui', b'_usedassubrepo')
751 and remotephases # server supports phases
554 and remotephases # server supports phases
752 and not pushop.outgoing.missing # no changesets to be pushed
555 and not pushop.outgoing.missing # no changesets to be pushed
753 and remotephases.get(b'publishing', False)
556 and remotephases.get(b'publishing', False)
754 ):
557 ):
755 # When:
558 # When:
756 # - this is a subrepo push
559 # - this is a subrepo push
757 # - and the remote supports phases
560 # - and the remote supports phases
758 # - and no changesets are to be pushed
561 # - and no changesets are to be pushed
759 # - and remote is publishing
562 # - and remote is publishing
760 # We may be in issue 3781 case!
563 # We may be in issue 3781 case!
761 # We drop the possible phase synchronisation done by
564 # We drop the possible phase synchronisation done by
762 # courtesy to publish changesets possibly locally draft
565 # courtesy to publish changesets possibly locally draft
763 # on the remote.
566 # on the remote.
764 pushop.outdatedphases = []
567 pushop.outdatedphases = []
765 pushop.fallbackoutdatedphases = []
568 pushop.fallbackoutdatedphases = []
766 return
569 return
767
570
768 pushop.remotephases = phases.remotephasessummary(
571 pushop.remotephases = phases.remotephasessummary(
769 pushop.repo, pushop.fallbackheads, remotephases
572 pushop.repo, pushop.fallbackheads, remotephases
770 )
573 )
771 droots = pushop.remotephases.draftroots
574 droots = pushop.remotephases.draftroots
772
575
773 extracond = b''
576 extracond = b''
774 if not pushop.remotephases.publishing:
577 if not pushop.remotephases.publishing:
775 extracond = b' and public()'
578 extracond = b' and public()'
776 revset = b'heads((%%ln::%%ln) %s)' % extracond
579 revset = b'heads((%%ln::%%ln) %s)' % extracond
777 # Get the list of all revs draft on remote but public here.
580 # Get the list of all revs draft on remote but public here.
778 # XXX Beware that the revset breaks if droots is not strictly
581 # XXX Beware that the revset breaks if droots is not strictly
779 # XXX roots; we may want to ensure it is, but that is costly
582 # XXX roots; we may want to ensure it is, but that is costly
780 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
583 fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
781 if not pushop.remotephases.publishing and pushop.publish:
584 if not pushop.remotephases.publishing and pushop.publish:
782 future = list(
585 future = list(
783 unfi.set(
586 unfi.set(
784 b'%ln and (not public() or %ln::)', pushop.futureheads, droots
587 b'%ln and (not public() or %ln::)', pushop.futureheads, droots
785 )
588 )
786 )
589 )
787 elif not outgoing.missing:
590 elif not outgoing.missing:
788 future = fallback
591 future = fallback
789 else:
592 else:
790 # adds changeset we are going to push as draft
593 # adds changeset we are going to push as draft
791 #
594 #
792 # should not be necessary for publishing server, but because of an
595 # should not be necessary for publishing server, but because of an
793 # issue fixed in xxxxx we have to do it anyway.
596 # issue fixed in xxxxx we have to do it anyway.
794 fdroots = list(
597 fdroots = list(
795 unfi.set(b'roots(%ln + %ln::)', outgoing.missing, droots)
598 unfi.set(b'roots(%ln + %ln::)', outgoing.missing, droots)
796 )
599 )
797 fdroots = [f.node() for f in fdroots]
600 fdroots = [f.node() for f in fdroots]
798 future = list(unfi.set(revset, fdroots, pushop.futureheads))
601 future = list(unfi.set(revset, fdroots, pushop.futureheads))
799 pushop.outdatedphases = future
602 pushop.outdatedphases = future
800 pushop.fallbackoutdatedphases = fallback
603 pushop.fallbackoutdatedphases = fallback
801
604
802
605
803 @pushdiscovery(b'obsmarker')
606 @pushdiscovery(b'obsmarker')
804 def _pushdiscoveryobsmarkers(pushop):
607 def _pushdiscoveryobsmarkers(pushop):
805 if not obsolete.isenabled(pushop.repo, obsolete.exchangeopt):
608 if not obsolete.isenabled(pushop.repo, obsolete.exchangeopt):
806 return
609 return
807
610
808 if not pushop.repo.obsstore:
611 if not pushop.repo.obsstore:
809 return
612 return
810
613
811 if b'obsolete' not in listkeys(pushop.remote, b'namespaces'):
614 if b'obsolete' not in listkeys(pushop.remote, b'namespaces'):
812 return
615 return
813
616
814 repo = pushop.repo
617 repo = pushop.repo
815 # very naive computation, which can be quite expensive on big repos.
618 # very naive computation, which can be quite expensive on big repos.
816 # However: evolution is currently slow on them anyway.
619 # However: evolution is currently slow on them anyway.
817 nodes = (c.node() for c in repo.set(b'::%ln', pushop.futureheads))
620 nodes = (c.node() for c in repo.set(b'::%ln', pushop.futureheads))
818 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
621 pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
819
622
820
623
821 @pushdiscovery(b'bookmarks')
624 @pushdiscovery(b'bookmarks')
822 def _pushdiscoverybookmarks(pushop):
625 def _pushdiscoverybookmarks(pushop):
823 ui = pushop.ui
626 ui = pushop.ui
824 repo = pushop.repo.unfiltered()
627 repo = pushop.repo.unfiltered()
825 remote = pushop.remote
628 remote = pushop.remote
826 ui.debug(b"checking for updated bookmarks\n")
629 ui.debug(b"checking for updated bookmarks\n")
827 ancestors = ()
630 ancestors = ()
828 if pushop.revs:
631 if pushop.revs:
829 revnums = pycompat.maplist(repo.changelog.rev, pushop.revs)
632 revnums = pycompat.maplist(repo.changelog.rev, pushop.revs)
830 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
633 ancestors = repo.changelog.ancestors(revnums, inclusive=True)
831
634
832 remotebookmark = bookmod.unhexlifybookmarks(listkeys(remote, b'bookmarks'))
635 remotebookmark = bookmod.unhexlifybookmarks(listkeys(remote, b'bookmarks'))
833
636
834 explicit = {
637 explicit = {
835 repo._bookmarks.expandname(bookmark) for bookmark in pushop.bookmarks
638 repo._bookmarks.expandname(bookmark) for bookmark in pushop.bookmarks
836 }
639 }
837
640
838 comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
641 comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
839 return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)
642 return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)
840
643
841
644
842 def _processcompared(pushop, pushed, explicit, remotebms, comp):
645 def _processcompared(pushop, pushed, explicit, remotebms, comp):
843 """take decision on bookmarks to push to the remote repo
646 """take decision on bookmarks to push to the remote repo
844
647
845 Exists to help extensions alter this behavior.
648 Exists to help extensions alter this behavior.
846 """
649 """
847 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
650 addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp
848
651
849 repo = pushop.repo
652 repo = pushop.repo
850
653
851 for b, scid, dcid in advsrc:
654 for b, scid, dcid in advsrc:
852 if b in explicit:
655 if b in explicit:
853 explicit.remove(b)
656 explicit.remove(b)
854 if not pushed or repo[scid].rev() in pushed:
657 if not pushed or repo[scid].rev() in pushed:
855 pushop.outbookmarks.append((b, dcid, scid))
658 pushop.outbookmarks.append((b, dcid, scid))
856 # search added bookmark
659 # search added bookmark
857 for b, scid, dcid in addsrc:
660 for b, scid, dcid in addsrc:
858 if b in explicit:
661 if b in explicit:
859 explicit.remove(b)
662 explicit.remove(b)
860 if bookmod.isdivergent(b):
663 if bookmod.isdivergent(b):
861 pushop.ui.warn(_(b'cannot push divergent bookmark %s!\n') % b)
664 pushop.ui.warn(_(b'cannot push divergent bookmark %s!\n') % b)
862 pushop.bkresult = 2
665 pushop.bkresult = 2
863 else:
666 else:
864 pushop.outbookmarks.append((b, b'', scid))
667 pushop.outbookmarks.append((b, b'', scid))
865 # search for overwritten bookmark
668 # search for overwritten bookmark
866 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
669 for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
867 if b in explicit:
670 if b in explicit:
868 explicit.remove(b)
671 explicit.remove(b)
869 pushop.outbookmarks.append((b, dcid, scid))
672 pushop.outbookmarks.append((b, dcid, scid))
870 # search for bookmark to delete
673 # search for bookmark to delete
871 for b, scid, dcid in adddst:
674 for b, scid, dcid in adddst:
872 if b in explicit:
675 if b in explicit:
873 explicit.remove(b)
676 explicit.remove(b)
874 # treat as "deleted locally"
677 # treat as "deleted locally"
875 pushop.outbookmarks.append((b, dcid, b''))
678 pushop.outbookmarks.append((b, dcid, b''))
876 # identical bookmarks shouldn't get reported
679 # identical bookmarks shouldn't get reported
877 for b, scid, dcid in same:
680 for b, scid, dcid in same:
878 if b in explicit:
681 if b in explicit:
879 explicit.remove(b)
682 explicit.remove(b)
880
683
881 if explicit:
684 if explicit:
882 explicit = sorted(explicit)
685 explicit = sorted(explicit)
883 # we should probably list all of them
686 # we should probably list all of them
884 pushop.ui.warn(
687 pushop.ui.warn(
885 _(
688 _(
886 b'bookmark %s does not exist on the local '
689 b'bookmark %s does not exist on the local '
887 b'or remote repository!\n'
690 b'or remote repository!\n'
888 )
691 )
889 % explicit[0]
692 % explicit[0]
890 )
693 )
891 pushop.bkresult = 2
694 pushop.bkresult = 2
892
695
893 pushop.outbookmarks.sort()
696 pushop.outbookmarks.sort()
894
697
895
698
896 def _pushcheckoutgoing(pushop):
699 def _pushcheckoutgoing(pushop):
897 outgoing = pushop.outgoing
700 outgoing = pushop.outgoing
898 unfi = pushop.repo.unfiltered()
701 unfi = pushop.repo.unfiltered()
899 if not outgoing.missing:
702 if not outgoing.missing:
900 # nothing to push
703 # nothing to push
901 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
704 scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
902 return False
705 return False
903 # something to push
706 # something to push
904 if not pushop.force:
707 if not pushop.force:
905 # if repo.obsstore == False --> no obsolete
708 # if repo.obsstore == False --> no obsolete
906 # then, save the iteration
709 # then, save the iteration
907 if unfi.obsstore:
710 if unfi.obsstore:
908 # these messages are defined here because of the 80 char limit
711 # these messages are defined here because of the 80 char limit
909 mso = _(b"push includes obsolete changeset: %s!")
712 mso = _(b"push includes obsolete changeset: %s!")
910 mspd = _(b"push includes phase-divergent changeset: %s!")
713 mspd = _(b"push includes phase-divergent changeset: %s!")
911 mscd = _(b"push includes content-divergent changeset: %s!")
714 mscd = _(b"push includes content-divergent changeset: %s!")
912 mst = {
715 mst = {
913 b"orphan": _(b"push includes orphan changeset: %s!"),
716 b"orphan": _(b"push includes orphan changeset: %s!"),
914 b"phase-divergent": mspd,
717 b"phase-divergent": mspd,
915 b"content-divergent": mscd,
718 b"content-divergent": mscd,
916 }
719 }
917 # If we are about to push and there is at least one
720 # If we are about to push and there is at least one
918 # obsolete or unstable changeset in missing, at
721 # obsolete or unstable changeset in missing, at
919 # least one of the missing heads will be obsolete or
722 # least one of the missing heads will be obsolete or
920 # unstable. So checking heads only is ok
723 # unstable. So checking heads only is ok
921 for node in outgoing.ancestorsof:
724 for node in outgoing.ancestorsof:
922 ctx = unfi[node]
725 ctx = unfi[node]
923 if ctx.obsolete():
726 if ctx.obsolete():
924 raise error.Abort(mso % ctx)
727 raise error.Abort(mso % ctx)
925 elif ctx.isunstable():
728 elif ctx.isunstable():
926 # TODO print more than one instability in the abort
729 # TODO print more than one instability in the abort
927 # message
730 # message
928 raise error.Abort(mst[ctx.instabilities()[0]] % ctx)
731 raise error.Abort(mst[ctx.instabilities()[0]] % ctx)
929
732
930 discovery.checkheads(pushop)
733 discovery.checkheads(pushop)
931 return True
734 return True
932
735
933
736
934 # List of names of steps to perform for an outgoing bundle2, order matters.
737 # List of names of steps to perform for an outgoing bundle2, order matters.
935 b2partsgenorder = []
738 b2partsgenorder = []
936
739
937 # Mapping between step name and function
740 # Mapping between step name and function
938 #
741 #
939 # This exists to help extensions wrap steps if necessary
742 # This exists to help extensions wrap steps if necessary
940 b2partsgenmapping = {}
743 b2partsgenmapping = {}
941
744
942
745
943 def b2partsgenerator(stepname, idx=None):
746 def b2partsgenerator(stepname, idx=None):
944 """decorator for function generating bundle2 part
747 """decorator for function generating bundle2 part
945
748
946 The function is added to the step -> function mapping and appended to the
749 The function is added to the step -> function mapping and appended to the
947 list of steps. Beware that decorated functions will be added in order
750 list of steps. Beware that decorated functions will be added in order
948 (this may matter).
751 (this may matter).
949
752
950 You can only use this decorator for new steps; if you want to wrap a step
753 You can only use this decorator for new steps; if you want to wrap a step
951 from an extension, change the b2partsgenmapping dictionary directly."""
754 from an extension, change the b2partsgenmapping dictionary directly."""
952
755
953 def dec(func):
756 def dec(func):
954 assert stepname not in b2partsgenmapping
757 assert stepname not in b2partsgenmapping
955 b2partsgenmapping[stepname] = func
758 b2partsgenmapping[stepname] = func
956 if idx is None:
759 if idx is None:
957 b2partsgenorder.append(stepname)
760 b2partsgenorder.append(stepname)
958 else:
761 else:
959 b2partsgenorder.insert(idx, stepname)
762 b2partsgenorder.insert(idx, stepname)
960 return func
763 return func
961
764
962 return dec
765 return dec
963
766
964
767
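Along the same lines, a sketch of how an extension might register an additional bundle2 part generator with the decorator above; the part type and its content are hypothetical.

from mercurial import exchange

@exchange.b2partsgenerator(b'example-part')
def _pushb2example(pushop, bundler):
    # only emit the (hypothetical) advisory part when something is pushed
    if not pushop.outgoing.missing:
        return
    bundler.newpart(b'example:noop', data=b'', mandatory=False)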
965 def _pushb2ctxcheckheads(pushop, bundler):
768 def _pushb2ctxcheckheads(pushop, bundler):
966 """Generate race condition checking parts
769 """Generate race condition checking parts
967
770
968 Exists as an independent function to aid extensions
771 Exists as an independent function to aid extensions
969 """
772 """
970 # * 'force' does not check for push races,
773 # * 'force' does not check for push races,
971 # * if we don't push anything, there is nothing to check.
774 # * if we don't push anything, there is nothing to check.
972 if not pushop.force and pushop.outgoing.ancestorsof:
775 if not pushop.force and pushop.outgoing.ancestorsof:
973 allowunrelated = b'related' in bundler.capabilities.get(
776 allowunrelated = b'related' in bundler.capabilities.get(
974 b'checkheads', ()
777 b'checkheads', ()
975 )
778 )
976 emptyremote = pushop.pushbranchmap is None
779 emptyremote = pushop.pushbranchmap is None
977 if not allowunrelated or emptyremote:
780 if not allowunrelated or emptyremote:
978 bundler.newpart(b'check:heads', data=iter(pushop.remoteheads))
781 bundler.newpart(b'check:heads', data=iter(pushop.remoteheads))
979 else:
782 else:
980 affected = set()
783 affected = set()
981 for branch, heads in pycompat.iteritems(pushop.pushbranchmap):
784 for branch, heads in pycompat.iteritems(pushop.pushbranchmap):
982 remoteheads, newheads, unsyncedheads, discardedheads = heads
785 remoteheads, newheads, unsyncedheads, discardedheads = heads
983 if remoteheads is not None:
786 if remoteheads is not None:
984 remote = set(remoteheads)
787 remote = set(remoteheads)
985 affected |= set(discardedheads) & remote
788 affected |= set(discardedheads) & remote
986 affected |= remote - set(newheads)
789 affected |= remote - set(newheads)
987 if affected:
790 if affected:
988 data = iter(sorted(affected))
791 data = iter(sorted(affected))
989 bundler.newpart(b'check:updated-heads', data=data)
792 bundler.newpart(b'check:updated-heads', data=data)
990
793
991
794
992 def _pushing(pushop):
795 def _pushing(pushop):
993 """return True if we are pushing anything"""
796 """return True if we are pushing anything"""
994 return bool(
797 return bool(
995 pushop.outgoing.missing
798 pushop.outgoing.missing
996 or pushop.outdatedphases
799 or pushop.outdatedphases
997 or pushop.outobsmarkers
800 or pushop.outobsmarkers
998 or pushop.outbookmarks
801 or pushop.outbookmarks
999 )
802 )
1000
803
1001
804
1002 @b2partsgenerator(b'check-bookmarks')
805 @b2partsgenerator(b'check-bookmarks')
1003 def _pushb2checkbookmarks(pushop, bundler):
806 def _pushb2checkbookmarks(pushop, bundler):
1004 """insert bookmark move checking"""
807 """insert bookmark move checking"""
1005 if not _pushing(pushop) or pushop.force:
808 if not _pushing(pushop) or pushop.force:
1006 return
809 return
1007 b2caps = bundle2.bundle2caps(pushop.remote)
810 b2caps = bundle2.bundle2caps(pushop.remote)
1008 hasbookmarkcheck = b'bookmarks' in b2caps
811 hasbookmarkcheck = b'bookmarks' in b2caps
1009 if not (pushop.outbookmarks and hasbookmarkcheck):
812 if not (pushop.outbookmarks and hasbookmarkcheck):
1010 return
813 return
1011 data = []
814 data = []
1012 for book, old, new in pushop.outbookmarks:
815 for book, old, new in pushop.outbookmarks:
1013 data.append((book, old))
816 data.append((book, old))
1014 checkdata = bookmod.binaryencode(data)
817 checkdata = bookmod.binaryencode(data)
1015 bundler.newpart(b'check:bookmarks', data=checkdata)
818 bundler.newpart(b'check:bookmarks', data=checkdata)
1016
819
1017
820
1018 @b2partsgenerator(b'check-phases')
821 @b2partsgenerator(b'check-phases')
1019 def _pushb2checkphases(pushop, bundler):
822 def _pushb2checkphases(pushop, bundler):
1020 """insert phase move checking"""
823 """insert phase move checking"""
1021 if not _pushing(pushop) or pushop.force:
824 if not _pushing(pushop) or pushop.force:
1022 return
825 return
1023 b2caps = bundle2.bundle2caps(pushop.remote)
826 b2caps = bundle2.bundle2caps(pushop.remote)
1024 hasphaseheads = b'heads' in b2caps.get(b'phases', ())
827 hasphaseheads = b'heads' in b2caps.get(b'phases', ())
1025 if pushop.remotephases is not None and hasphaseheads:
828 if pushop.remotephases is not None and hasphaseheads:
1026 # check that the remote phase has not changed
829 # check that the remote phase has not changed
1027 checks = {p: [] for p in phases.allphases}
830 checks = {p: [] for p in phases.allphases}
1028 checks[phases.public].extend(pushop.remotephases.publicheads)
831 checks[phases.public].extend(pushop.remotephases.publicheads)
1029 checks[phases.draft].extend(pushop.remotephases.draftroots)
832 checks[phases.draft].extend(pushop.remotephases.draftroots)
1030 if any(pycompat.itervalues(checks)):
833 if any(pycompat.itervalues(checks)):
1031 for phase in checks:
834 for phase in checks:
1032 checks[phase].sort()
835 checks[phase].sort()
1033 checkdata = phases.binaryencode(checks)
836 checkdata = phases.binaryencode(checks)
1034 bundler.newpart(b'check:phases', data=checkdata)
837 bundler.newpart(b'check:phases', data=checkdata)
1035
838
1036
839
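The check:phases part is only emitted when there is something to assert about the remote's phases; the guard and sorting above reduce to a small dict-building step. A self-contained sketch of that step, with the phase numbers hard-coded locally (public=0, draft=1, secret=2 are Mercurial's conventional values, stated here as an assumption so the snippet stands alone):

```python
PUBLIC, DRAFT, SECRET = 0, 1, 2      # conventional Mercurial phase numbers
ALL_PHASES = (PUBLIC, DRAFT, SECRET)

def build_phase_checks(public_heads, draft_roots):
    """Group nodes by phase and sort them, as done before binaryencode()."""
    checks = {p: [] for p in ALL_PHASES}
    checks[PUBLIC].extend(public_heads)
    checks[DRAFT].extend(draft_roots)
    if not any(checks.values()):
        return None                   # nothing worth checking remotely
    for phase in checks:
        checks[phase].sort()
    return checks

print(build_phase_checks([b'\x11' * 20], []))
```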
1037 @b2partsgenerator(b'changeset')
840 @b2partsgenerator(b'changeset')
1038 def _pushb2ctx(pushop, bundler):
841 def _pushb2ctx(pushop, bundler):
1039 """handle changegroup push through bundle2
842 """handle changegroup push through bundle2
1040
843
1041 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
844 addchangegroup result is stored in the ``pushop.cgresult`` attribute.
1042 """
845 """
1043 if b'changesets' in pushop.stepsdone:
846 if b'changesets' in pushop.stepsdone:
1044 return
847 return
1045 pushop.stepsdone.add(b'changesets')
848 pushop.stepsdone.add(b'changesets')
1046 # Send known heads to the server for race detection.
849 # Send known heads to the server for race detection.
1047 if not _pushcheckoutgoing(pushop):
850 if not _pushcheckoutgoing(pushop):
1048 return
851 return
1049 pushop.repo.prepushoutgoinghooks(pushop)
852 pushop.repo.prepushoutgoinghooks(pushop)
1050
853
1051 _pushb2ctxcheckheads(pushop, bundler)
854 _pushb2ctxcheckheads(pushop, bundler)
1052
855
1053 b2caps = bundle2.bundle2caps(pushop.remote)
856 b2caps = bundle2.bundle2caps(pushop.remote)
1054 version = b'01'
857 version = b'01'
1055 cgversions = b2caps.get(b'changegroup')
858 cgversions = b2caps.get(b'changegroup')
1056 if cgversions: # 3.1 and 3.2 ship with an empty value
859 if cgversions: # 3.1 and 3.2 ship with an empty value
1057 cgversions = [
860 cgversions = [
1058 v
861 v
1059 for v in cgversions
862 for v in cgversions
1060 if v in changegroup.supportedoutgoingversions(pushop.repo)
863 if v in changegroup.supportedoutgoingversions(pushop.repo)
1061 ]
864 ]
1062 if not cgversions:
865 if not cgversions:
1063 raise error.Abort(_(b'no common changegroup version'))
866 raise error.Abort(_(b'no common changegroup version'))
1064 version = max(cgversions)
867 version = max(cgversions)
1065 cgstream = changegroup.makestream(
868 cgstream = changegroup.makestream(
1066 pushop.repo, pushop.outgoing, version, b'push'
869 pushop.repo, pushop.outgoing, version, b'push'
1067 )
870 )
1068 cgpart = bundler.newpart(b'changegroup', data=cgstream)
871 cgpart = bundler.newpart(b'changegroup', data=cgstream)
1069 if cgversions:
872 if cgversions:
1070 cgpart.addparam(b'version', version)
873 cgpart.addparam(b'version', version)
1071 if scmutil.istreemanifest(pushop.repo):
874 if scmutil.istreemanifest(pushop.repo):
1072 cgpart.addparam(b'treemanifest', b'1')
875 cgpart.addparam(b'treemanifest', b'1')
1073 if b'exp-sidedata-flag' in pushop.repo.requirements:
876 if b'exp-sidedata-flag' in pushop.repo.requirements:
1074 cgpart.addparam(b'exp-sidedata', b'1')
877 cgpart.addparam(b'exp-sidedata', b'1')
1075
878
1076 def handlereply(op):
879 def handlereply(op):
1077 """extract addchangegroup returns from server reply"""
880 """extract addchangegroup returns from server reply"""
1078 cgreplies = op.records.getreplies(cgpart.id)
881 cgreplies = op.records.getreplies(cgpart.id)
1079 assert len(cgreplies[b'changegroup']) == 1
882 assert len(cgreplies[b'changegroup']) == 1
1080 pushop.cgresult = cgreplies[b'changegroup'][0][b'return']
883 pushop.cgresult = cgreplies[b'changegroup'][0][b'return']
1081
884
1082 return handlereply
885 return handlereply
1083
886
1084
887
1085 @b2partsgenerator(b'phase')
888 @b2partsgenerator(b'phase')
1086 def _pushb2phases(pushop, bundler):
889 def _pushb2phases(pushop, bundler):
1087 """handle phase push through bundle2"""
890 """handle phase push through bundle2"""
1088 if b'phases' in pushop.stepsdone:
891 if b'phases' in pushop.stepsdone:
1089 return
892 return
1090 b2caps = bundle2.bundle2caps(pushop.remote)
893 b2caps = bundle2.bundle2caps(pushop.remote)
1091 ui = pushop.repo.ui
894 ui = pushop.repo.ui
1092
895
1093 legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
896 legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
1094 haspushkey = b'pushkey' in b2caps
897 haspushkey = b'pushkey' in b2caps
1095 hasphaseheads = b'heads' in b2caps.get(b'phases', ())
898 hasphaseheads = b'heads' in b2caps.get(b'phases', ())
1096
899
1097 if hasphaseheads and not legacyphase:
900 if hasphaseheads and not legacyphase:
1098 return _pushb2phaseheads(pushop, bundler)
901 return _pushb2phaseheads(pushop, bundler)
1099 elif haspushkey:
902 elif haspushkey:
1100 return _pushb2phasespushkey(pushop, bundler)
903 return _pushb2phasespushkey(pushop, bundler)
1101
904
1102
905
1103 def _pushb2phaseheads(pushop, bundler):
906 def _pushb2phaseheads(pushop, bundler):
1104 """push phase information through a bundle2 - binary part"""
907 """push phase information through a bundle2 - binary part"""
1105 pushop.stepsdone.add(b'phases')
908 pushop.stepsdone.add(b'phases')
1106 if pushop.outdatedphases:
909 if pushop.outdatedphases:
1107 updates = {p: [] for p in phases.allphases}
910 updates = {p: [] for p in phases.allphases}
1108 updates[0].extend(h.node() for h in pushop.outdatedphases)
911 updates[0].extend(h.node() for h in pushop.outdatedphases)
1109 phasedata = phases.binaryencode(updates)
912 phasedata = phases.binaryencode(updates)
1110 bundler.newpart(b'phase-heads', data=phasedata)
913 bundler.newpart(b'phase-heads', data=phasedata)
1111
914
1112
915
1113 def _pushb2phasespushkey(pushop, bundler):
916 def _pushb2phasespushkey(pushop, bundler):
1114 """push phase information through a bundle2 - pushkey part"""
917 """push phase information through a bundle2 - pushkey part"""
1115 pushop.stepsdone.add(b'phases')
918 pushop.stepsdone.add(b'phases')
1116 part2node = []
919 part2node = []
1117
920
1118 def handlefailure(pushop, exc):
921 def handlefailure(pushop, exc):
1119 targetid = int(exc.partid)
922 targetid = int(exc.partid)
1120 for partid, node in part2node:
923 for partid, node in part2node:
1121 if partid == targetid:
924 if partid == targetid:
1122 raise error.Abort(_(b'updating %s to public failed') % node)
925 raise error.Abort(_(b'updating %s to public failed') % node)
1123
926
1124 enc = pushkey.encode
927 enc = pushkey.encode
1125 for newremotehead in pushop.outdatedphases:
928 for newremotehead in pushop.outdatedphases:
1126 part = bundler.newpart(b'pushkey')
929 part = bundler.newpart(b'pushkey')
1127 part.addparam(b'namespace', enc(b'phases'))
930 part.addparam(b'namespace', enc(b'phases'))
1128 part.addparam(b'key', enc(newremotehead.hex()))
931 part.addparam(b'key', enc(newremotehead.hex()))
1129 part.addparam(b'old', enc(b'%d' % phases.draft))
932 part.addparam(b'old', enc(b'%d' % phases.draft))
1130 part.addparam(b'new', enc(b'%d' % phases.public))
933 part.addparam(b'new', enc(b'%d' % phases.public))
1131 part2node.append((part.id, newremotehead))
934 part2node.append((part.id, newremotehead))
1132 pushop.pkfailcb[part.id] = handlefailure
935 pushop.pkfailcb[part.id] = handlefailure
1133
936
1134 def handlereply(op):
937 def handlereply(op):
1135 for partid, node in part2node:
938 for partid, node in part2node:
1136 partrep = op.records.getreplies(partid)
939 partrep = op.records.getreplies(partid)
1137 results = partrep[b'pushkey']
940 results = partrep[b'pushkey']
1138 assert len(results) <= 1
941 assert len(results) <= 1
1139 msg = None
942 msg = None
1140 if not results:
943 if not results:
1141 msg = _(b'server ignored update of %s to public!\n') % node
944 msg = _(b'server ignored update of %s to public!\n') % node
1142 elif not int(results[0][b'return']):
945 elif not int(results[0][b'return']):
1143 msg = _(b'updating %s to public failed!\n') % node
946 msg = _(b'updating %s to public failed!\n') % node
1144 if msg is not None:
947 if msg is not None:
1145 pushop.ui.warn(msg)
948 pushop.ui.warn(msg)
1146
949
1147 return handlereply
950 return handlereply
1148
951
1149
952
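The pushkey fallback above sends one part per outdated head, carrying namespace/key/old/new parameters and remembering part.id -> node so a later failure can be mapped back to the head that caused it. A small sketch of that bookkeeping with a toy part class (toy names only, not mercurial's bundler API; the real code also URL-quotes the values through pushkey.encode, which is omitted here):

```python
import itertools

PUBLIC, DRAFT = 0, 1   # conventional phase numbers (public=0, draft=1)

class ToyPart(object):
    """Stand-in for a bundle2 part: an id plus mandatory parameters."""
    _ids = itertools.count()

    def __init__(self, parttype):
        self.id = next(ToyPart._ids)
        self.type = parttype
        self.params = {}

    def addparam(self, key, value):
        self.params[key] = value

def make_phase_pushkey_parts(outdated_hexnodes):
    """One pushkey part per head that should move from draft to public."""
    part2node = []
    parts = []
    for hexnode in outdated_hexnodes:
        part = ToyPart(b'pushkey')
        part.addparam(b'namespace', b'phases')
        part.addparam(b'key', hexnode)
        part.addparam(b'old', b'%d' % DRAFT)
        part.addparam(b'new', b'%d' % PUBLIC)
        parts.append(part)
        part2node.append((part.id, hexnode))
    return parts, part2node

parts, mapping = make_phase_pushkey_parts([b'aa' * 20, b'bb' * 20])
print([(p.id, p.params[b'key'][:4]) for p in parts], mapping[0][0])
```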
1150 @b2partsgenerator(b'obsmarkers')
953 @b2partsgenerator(b'obsmarkers')
1151 def _pushb2obsmarkers(pushop, bundler):
954 def _pushb2obsmarkers(pushop, bundler):
1152 if b'obsmarkers' in pushop.stepsdone:
955 if b'obsmarkers' in pushop.stepsdone:
1153 return
956 return
1154 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
957 remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
1155 if obsolete.commonversion(remoteversions) is None:
958 if obsolete.commonversion(remoteversions) is None:
1156 return
959 return
1157 pushop.stepsdone.add(b'obsmarkers')
960 pushop.stepsdone.add(b'obsmarkers')
1158 if pushop.outobsmarkers:
961 if pushop.outobsmarkers:
1159 markers = obsutil.sortedmarkers(pushop.outobsmarkers)
962 markers = obsutil.sortedmarkers(pushop.outobsmarkers)
1160 bundle2.buildobsmarkerspart(bundler, markers)
963 bundle2.buildobsmarkerspart(bundler, markers)
1161
964
1162
965
1163 @b2partsgenerator(b'bookmarks')
966 @b2partsgenerator(b'bookmarks')
1164 def _pushb2bookmarks(pushop, bundler):
967 def _pushb2bookmarks(pushop, bundler):
1165 """handle bookmark push through bundle2"""
968 """handle bookmark push through bundle2"""
1166 if b'bookmarks' in pushop.stepsdone:
969 if b'bookmarks' in pushop.stepsdone:
1167 return
970 return
1168 b2caps = bundle2.bundle2caps(pushop.remote)
971 b2caps = bundle2.bundle2caps(pushop.remote)
1169
972
1170 legacy = pushop.repo.ui.configlist(b'devel', b'legacy.exchange')
973 legacy = pushop.repo.ui.configlist(b'devel', b'legacy.exchange')
1171 legacybooks = b'bookmarks' in legacy
974 legacybooks = b'bookmarks' in legacy
1172
975
1173 if not legacybooks and b'bookmarks' in b2caps:
976 if not legacybooks and b'bookmarks' in b2caps:
1174 return _pushb2bookmarkspart(pushop, bundler)
977 return _pushb2bookmarkspart(pushop, bundler)
1175 elif b'pushkey' in b2caps:
978 elif b'pushkey' in b2caps:
1176 return _pushb2bookmarkspushkey(pushop, bundler)
979 return _pushb2bookmarkspushkey(pushop, bundler)
1177
980
1178
981
1179 def _bmaction(old, new):
982 def _bmaction(old, new):
1180 """small utility for bookmark pushing"""
983 """small utility for bookmark pushing"""
1181 if not old:
984 if not old:
1182 return b'export'
985 return b'export'
1183 elif not new:
986 elif not new:
1184 return b'delete'
987 return b'delete'
1185 return b'update'
988 return b'update'
1186
989
1187
990
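The helper above maps the (old, new) bookmark pair to the wire action name; the same three-way decision reappears in the pushkey path below. A quick standalone illustration (None or an empty value stands for "no node", as in the callers):

```python
def bmaction(old, new):
    """Mirror of _bmaction: decide how a bookmark move is reported."""
    if not old:
        return b'export'   # bookmark did not exist on the remote yet
    elif not new:
        return b'delete'   # bookmark is being removed remotely
    return b'update'       # both sides have a node: plain move

assert bmaction(None, b'ff' * 20) == b'export'
assert bmaction(b'ff' * 20, None) == b'delete'
assert bmaction(b'aa' * 20, b'bb' * 20) == b'update'
```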
1188 def _abortonsecretctx(pushop, node, b):
991 def _abortonsecretctx(pushop, node, b):
1189 """abort if a given bookmark points to a secret changeset"""
992 """abort if a given bookmark points to a secret changeset"""
1190 if node and pushop.repo[node].phase() == phases.secret:
993 if node and pushop.repo[node].phase() == phases.secret:
1191 raise error.Abort(
994 raise error.Abort(
1192 _(b'cannot push bookmark %s as it points to a secret changeset') % b
995 _(b'cannot push bookmark %s as it points to a secret changeset') % b
1193 )
996 )
1194
997
1195
998
1196 def _pushb2bookmarkspart(pushop, bundler):
999 def _pushb2bookmarkspart(pushop, bundler):
1197 pushop.stepsdone.add(b'bookmarks')
1000 pushop.stepsdone.add(b'bookmarks')
1198 if not pushop.outbookmarks:
1001 if not pushop.outbookmarks:
1199 return
1002 return
1200
1003
1201 allactions = []
1004 allactions = []
1202 data = []
1005 data = []
1203 for book, old, new in pushop.outbookmarks:
1006 for book, old, new in pushop.outbookmarks:
1204 _abortonsecretctx(pushop, new, book)
1007 _abortonsecretctx(pushop, new, book)
1205 data.append((book, new))
1008 data.append((book, new))
1206 allactions.append((book, _bmaction(old, new)))
1009 allactions.append((book, _bmaction(old, new)))
1207 checkdata = bookmod.binaryencode(data)
1010 checkdata = bookmod.binaryencode(data)
1208 bundler.newpart(b'bookmarks', data=checkdata)
1011 bundler.newpart(b'bookmarks', data=checkdata)
1209
1012
1210 def handlereply(op):
1013 def handlereply(op):
1211 ui = pushop.ui
1014 ui = pushop.ui
1212 # if success
1015 # if success
1213 for book, action in allactions:
1016 for book, action in allactions:
1214 ui.status(bookmsgmap[action][0] % book)
1017 ui.status(bookmsgmap[action][0] % book)
1215
1018
1216 return handlereply
1019 return handlereply
1217
1020
1218
1021
1219 def _pushb2bookmarkspushkey(pushop, bundler):
1022 def _pushb2bookmarkspushkey(pushop, bundler):
1220 pushop.stepsdone.add(b'bookmarks')
1023 pushop.stepsdone.add(b'bookmarks')
1221 part2book = []
1024 part2book = []
1222 enc = pushkey.encode
1025 enc = pushkey.encode
1223
1026
1224 def handlefailure(pushop, exc):
1027 def handlefailure(pushop, exc):
1225 targetid = int(exc.partid)
1028 targetid = int(exc.partid)
1226 for partid, book, action in part2book:
1029 for partid, book, action in part2book:
1227 if partid == targetid:
1030 if partid == targetid:
1228 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
1031 raise error.Abort(bookmsgmap[action][1].rstrip() % book)
1229 # we should not be called for a part we did not generate
1230 assert False
1033 assert False
1231
1034
1232 for book, old, new in pushop.outbookmarks:
1035 for book, old, new in pushop.outbookmarks:
1233 _abortonsecretctx(pushop, new, book)
1036 _abortonsecretctx(pushop, new, book)
1234 part = bundler.newpart(b'pushkey')
1037 part = bundler.newpart(b'pushkey')
1235 part.addparam(b'namespace', enc(b'bookmarks'))
1038 part.addparam(b'namespace', enc(b'bookmarks'))
1236 part.addparam(b'key', enc(book))
1039 part.addparam(b'key', enc(book))
1237 part.addparam(b'old', enc(hex(old)))
1040 part.addparam(b'old', enc(hex(old)))
1238 part.addparam(b'new', enc(hex(new)))
1041 part.addparam(b'new', enc(hex(new)))
1239 action = b'update'
1042 action = b'update'
1240 if not old:
1043 if not old:
1241 action = b'export'
1044 action = b'export'
1242 elif not new:
1045 elif not new:
1243 action = b'delete'
1046 action = b'delete'
1244 part2book.append((part.id, book, action))
1047 part2book.append((part.id, book, action))
1245 pushop.pkfailcb[part.id] = handlefailure
1048 pushop.pkfailcb[part.id] = handlefailure
1246
1049
1247 def handlereply(op):
1050 def handlereply(op):
1248 ui = pushop.ui
1051 ui = pushop.ui
1249 for partid, book, action in part2book:
1052 for partid, book, action in part2book:
1250 partrep = op.records.getreplies(partid)
1053 partrep = op.records.getreplies(partid)
1251 results = partrep[b'pushkey']
1054 results = partrep[b'pushkey']
1252 assert len(results) <= 1
1055 assert len(results) <= 1
1253 if not results:
1056 if not results:
1254 pushop.ui.warn(_(b'server ignored bookmark %s update\n') % book)
1057 pushop.ui.warn(_(b'server ignored bookmark %s update\n') % book)
1255 else:
1058 else:
1256 ret = int(results[0][b'return'])
1059 ret = int(results[0][b'return'])
1257 if ret:
1060 if ret:
1258 ui.status(bookmsgmap[action][0] % book)
1061 ui.status(bookmsgmap[action][0] % book)
1259 else:
1062 else:
1260 ui.warn(bookmsgmap[action][1] % book)
1063 ui.warn(bookmsgmap[action][1] % book)
1261 if pushop.bkresult is not None:
1064 if pushop.bkresult is not None:
1262 pushop.bkresult = 1
1065 pushop.bkresult = 1
1263
1066
1264 return handlereply
1067 return handlereply
1265
1068
1266
1069
1267 @b2partsgenerator(b'pushvars', idx=0)
1070 @b2partsgenerator(b'pushvars', idx=0)
1268 def _getbundlesendvars(pushop, bundler):
1071 def _getbundlesendvars(pushop, bundler):
1269 '''send shellvars via bundle2'''
1072 '''send shellvars via bundle2'''
1270 pushvars = pushop.pushvars
1073 pushvars = pushop.pushvars
1271 if pushvars:
1074 if pushvars:
1272 shellvars = {}
1075 shellvars = {}
1273 for raw in pushvars:
1076 for raw in pushvars:
1274 if b'=' not in raw:
1077 if b'=' not in raw:
1275 msg = (
1078 msg = (
1276 b"unable to parse variable '%s', should follow "
1079 b"unable to parse variable '%s', should follow "
1277 b"'KEY=VALUE' or 'KEY=' format"
1080 b"'KEY=VALUE' or 'KEY=' format"
1278 )
1081 )
1279 raise error.Abort(msg % raw)
1082 raise error.Abort(msg % raw)
1280 k, v = raw.split(b'=', 1)
1083 k, v = raw.split(b'=', 1)
1281 shellvars[k] = v
1084 shellvars[k] = v
1282
1085
1283 part = bundler.newpart(b'pushvars')
1086 part = bundler.newpart(b'pushvars')
1284
1087
1285 for key, value in pycompat.iteritems(shellvars):
1088 for key, value in pycompat.iteritems(shellvars):
1286 part.addparam(key, value, mandatory=False)
1089 part.addparam(key, value, mandatory=False)
1287
1090
1288
1091
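Pushvars arrive as raw KEY=VALUE strings (for example via hg push --pushvars); the loop above splits on the first '=' and refuses anything without one. A minimal standalone version of that parsing, including the empty-value case:

```python
def parse_pushvars(raw_vars):
    """Parse b'KEY=VALUE' / b'KEY=' strings into a dict, as above."""
    shellvars = {}
    for raw in raw_vars:
        if b'=' not in raw:
            raise ValueError(
                b"unable to parse variable '%s', should follow "
                b"'KEY=VALUE' or 'KEY=' format" % raw
            )
        k, v = raw.split(b'=', 1)
        shellvars[k] = v
    return shellvars

print(parse_pushvars([b'DEBUG=1', b'BYPASS_REVIEW=', b'MSG=a=b']))
# {b'DEBUG': b'1', b'BYPASS_REVIEW': b'', b'MSG': b'a=b'}
```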
1289 def _pushbundle2(pushop):
1092 def _pushbundle2(pushop):
1290 """push data to the remote using bundle2
1093 """push data to the remote using bundle2
1291
1094
1292 The only currently supported type of data is changegroup but this will
1095 The only currently supported type of data is changegroup but this will
1293 evolve in the future."""
1096 evolve in the future."""
1294 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
1097 bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
1295 pushback = pushop.trmanager and pushop.ui.configbool(
1098 pushback = pushop.trmanager and pushop.ui.configbool(
1296 b'experimental', b'bundle2.pushback'
1099 b'experimental', b'bundle2.pushback'
1297 )
1100 )
1298
1101
1299 # create reply capability
1102 # create reply capability
1300 capsblob = bundle2.encodecaps(
1103 capsblob = bundle2.encodecaps(
1301 bundle2.getrepocaps(pushop.repo, allowpushback=pushback, role=b'client')
1104 bundle2.getrepocaps(pushop.repo, allowpushback=pushback, role=b'client')
1302 )
1105 )
1303 bundler.newpart(b'replycaps', data=capsblob)
1106 bundler.newpart(b'replycaps', data=capsblob)
1304 replyhandlers = []
1107 replyhandlers = []
1305 for partgenname in b2partsgenorder:
1108 for partgenname in b2partsgenorder:
1306 partgen = b2partsgenmapping[partgenname]
1109 partgen = b2partsgenmapping[partgenname]
1307 ret = partgen(pushop, bundler)
1110 ret = partgen(pushop, bundler)
1308 if callable(ret):
1111 if callable(ret):
1309 replyhandlers.append(ret)
1112 replyhandlers.append(ret)
1310 # do not push if nothing to push
1113 # do not push if nothing to push
1311 if bundler.nbparts <= 1:
1114 if bundler.nbparts <= 1:
1312 return
1115 return
1313 stream = util.chunkbuffer(bundler.getchunks())
1116 stream = util.chunkbuffer(bundler.getchunks())
1314 try:
1117 try:
1315 try:
1118 try:
1316 with pushop.remote.commandexecutor() as e:
1119 with pushop.remote.commandexecutor() as e:
1317 reply = e.callcommand(
1120 reply = e.callcommand(
1318 b'unbundle',
1121 b'unbundle',
1319 {
1122 {
1320 b'bundle': stream,
1123 b'bundle': stream,
1321 b'heads': [b'force'],
1124 b'heads': [b'force'],
1322 b'url': pushop.remote.url(),
1125 b'url': pushop.remote.url(),
1323 },
1126 },
1324 ).result()
1127 ).result()
1325 except error.BundleValueError as exc:
1128 except error.BundleValueError as exc:
1326 raise error.Abort(_(b'missing support for %s') % exc)
1129 raise error.Abort(_(b'missing support for %s') % exc)
1327 try:
1130 try:
1328 trgetter = None
1131 trgetter = None
1329 if pushback:
1132 if pushback:
1330 trgetter = pushop.trmanager.transaction
1133 trgetter = pushop.trmanager.transaction
1331 op = bundle2.processbundle(pushop.repo, reply, trgetter)
1134 op = bundle2.processbundle(pushop.repo, reply, trgetter)
1332 except error.BundleValueError as exc:
1135 except error.BundleValueError as exc:
1333 raise error.Abort(_(b'missing support for %s') % exc)
1136 raise error.Abort(_(b'missing support for %s') % exc)
1334 except bundle2.AbortFromPart as exc:
1137 except bundle2.AbortFromPart as exc:
1335 pushop.ui.status(_(b'remote: %s\n') % exc)
1138 pushop.ui.status(_(b'remote: %s\n') % exc)
1336 if exc.hint is not None:
1139 if exc.hint is not None:
1337 pushop.ui.status(_(b'remote: %s\n') % (b'(%s)' % exc.hint))
1140 pushop.ui.status(_(b'remote: %s\n') % (b'(%s)' % exc.hint))
1338 raise error.Abort(_(b'push failed on remote'))
1141 raise error.Abort(_(b'push failed on remote'))
1339 except error.PushkeyFailed as exc:
1142 except error.PushkeyFailed as exc:
1340 partid = int(exc.partid)
1143 partid = int(exc.partid)
1341 if partid not in pushop.pkfailcb:
1144 if partid not in pushop.pkfailcb:
1342 raise
1145 raise
1343 pushop.pkfailcb[partid](pushop, exc)
1146 pushop.pkfailcb[partid](pushop, exc)
1344 for rephand in replyhandlers:
1147 for rephand in replyhandlers:
1345 rephand(op)
1148 rephand(op)
1346
1149
1347
1150
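_pushbundle2 walks b2partsgenorder and calls each registered generator; a generator contributes parts to the bundler and may return a callable that later handles the server reply. The decorator-plus-registry mechanics behind @b2partsgenerator can be sketched independently (toy names, not mercurial's actual implementation; the list of appended strings stands in for the bundler):

```python
# toy registry mirroring b2partsgenorder / b2partsgenmapping
partsgenorder = []
partsgenmapping = {}

def partsgenerator(stepname, idx=None):
    """Register a part generator under ``stepname`` (optionally at ``idx``)."""
    def dec(func):
        assert stepname not in partsgenmapping
        partsgenmapping[stepname] = func
        if idx is None:
            partsgenorder.append(stepname)
        else:
            partsgenorder.insert(idx, stepname)
        return func
    return dec

@partsgenerator(b'changeset')
def gen_changesets(pushop, bundler):
    bundler.append(b'changegroup')
    return lambda op: print('changegroup reply handled')  # reply handler

@partsgenerator(b'pushvars', idx=0)   # forced to run first, as above
def gen_pushvars(pushop, bundler):
    bundler.append(b'pushvars')       # no reply handler needed

bundler = []
replyhandlers = []
for name in partsgenorder:
    ret = partsgenmapping[name](None, bundler)
    if callable(ret):
        replyhandlers.append(ret)
print(partsgenorder, bundler, len(replyhandlers))
# [b'pushvars', b'changeset'] [b'pushvars', b'changegroup'] 1
```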
1348 def _pushchangeset(pushop):
1151 def _pushchangeset(pushop):
1349 """Make the actual push of changeset bundle to remote repo"""
1152 """Make the actual push of changeset bundle to remote repo"""
1350 if b'changesets' in pushop.stepsdone:
1153 if b'changesets' in pushop.stepsdone:
1351 return
1154 return
1352 pushop.stepsdone.add(b'changesets')
1155 pushop.stepsdone.add(b'changesets')
1353 if not _pushcheckoutgoing(pushop):
1156 if not _pushcheckoutgoing(pushop):
1354 return
1157 return
1355
1158
1356 # Should have verified this in push().
1159 # Should have verified this in push().
1357 assert pushop.remote.capable(b'unbundle')
1160 assert pushop.remote.capable(b'unbundle')
1358
1161
1359 pushop.repo.prepushoutgoinghooks(pushop)
1162 pushop.repo.prepushoutgoinghooks(pushop)
1360 outgoing = pushop.outgoing
1163 outgoing = pushop.outgoing
1361 # TODO: get bundlecaps from remote
1164 # TODO: get bundlecaps from remote
1362 bundlecaps = None
1165 bundlecaps = None
1363 # create a changegroup from local
1166 # create a changegroup from local
1364 if pushop.revs is None and not (
1167 if pushop.revs is None and not (
1365 outgoing.excluded or pushop.repo.changelog.filteredrevs
1168 outgoing.excluded or pushop.repo.changelog.filteredrevs
1366 ):
1169 ):
1367 # push everything,
1170 # push everything,
1368 # use the fast path, no race possible on push
1171 # use the fast path, no race possible on push
1369 cg = changegroup.makechangegroup(
1172 cg = changegroup.makechangegroup(
1370 pushop.repo,
1173 pushop.repo,
1371 outgoing,
1174 outgoing,
1372 b'01',
1175 b'01',
1373 b'push',
1176 b'push',
1374 fastpath=True,
1177 fastpath=True,
1375 bundlecaps=bundlecaps,
1178 bundlecaps=bundlecaps,
1376 )
1179 )
1377 else:
1180 else:
1378 cg = changegroup.makechangegroup(
1181 cg = changegroup.makechangegroup(
1379 pushop.repo, outgoing, b'01', b'push', bundlecaps=bundlecaps
1182 pushop.repo, outgoing, b'01', b'push', bundlecaps=bundlecaps
1380 )
1183 )
1381
1184
1382 # apply changegroup to remote
1185 # apply changegroup to remote
1383 # local repo finds heads on server, finds out what
1186 # local repo finds heads on server, finds out what
1384 # revs it must push. once revs transferred, if server
1187 # revs it must push. once revs transferred, if server
1385 # finds it has different heads (someone else won
1188 # finds it has different heads (someone else won
1386 # commit/push race), server aborts.
1189 # commit/push race), server aborts.
1387 if pushop.force:
1190 if pushop.force:
1388 remoteheads = [b'force']
1191 remoteheads = [b'force']
1389 else:
1192 else:
1390 remoteheads = pushop.remoteheads
1193 remoteheads = pushop.remoteheads
1391 # ssh: return remote's addchangegroup()
1194 # ssh: return remote's addchangegroup()
1392 # http: return remote's addchangegroup() or 0 for error
1195 # http: return remote's addchangegroup() or 0 for error
1393 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads, pushop.repo.url())
1196 pushop.cgresult = pushop.remote.unbundle(cg, remoteheads, pushop.repo.url())
1394
1197
1395
1198
1396 def _pushsyncphase(pushop):
1199 def _pushsyncphase(pushop):
1397 """synchronise phase information locally and remotely"""
1200 """synchronise phase information locally and remotely"""
1398 cheads = pushop.commonheads
1201 cheads = pushop.commonheads
1399 # even when we don't push, exchanging phase data is useful
1202 # even when we don't push, exchanging phase data is useful
1400 remotephases = listkeys(pushop.remote, b'phases')
1203 remotephases = listkeys(pushop.remote, b'phases')
1401 if (
1204 if (
1402 pushop.ui.configbool(b'ui', b'_usedassubrepo')
1205 pushop.ui.configbool(b'ui', b'_usedassubrepo')
1403 and remotephases # server supports phases
1206 and remotephases # server supports phases
1404 and pushop.cgresult is None # nothing was pushed
1207 and pushop.cgresult is None # nothing was pushed
1405 and remotephases.get(b'publishing', False)
1208 and remotephases.get(b'publishing', False)
1406 ):
1209 ):
1407 # When:
1408 # - this is a subrepo push
1409 # - and the remote supports phases
1410 # - and no changeset was pushed
1411 # - and the remote is publishing
1412 # we may be in the issue 3871 case!
1413 # We drop the phase synchronisation normally done as a courtesy and
1414 # instead publish changesets that are possibly draft locally, since
1415 # they live on a publishing remote.
1416 remotephases = {b'publishing': b'True'}
1219 remotephases = {b'publishing': b'True'}
1417 if not remotephases: # old server or public only reply from non-publishing
1220 if not remotephases: # old server or public only reply from non-publishing
1418 _localphasemove(pushop, cheads)
1221 _localphasemove(pushop, cheads)
1419 # don't push any phase data as there is nothing to push
1222 # don't push any phase data as there is nothing to push
1420 else:
1223 else:
1421 ana = phases.analyzeremotephases(pushop.repo, cheads, remotephases)
1224 ana = phases.analyzeremotephases(pushop.repo, cheads, remotephases)
1422 pheads, droots = ana
1225 pheads, droots = ana
1423 ### Apply remote phase on local
1226 ### Apply remote phase on local
1424 if remotephases.get(b'publishing', False):
1227 if remotephases.get(b'publishing', False):
1425 _localphasemove(pushop, cheads)
1228 _localphasemove(pushop, cheads)
1426 else: # publish = False
1229 else: # publish = False
1427 _localphasemove(pushop, pheads)
1230 _localphasemove(pushop, pheads)
1428 _localphasemove(pushop, cheads, phases.draft)
1231 _localphasemove(pushop, cheads, phases.draft)
1429 ### Apply local phase on remote
1232 ### Apply local phase on remote
1430
1233
1431 if pushop.cgresult:
1234 if pushop.cgresult:
1432 if b'phases' in pushop.stepsdone:
1235 if b'phases' in pushop.stepsdone:
1433 # phases already pushed though bundle2
1236 # phases already pushed though bundle2
1434 return
1237 return
1435 outdated = pushop.outdatedphases
1238 outdated = pushop.outdatedphases
1436 else:
1239 else:
1437 outdated = pushop.fallbackoutdatedphases
1240 outdated = pushop.fallbackoutdatedphases
1438
1241
1439 pushop.stepsdone.add(b'phases')
1242 pushop.stepsdone.add(b'phases')
1440
1243
1441 # filter heads already turned public by the push
1244 # filter heads already turned public by the push
1442 outdated = [c for c in outdated if c.node() not in pheads]
1245 outdated = [c for c in outdated if c.node() not in pheads]
1443 # fallback to independent pushkey command
1246 # fallback to independent pushkey command
1444 for newremotehead in outdated:
1247 for newremotehead in outdated:
1445 with pushop.remote.commandexecutor() as e:
1248 with pushop.remote.commandexecutor() as e:
1446 r = e.callcommand(
1249 r = e.callcommand(
1447 b'pushkey',
1250 b'pushkey',
1448 {
1251 {
1449 b'namespace': b'phases',
1252 b'namespace': b'phases',
1450 b'key': newremotehead.hex(),
1253 b'key': newremotehead.hex(),
1451 b'old': b'%d' % phases.draft,
1254 b'old': b'%d' % phases.draft,
1452 b'new': b'%d' % phases.public,
1255 b'new': b'%d' % phases.public,
1453 },
1256 },
1454 ).result()
1257 ).result()
1455
1258
1456 if not r:
1259 if not r:
1457 pushop.ui.warn(
1260 pushop.ui.warn(
1458 _(b'updating %s to public failed!\n') % newremotehead
1261 _(b'updating %s to public failed!\n') % newremotehead
1459 )
1262 )
1460
1263
1461
1264
1462 def _localphasemove(pushop, nodes, phase=phases.public):
1265 def _localphasemove(pushop, nodes, phase=phases.public):
1463 """move <nodes> to <phase> in the local source repo"""
1266 """move <nodes> to <phase> in the local source repo"""
1464 if pushop.trmanager:
1267 if pushop.trmanager:
1465 phases.advanceboundary(
1268 phases.advanceboundary(
1466 pushop.repo, pushop.trmanager.transaction(), phase, nodes
1269 pushop.repo, pushop.trmanager.transaction(), phase, nodes
1467 )
1270 )
1468 else:
1271 else:
1469 # repo is not locked, do not change any phases!
1272 # repo is not locked, do not change any phases!
1470 # Informs the user that phases should have been moved when
1273 # Informs the user that phases should have been moved when
1471 # applicable.
1274 # applicable.
1472 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1275 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1473 phasestr = phases.phasenames[phase]
1276 phasestr = phases.phasenames[phase]
1474 if actualmoves:
1277 if actualmoves:
1475 pushop.ui.status(
1278 pushop.ui.status(
1476 _(
1279 _(
1477 b'cannot lock source repo, skipping '
1280 b'cannot lock source repo, skipping '
1478 b'local %s phase update\n'
1281 b'local %s phase update\n'
1479 )
1282 )
1480 % phasestr
1283 % phasestr
1481 )
1284 )
1482
1285
1483
1286
1484 def _pushobsolete(pushop):
1287 def _pushobsolete(pushop):
1485 """utility function to push obsolete markers to a remote"""
1288 """utility function to push obsolete markers to a remote"""
1486 if b'obsmarkers' in pushop.stepsdone:
1289 if b'obsmarkers' in pushop.stepsdone:
1487 return
1290 return
1488 repo = pushop.repo
1291 repo = pushop.repo
1489 remote = pushop.remote
1292 remote = pushop.remote
1490 pushop.stepsdone.add(b'obsmarkers')
1293 pushop.stepsdone.add(b'obsmarkers')
1491 if pushop.outobsmarkers:
1294 if pushop.outobsmarkers:
1492 pushop.ui.debug(b'try to push obsolete markers to remote\n')
1295 pushop.ui.debug(b'try to push obsolete markers to remote\n')
1493 rslts = []
1296 rslts = []
1494 markers = obsutil.sortedmarkers(pushop.outobsmarkers)
1297 markers = obsutil.sortedmarkers(pushop.outobsmarkers)
1495 remotedata = obsolete._pushkeyescape(markers)
1298 remotedata = obsolete._pushkeyescape(markers)
1496 for key in sorted(remotedata, reverse=True):
1299 for key in sorted(remotedata, reverse=True):
1497 # reverse sort to ensure we end with dump0
1300 # reverse sort to ensure we end with dump0
1498 data = remotedata[key]
1301 data = remotedata[key]
1499 rslts.append(remote.pushkey(b'obsolete', key, b'', data))
1302 rslts.append(remote.pushkey(b'obsolete', key, b'', data))
1500 if [r for r in rslts if not r]:
1303 if [r for r in rslts if not r]:
1501 msg = _(b'failed to push some obsolete markers!\n')
1304 msg = _(b'failed to push some obsolete markers!\n')
1502 repo.ui.warn(msg)
1305 repo.ui.warn(msg)
1503
1306
1504
1307
1505 def _pushbookmark(pushop):
1308 def _pushbookmark(pushop):
1506 """Update bookmark position on remote"""
1309 """Update bookmark position on remote"""
1507 if pushop.cgresult == 0 or b'bookmarks' in pushop.stepsdone:
1310 if pushop.cgresult == 0 or b'bookmarks' in pushop.stepsdone:
1508 return
1311 return
1509 pushop.stepsdone.add(b'bookmarks')
1312 pushop.stepsdone.add(b'bookmarks')
1510 ui = pushop.ui
1313 ui = pushop.ui
1511 remote = pushop.remote
1314 remote = pushop.remote
1512
1315
1513 for b, old, new in pushop.outbookmarks:
1316 for b, old, new in pushop.outbookmarks:
1514 action = b'update'
1317 action = b'update'
1515 if not old:
1318 if not old:
1516 action = b'export'
1319 action = b'export'
1517 elif not new:
1320 elif not new:
1518 action = b'delete'
1321 action = b'delete'
1519
1322
1520 with remote.commandexecutor() as e:
1323 with remote.commandexecutor() as e:
1521 r = e.callcommand(
1324 r = e.callcommand(
1522 b'pushkey',
1325 b'pushkey',
1523 {
1326 {
1524 b'namespace': b'bookmarks',
1327 b'namespace': b'bookmarks',
1525 b'key': b,
1328 b'key': b,
1526 b'old': hex(old),
1329 b'old': hex(old),
1527 b'new': hex(new),
1330 b'new': hex(new),
1528 },
1331 },
1529 ).result()
1332 ).result()
1530
1333
1531 if r:
1334 if r:
1532 ui.status(bookmsgmap[action][0] % b)
1335 ui.status(bookmsgmap[action][0] % b)
1533 else:
1336 else:
1534 ui.warn(bookmsgmap[action][1] % b)
1337 ui.warn(bookmsgmap[action][1] % b)
1535 # discovery can have set the value from an invalid entry
1536 if pushop.bkresult is not None:
1339 if pushop.bkresult is not None:
1537 pushop.bkresult = 1
1340 pushop.bkresult = 1
1538
1341
1539
1342
1540 class pulloperation(object):
1343 class pulloperation(object):
1541 """An object that represents a single pull operation
1542
1543 Its purpose is to carry pull-related state and very common operations.
1544
1545 A new one should be created at the beginning of each pull and discarded
1546 afterward.
1547 """
1548
1351
1549 def __init__(
1352 def __init__(
1550 self,
1353 self,
1551 repo,
1354 repo,
1552 remote,
1355 remote,
1553 heads=None,
1356 heads=None,
1554 force=False,
1357 force=False,
1555 bookmarks=(),
1358 bookmarks=(),
1556 remotebookmarks=None,
1359 remotebookmarks=None,
1557 streamclonerequested=None,
1360 streamclonerequested=None,
1558 includepats=None,
1361 includepats=None,
1559 excludepats=None,
1362 excludepats=None,
1560 depth=None,
1363 depth=None,
1561 ):
1364 ):
1562 # repo we pull into
1365 # repo we pull into
1563 self.repo = repo
1366 self.repo = repo
1564 # repo we pull from
1367 # repo we pull from
1565 self.remote = remote
1368 self.remote = remote
1566 # revision we try to pull (None is "all")
1369 # revision we try to pull (None is "all")
1567 self.heads = heads
1370 self.heads = heads
1568 # bookmark pulled explicitly
1371 # bookmark pulled explicitly
1569 self.explicitbookmarks = [
1372 self.explicitbookmarks = [
1570 repo._bookmarks.expandname(bookmark) for bookmark in bookmarks
1373 repo._bookmarks.expandname(bookmark) for bookmark in bookmarks
1571 ]
1374 ]
1572 # do we force pull?
1375 # do we force pull?
1573 self.force = force
1376 self.force = force
1574 # whether a streaming clone was requested
1377 # whether a streaming clone was requested
1575 self.streamclonerequested = streamclonerequested
1378 self.streamclonerequested = streamclonerequested
1576 # transaction manager
1379 # transaction manager
1577 self.trmanager = None
1380 self.trmanager = None
1578 # set of common changeset between local and remote before pull
1381 # set of common changeset between local and remote before pull
1579 self.common = None
1382 self.common = None
1580 # set of pulled head
1383 # set of pulled head
1581 self.rheads = None
1384 self.rheads = None
1582 # list of missing changeset to fetch remotely
1385 # list of missing changeset to fetch remotely
1583 self.fetch = None
1386 self.fetch = None
1584 # remote bookmarks data
1387 # remote bookmarks data
1585 self.remotebookmarks = remotebookmarks
1388 self.remotebookmarks = remotebookmarks
1586 # result of changegroup pulling (used as return code by pull)
1389 # result of changegroup pulling (used as return code by pull)
1587 self.cgresult = None
1390 self.cgresult = None
1588 # list of step already done
1391 # list of step already done
1589 self.stepsdone = set()
1392 self.stepsdone = set()
1590 # Whether we attempted a clone from pre-generated bundles.
1393 # Whether we attempted a clone from pre-generated bundles.
1591 self.clonebundleattempted = False
1394 self.clonebundleattempted = False
1592 # Set of file patterns to include.
1395 # Set of file patterns to include.
1593 self.includepats = includepats
1396 self.includepats = includepats
1594 # Set of file patterns to exclude.
1397 # Set of file patterns to exclude.
1595 self.excludepats = excludepats
1398 self.excludepats = excludepats
1596 # Number of ancestor changesets to pull from each pulled head.
1399 # Number of ancestor changesets to pull from each pulled head.
1597 self.depth = depth
1400 self.depth = depth
1598
1401
1599 @util.propertycache
1402 @util.propertycache
1600 def pulledsubset(self):
1403 def pulledsubset(self):
1601 """heads of the set of changesets targeted by the pull"""
1602 # compute target subset
1405 # compute target subset
1603 if self.heads is None:
1406 if self.heads is None:
1604 # We pulled everything possible
1605 # sync on everything common
1408 # sync on everything common
1606 c = set(self.common)
1409 c = set(self.common)
1607 ret = list(self.common)
1410 ret = list(self.common)
1608 for n in self.rheads:
1411 for n in self.rheads:
1609 if n not in c:
1412 if n not in c:
1610 ret.append(n)
1413 ret.append(n)
1611 return ret
1414 return ret
1612 else:
1415 else:
1613 # We pulled a specific subset
1416 # We pulled a specific subset
1614 # sync on this subset
1417 # sync on this subset
1615 return self.heads
1418 return self.heads
1616
1419
1617 @util.propertycache
1420 @util.propertycache
1618 def canusebundle2(self):
1421 def canusebundle2(self):
1619 return not _forcebundle1(self)
1422 return not _forcebundle1(self)
1620
1423
1621 @util.propertycache
1424 @util.propertycache
1622 def remotebundle2caps(self):
1425 def remotebundle2caps(self):
1623 return bundle2.bundle2caps(self.remote)
1426 return bundle2.bundle2caps(self.remote)
1624
1427
1625 def gettransaction(self):
1428 def gettransaction(self):
1626 # deprecated; talk to trmanager directly
1429 # deprecated; talk to trmanager directly
1627 return self.trmanager.transaction()
1430 return self.trmanager.transaction()
1628
1431
1629
1432
1630 class transactionmanager(util.transactional):
1433 class transactionmanager(util.transactional):
1631 """An object to manage the life cycle of a transaction
1434 """An object to manage the life cycle of a transaction
1632
1435
1633 It creates the transaction on demand and calls the appropriate hooks when
1436 It creates the transaction on demand and calls the appropriate hooks when
1634 closing the transaction."""
1437 closing the transaction."""
1635
1438
1636 def __init__(self, repo, source, url):
1439 def __init__(self, repo, source, url):
1637 self.repo = repo
1440 self.repo = repo
1638 self.source = source
1441 self.source = source
1639 self.url = url
1442 self.url = url
1640 self._tr = None
1443 self._tr = None
1641
1444
1642 def transaction(self):
1445 def transaction(self):
1643 """Return an open transaction object, constructing if necessary"""
1446 """Return an open transaction object, constructing if necessary"""
1644 if not self._tr:
1447 if not self._tr:
1645 trname = b'%s\n%s' % (self.source, util.hidepassword(self.url))
1448 trname = b'%s\n%s' % (self.source, util.hidepassword(self.url))
1646 self._tr = self.repo.transaction(trname)
1449 self._tr = self.repo.transaction(trname)
1647 self._tr.hookargs[b'source'] = self.source
1450 self._tr.hookargs[b'source'] = self.source
1648 self._tr.hookargs[b'url'] = self.url
1451 self._tr.hookargs[b'url'] = self.url
1649 return self._tr
1452 return self._tr
1650
1453
1651 def close(self):
1454 def close(self):
1652 """close transaction if created"""
1455 """close transaction if created"""
1653 if self._tr is not None:
1456 if self._tr is not None:
1654 self._tr.close()
1457 self._tr.close()
1655
1458
1656 def release(self):
1459 def release(self):
1657 """release transaction if created"""
1460 """release transaction if created"""
1658 if self._tr is not None:
1461 if self._tr is not None:
1659 self._tr.release()
1462 self._tr.release()
1660
1463
1661
1464
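transactionmanager creates the underlying transaction only on first use and guarantees close/release at the end of the pull; in the real code the __enter__/__exit__ glue comes from util.transactional. The same "lazy resource behind a context manager" pattern can be shown without any repository (toy classes, illustrative only):

```python
class ToyTransaction(object):
    def __init__(self, name):
        self.name, self.state = name, 'open'
    def close(self):
        self.state = 'committed'
    def release(self):
        if self.state == 'open':
            self.state = 'rolled back'

class ToyTrManager(object):
    """Create a transaction on demand; commit on success, roll back otherwise."""
    def __init__(self, name):
        self._name, self._tr = name, None
    def transaction(self):
        if self._tr is None:
            self._tr = ToyTransaction(self._name)
        return self._tr
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc, tb):
        if self._tr is not None:
            if exc_type is None:
                self._tr.close()
            self._tr.release()

with ToyTrManager('pull\nhttps://example.com/repo') as trmgr:
    tr = trmgr.transaction()       # opened lazily, only because we asked
print(tr.state)                    # 'committed'
```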
1662 def listkeys(remote, namespace):
1465 def listkeys(remote, namespace):
1663 with remote.commandexecutor() as e:
1466 with remote.commandexecutor() as e:
1664 return e.callcommand(b'listkeys', {b'namespace': namespace}).result()
1467 return e.callcommand(b'listkeys', {b'namespace': namespace}).result()
1665
1468
1666
1469
1667 def _fullpullbundle2(repo, pullop):
1470 def _fullpullbundle2(repo, pullop):
1668 # The server may send a partial reply, i.e. when inlining
1471 # The server may send a partial reply, i.e. when inlining
1669 # pre-computed bundles. In that case, update the common
1472 # pre-computed bundles. In that case, update the common
1670 # set based on the results and pull another bundle.
1473 # set based on the results and pull another bundle.
1671 #
1474 #
1672 # There are two indicators that the process is finished:
1475 # There are two indicators that the process is finished:
1673 # - no changeset has been added, or
1476 # - no changeset has been added, or
1674 # - all remote heads are known locally.
1477 # - all remote heads are known locally.
1675 # The head check must use the unfiltered view as obsoletion
1478 # The head check must use the unfiltered view as obsoletion
1676 # markers can hide heads.
1479 # markers can hide heads.
1677 unfi = repo.unfiltered()
1480 unfi = repo.unfiltered()
1678 unficl = unfi.changelog
1481 unficl = unfi.changelog
1679
1482
1680 def headsofdiff(h1, h2):
1483 def headsofdiff(h1, h2):
1681 """Returns heads(h1 % h2)"""
1484 """Returns heads(h1 % h2)"""
1682 res = unfi.set(b'heads(%ln %% %ln)', h1, h2)
1485 res = unfi.set(b'heads(%ln %% %ln)', h1, h2)
1683 return {ctx.node() for ctx in res}
1486 return {ctx.node() for ctx in res}
1684
1487
1685 def headsofunion(h1, h2):
1488 def headsofunion(h1, h2):
1686 """Returns heads((h1 + h2) - null)"""
1489 """Returns heads((h1 + h2) - null)"""
1687 res = unfi.set(b'heads((%ln + %ln - null))', h1, h2)
1490 res = unfi.set(b'heads((%ln + %ln - null))', h1, h2)
1688 return {ctx.node() for ctx in res}
1491 return {ctx.node() for ctx in res}
1689
1492
1690 while True:
1493 while True:
1691 old_heads = unficl.heads()
1494 old_heads = unficl.heads()
1692 clstart = len(unficl)
1495 clstart = len(unficl)
1693 _pullbundle2(pullop)
1496 _pullbundle2(pullop)
1694 if requirements.NARROW_REQUIREMENT in repo.requirements:
1497 if requirements.NARROW_REQUIREMENT in repo.requirements:
1695 # XXX narrow clones filter the heads on the server side during
1498 # XXX narrow clones filter the heads on the server side during
1696 # XXX getbundle and result in partial replies as well.
1499 # XXX getbundle and result in partial replies as well.
1697 # XXX Disable pull bundles in this case as band aid to avoid
1500 # XXX Disable pull bundles in this case as band aid to avoid
1698 # XXX extra round trips.
1501 # XXX extra round trips.
1699 break
1502 break
1700 if clstart == len(unficl):
1503 if clstart == len(unficl):
1701 break
1504 break
1702 if all(unficl.hasnode(n) for n in pullop.rheads):
1505 if all(unficl.hasnode(n) for n in pullop.rheads):
1703 break
1506 break
1704 new_heads = headsofdiff(unficl.heads(), old_heads)
1507 new_heads = headsofdiff(unficl.heads(), old_heads)
1705 pullop.common = headsofunion(new_heads, pullop.common)
1508 pullop.common = headsofunion(new_heads, pullop.common)
1706 pullop.rheads = set(pullop.rheads) - pullop.common
1509 pullop.rheads = set(pullop.rheads) - pullop.common
1707
1510
1708
1511
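_fullpullbundle2 keeps pulling until either nothing new arrived or every advertised remote head is known locally, widening ``common`` after each partial reply. The termination logic can be sketched with a fake "server" that hands out its data in slices (all names here are toy stand-ins, not the real pull machinery):

```python
def iterative_pull(remote_heads, fetch_batch, local_nodes):
    """Pull in rounds until no progress or all remote heads are known."""
    common = set()
    rounds = 0
    while True:
        rounds += 1
        before = len(local_nodes)
        new = fetch_batch(common)          # stands in for _pullbundle2()
        local_nodes |= new
        if len(local_nodes) == before:     # nothing added: server is done
            break
        if all(h in local_nodes for h in remote_heads):
            break                          # every remote head known locally
        common |= new                      # ask for the rest next round
    return rounds

# fake server handing out two pre-computed slices, then the remainder
slices = [{'a'}, {'b'}, {'c', 'd'}]
def fetch_batch(common, _it=iter(slices)):
    return next(_it, set())

print(iterative_pull({'c', 'd'}, fetch_batch, set()))   # 3 rounds
```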
1709 def add_confirm_callback(repo, pullop):
1512 def add_confirm_callback(repo, pullop):
1710 """adds a finalize callback to the transaction which can be used to show
1711 stats to the user and confirm the pull before committing the transaction"""
1712
1515
1713 tr = pullop.trmanager.transaction()
1516 tr = pullop.trmanager.transaction()
1714 scmutil.registersummarycallback(
1517 scmutil.registersummarycallback(
1715 repo, tr, txnname=b'pull', as_validator=True
1518 repo, tr, txnname=b'pull', as_validator=True
1716 )
1519 )
1717 reporef = weakref.ref(repo.unfiltered())
1520 reporef = weakref.ref(repo.unfiltered())
1718
1521
1719 def prompt(tr):
1522 def prompt(tr):
1720 repo = reporef()
1523 repo = reporef()
1721 cm = _(b'accept incoming changes (yn)?$$ &Yes $$ &No')
1524 cm = _(b'accept incoming changes (yn)?$$ &Yes $$ &No')
1722 if repo.ui.promptchoice(cm):
1525 if repo.ui.promptchoice(cm):
1723 raise error.Abort(b"user aborted")
1526 raise error.Abort(b"user aborted")
1724
1527
1725 tr.addvalidator(b'900-pull-prompt', prompt)
1528 tr.addvalidator(b'900-pull-prompt', prompt)
1726
1529
1727
1530
1728 def pull(
1531 def pull(
1729 repo,
1532 repo,
1730 remote,
1533 remote,
1731 heads=None,
1534 heads=None,
1732 force=False,
1535 force=False,
1733 bookmarks=(),
1536 bookmarks=(),
1734 opargs=None,
1537 opargs=None,
1735 streamclonerequested=None,
1538 streamclonerequested=None,
1736 includepats=None,
1539 includepats=None,
1737 excludepats=None,
1540 excludepats=None,
1738 depth=None,
1541 depth=None,
1739 confirm=None,
1542 confirm=None,
1740 ):
1543 ):
1741 """Fetch repository data from a remote.
1544 """Fetch repository data from a remote.
1742
1545
1743 This is the main function used to retrieve data from a remote repository.
1546 This is the main function used to retrieve data from a remote repository.
1744
1547
1745 ``repo`` is the local repository to clone into.
1548 ``repo`` is the local repository to clone into.
1746 ``remote`` is a peer instance.
1549 ``remote`` is a peer instance.
1747 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1550 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1748 default) means to pull everything from the remote.
1551 default) means to pull everything from the remote.
1749 ``bookmarks`` is an iterable of bookmarks requested to be pulled. By
1750 default, all remote bookmarks are pulled.
1553 default, all remote bookmarks are pulled.
1751 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1554 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1752 initialization.
1555 initialization.
1753 ``streamclonerequested`` is a boolean indicating whether a "streaming
1556 ``streamclonerequested`` is a boolean indicating whether a "streaming
1754 clone" is requested. A "streaming clone" is essentially a raw file copy
1557 clone" is requested. A "streaming clone" is essentially a raw file copy
1755 of revlogs from the server. This only works when the local repository is
1558 of revlogs from the server. This only works when the local repository is
1756 empty. The default value of ``None`` means to respect the server
1559 empty. The default value of ``None`` means to respect the server
1757 configuration for preferring stream clones.
1560 configuration for preferring stream clones.
1758 ``includepats`` and ``excludepats`` define explicit file patterns to
1561 ``includepats`` and ``excludepats`` define explicit file patterns to
1759 include and exclude in storage, respectively. If not defined, narrow
1562 include and exclude in storage, respectively. If not defined, narrow
1760 patterns from the repo instance are used, if available.
1563 patterns from the repo instance are used, if available.
1761 ``depth`` is an integer indicating the DAG depth of history we're
1564 ``depth`` is an integer indicating the DAG depth of history we're
1762 interested in. If defined, for each revision specified in ``heads``, we
1565 interested in. If defined, for each revision specified in ``heads``, we
1763 will fetch up to this many of its ancestors and data associated with them.
1566 will fetch up to this many of its ancestors and data associated with them.
1764 ``confirm`` is a boolean indicating whether the pull should be confirmed
1567 ``confirm`` is a boolean indicating whether the pull should be confirmed
1765 before committing the transaction. This overrides HGPLAIN.
1568 before committing the transaction. This overrides HGPLAIN.
1766
1569
1767 Returns the ``pulloperation`` created for this pull.
1570 Returns the ``pulloperation`` created for this pull.
1768 """
1571 """
1769 if opargs is None:
1572 if opargs is None:
1770 opargs = {}
1573 opargs = {}
1771
1574
1772 # We allow the narrow patterns to be passed in explicitly to provide more
1575 # We allow the narrow patterns to be passed in explicitly to provide more
1773 # flexibility for API consumers.
1576 # flexibility for API consumers.
1774 if includepats or excludepats:
1577 if includepats or excludepats:
1775 includepats = includepats or set()
1578 includepats = includepats or set()
1776 excludepats = excludepats or set()
1579 excludepats = excludepats or set()
1777 else:
1580 else:
1778 includepats, excludepats = repo.narrowpats
1581 includepats, excludepats = repo.narrowpats
1779
1582
1780 narrowspec.validatepatterns(includepats)
1583 narrowspec.validatepatterns(includepats)
1781 narrowspec.validatepatterns(excludepats)
1584 narrowspec.validatepatterns(excludepats)
1782
1585
1783 pullop = pulloperation(
1586 pullop = pulloperation(
1784 repo,
1587 repo,
1785 remote,
1588 remote,
1786 heads,
1589 heads,
1787 force,
1590 force,
1788 bookmarks=bookmarks,
1591 bookmarks=bookmarks,
1789 streamclonerequested=streamclonerequested,
1592 streamclonerequested=streamclonerequested,
1790 includepats=includepats,
1593 includepats=includepats,
1791 excludepats=excludepats,
1594 excludepats=excludepats,
1792 depth=depth,
1595 depth=depth,
1793 **pycompat.strkwargs(opargs)
1596 **pycompat.strkwargs(opargs)
1794 )
1597 )
1795
1598
1796 peerlocal = pullop.remote.local()
1599 peerlocal = pullop.remote.local()
1797 if peerlocal:
1600 if peerlocal:
1798 missing = set(peerlocal.requirements) - pullop.repo.supported
1601 missing = set(peerlocal.requirements) - pullop.repo.supported
1799 if missing:
1602 if missing:
1800 msg = _(
1603 msg = _(
1801 b"required features are not"
1604 b"required features are not"
1802 b" supported in the destination:"
1605 b" supported in the destination:"
1803 b" %s"
1606 b" %s"
1804 ) % (b', '.join(sorted(missing)))
1607 ) % (b', '.join(sorted(missing)))
1805 raise error.Abort(msg)
1608 raise error.Abort(msg)
1806
1609
1807 pullop.trmanager = transactionmanager(repo, b'pull', remote.url())
1610 pullop.trmanager = transactionmanager(repo, b'pull', remote.url())
1808 wlock = util.nullcontextmanager()
1611 wlock = util.nullcontextmanager()
1809 if not bookmod.bookmarksinstore(repo):
1612 if not bookmod.bookmarksinstore(repo):
1810 wlock = repo.wlock()
1613 wlock = repo.wlock()
1811 with wlock, repo.lock(), pullop.trmanager:
1614 with wlock, repo.lock(), pullop.trmanager:
1812 if confirm or (
1615 if confirm or (
1813 repo.ui.configbool(b"pull", b"confirm") and not repo.ui.plain()
1616 repo.ui.configbool(b"pull", b"confirm") and not repo.ui.plain()
1814 ):
1617 ):
1815 add_confirm_callback(repo, pullop)
1618 add_confirm_callback(repo, pullop)
1816
1619
1817 # Use the modern wire protocol, if available.
1620 # Use the modern wire protocol, if available.
1818 if remote.capable(b'command-changesetdata'):
1621 if remote.capable(b'command-changesetdata'):
1819 exchangev2.pull(pullop)
1622 exchangev2.pull(pullop)
1820 else:
1623 else:
1821 # This should ideally be in _pullbundle2(). However, it needs to run
1624 # This should ideally be in _pullbundle2(). However, it needs to run
1822 # before discovery to avoid extra work.
1625 # before discovery to avoid extra work.
1823 _maybeapplyclonebundle(pullop)
1626 _maybeapplyclonebundle(pullop)
1824 streamclone.maybeperformlegacystreamclone(pullop)
1627 streamclone.maybeperformlegacystreamclone(pullop)
1825 _pulldiscovery(pullop)
1628 _pulldiscovery(pullop)
1826 if pullop.canusebundle2:
1629 if pullop.canusebundle2:
1827 _fullpullbundle2(repo, pullop)
1630 _fullpullbundle2(repo, pullop)
1828 _pullchangeset(pullop)
1631 _pullchangeset(pullop)
1829 _pullphase(pullop)
1632 _pullphase(pullop)
1830 _pullbookmarks(pullop)
1633 _pullbookmarks(pullop)
1831 _pullobsolete(pullop)
1634 _pullobsolete(pullop)
1832
1635
1833 # storing remotenames
1636 # storing remotenames
1834 if repo.ui.configbool(b'experimental', b'remotenames'):
1637 if repo.ui.configbool(b'experimental', b'remotenames'):
1835 logexchange.pullremotenames(repo, remote)
1638 logexchange.pullremotenames(repo, remote)
1836
1639
1837 return pullop
1640 return pullop
1838
1641
1839
1642
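A hedged usage sketch of driving exchange.pull() from a script or extension follows, assuming an already-cloned local repository and a reachable remote; the hg.peer and ui setup shown here is the usual way to obtain a peer, but treat the exact setup as an assumption rather than a recipe.

```python
# Hedged sketch: calling exchange.pull() directly, assuming a local clone
# at ./repo and a reachable remote.  Error handling and locking edge cases
# are omitted; exchange.pull() itself takes the needed locks.
from mercurial import exchange, hg, ui as uimod

ui = uimod.ui.load()
repo = hg.repository(ui, b'./repo')
other = hg.peer(repo, {}, b'https://example.com/repo')

# Pull everything the remote has, using the defaults documented above;
# the returned pulloperation carries cgresult and the other pull state.
pullop = exchange.pull(repo, other, heads=None)
print(pullop.cgresult)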
1840 # list of steps to perform discovery before pull
1643 # list of steps to perform discovery before pull
1841 pulldiscoveryorder = []
1644 pulldiscoveryorder = []
1842
1645
1843 # Mapping between step name and function
1646 # Mapping between step name and function
1844 #
1647 #
1845 # This exists to help extensions wrap steps if necessary
1648 # This exists to help extensions wrap steps if necessary
1846 pulldiscoverymapping = {}
1649 pulldiscoverymapping = {}
1847
1650
1848
1651
1849 def pulldiscovery(stepname):
1652 def pulldiscovery(stepname):
1850 """decorator for functions performing discovery before pull
1851
1852 The function is added to the step -> function mapping and appended to the
1853 list of steps. Beware that decorated functions will be added in order (this
1854 may matter).
1855
1856 You can only use this decorator for a new step; if you want to wrap a step
1857 from an extension, change the pulldiscovery dictionary directly."""
1858
1661
1859 def dec(func):
1662 def dec(func):
1860 assert stepname not in pulldiscoverymapping
1663 assert stepname not in pulldiscoverymapping
1861 pulldiscoverymapping[stepname] = func
1664 pulldiscoverymapping[stepname] = func
1862 pulldiscoveryorder.append(stepname)
1665 pulldiscoveryorder.append(stepname)
1863 return func
1666 return func
1864
1667
1865 return dec
1668 return dec
1866
1669
1867
1670
1868 def _pulldiscovery(pullop):
1671 def _pulldiscovery(pullop):
1869 """Run all discovery steps"""
1672 """Run all discovery steps"""
1870 for stepname in pulldiscoveryorder:
1673 for stepname in pulldiscoveryorder:
1871 step = pulldiscoverymapping[stepname]
1674 step = pulldiscoverymapping[stepname]
1872 step(pullop)
1675 step(pullop)
1873
1676
1874
1677
1875 @pulldiscovery(b'b1:bookmarks')
1678 @pulldiscovery(b'b1:bookmarks')
1876 def _pullbookmarkbundle1(pullop):
1679 def _pullbookmarkbundle1(pullop):
1877 """fetch bookmark data in bundle1 case
1680 """fetch bookmark data in bundle1 case
1878
1681
1879 If not using bundle2, we have to fetch bookmarks before changeset
1682 If not using bundle2, we have to fetch bookmarks before changeset
1880 discovery to reduce the chance and impact of race conditions."""
1683 discovery to reduce the chance and impact of race conditions."""
1881 if pullop.remotebookmarks is not None:
1684 if pullop.remotebookmarks is not None:
1882 return
1685 return
1883 if pullop.canusebundle2 and b'listkeys' in pullop.remotebundle2caps:
1686 if pullop.canusebundle2 and b'listkeys' in pullop.remotebundle2caps:
1884 # all known bundle2 servers now support listkeys, but let's be nice with
1885 # new implementations.
1886 return
1689 return
1887 books = listkeys(pullop.remote, b'bookmarks')
1690 books = listkeys(pullop.remote, b'bookmarks')
1888 pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
1691 pullop.remotebookmarks = bookmod.unhexlifybookmarks(books)
1889
1692
1890
1693
1891 @pulldiscovery(b'changegroup')
1694 @pulldiscovery(b'changegroup')
1892 def _pulldiscoverychangegroup(pullop):
1695 def _pulldiscoverychangegroup(pullop):
1893 """discovery phase for the pull
1696 """discovery phase for the pull
1894
1697
1895 Currently handles changeset discovery only; will change to handle all
1896 discovery at some point."""
1897 tmp = discovery.findcommonincoming(
1700 tmp = discovery.findcommonincoming(
1898 pullop.repo, pullop.remote, heads=pullop.heads, force=pullop.force
1701 pullop.repo, pullop.remote, heads=pullop.heads, force=pullop.force
1899 )
1702 )
1900 common, fetch, rheads = tmp
1703 common, fetch, rheads = tmp
1901 has_node = pullop.repo.unfiltered().changelog.index.has_node
1704 has_node = pullop.repo.unfiltered().changelog.index.has_node
1902 if fetch and rheads:
1705 if fetch and rheads:
1903 # If a remote heads is filtered locally, put in back in common.
1706 # If a remote heads is filtered locally, put in back in common.
1904 #
1707 #
1905 # This is a hackish solution to catch most of "common but locally
1708 # This is a hackish solution to catch most of "common but locally
1906 # hidden situation". We do not performs discovery on unfiltered
1709 # hidden situation". We do not performs discovery on unfiltered
1907 # repository because it end up doing a pathological amount of round
1710 # repository because it end up doing a pathological amount of round
1908 # trip for w huge amount of changeset we do not care about.
1711 # trip for w huge amount of changeset we do not care about.
1909 #
1712 #
1910 # If a set of such "common but filtered" changeset exist on the server
1713 # If a set of such "common but filtered" changeset exist on the server
1911 # but are not including a remote heads, we'll not be able to detect it,
1714 # but are not including a remote heads, we'll not be able to detect it,
1912 scommon = set(common)
1715 scommon = set(common)
1913 for n in rheads:
1716 for n in rheads:
1914 if has_node(n):
1717 if has_node(n):
1915 if n not in scommon:
1718 if n not in scommon:
1916 common.append(n)
1719 common.append(n)
1917 if set(rheads).issubset(set(common)):
1720 if set(rheads).issubset(set(common)):
1918 fetch = []
1721 fetch = []
1919 pullop.common = common
1722 pullop.common = common
1920 pullop.fetch = fetch
1723 pullop.fetch = fetch
1921 pullop.rheads = rheads
1724 pullop.rheads = rheads
1922
1725
1923
1726
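# Sketch of the data flow above (hypothetical values, for illustration only):
# discovery.findcommonincoming() returns a tuple that the step simply records
# on the pull operation.
#
#     common, fetch, rheads = discovery.findcommonincoming(repo, remote)
#     # common: nodes known on both sides
#     # fetch:  roots of the missing set to request from the remote
#     # rheads: the remote's current heads
#
# A remote head that is hidden locally still exists in the local store, so
# has_node() succeeds and the head is appended to `common` to avoid
# re-fetching changesets we already have.
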
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup."""
    kwargs = {b'bundlecaps': caps20to10(pullop.repo, role=b'client')}

    # make ui easier to access
    ui = pullop.repo.ui

    # At the moment we don't do stream clones over bundle2. If that is
    # implemented then here's where the check for that will go.
    streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]

    # declare pull perimeters
    kwargs[b'common'] = pullop.common
    kwargs[b'heads'] = pullop.heads or pullop.rheads

    # check that the server supports narrow, and only then add includepats
    # and excludepats
    servernarrow = pullop.remote.capable(wireprototypes.NARROWCAP)
    if servernarrow and pullop.includepats:
        kwargs[b'includepats'] = pullop.includepats
    if servernarrow and pullop.excludepats:
        kwargs[b'excludepats'] = pullop.excludepats

    if streaming:
        kwargs[b'cg'] = False
        kwargs[b'stream'] = True
        pullop.stepsdone.add(b'changegroup')
        pullop.stepsdone.add(b'phases')

    else:
        # pulling changegroup
        pullop.stepsdone.add(b'changegroup')

        kwargs[b'cg'] = pullop.fetch

        legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
        hasbinaryphase = b'heads' in pullop.remotebundle2caps.get(b'phases', ())
        if not legacyphase and hasbinaryphase:
            kwargs[b'phases'] = True
            pullop.stepsdone.add(b'phases')

        if b'listkeys' in pullop.remotebundle2caps:
            if b'phases' not in pullop.stepsdone:
                kwargs[b'listkeys'] = [b'phases']

    bookmarksrequested = False
    legacybookmark = b'bookmarks' in ui.configlist(b'devel', b'legacy.exchange')
    hasbinarybook = b'bookmarks' in pullop.remotebundle2caps

    if pullop.remotebookmarks is not None:
        pullop.stepsdone.add(b'request-bookmarks')

    if (
        b'request-bookmarks' not in pullop.stepsdone
        and pullop.remotebookmarks is None
        and not legacybookmark
        and hasbinarybook
    ):
        kwargs[b'bookmarks'] = True
        bookmarksrequested = True

    if b'listkeys' in pullop.remotebundle2caps:
        if b'request-bookmarks' not in pullop.stepsdone:
            # make sure to always include bookmark data when migrating
            # `hg incoming --bundle` to using this function.
            pullop.stepsdone.add(b'request-bookmarks')
            kwargs.setdefault(b'listkeys', []).append(b'bookmarks')

    # If this is a full pull / clone and the server supports the clone bundles
    # feature, tell the server whether we attempted a clone bundle. The
    # presence of this flag indicates the client supports clone bundles. This
    # will enable the server to treat clients that support clone bundles
    # differently from those that don't.
    if (
        pullop.remote.capable(b'clonebundles')
        and pullop.heads is None
        and list(pullop.common) == [nullid]
    ):
        kwargs[b'cbattempted'] = pullop.clonebundleattempted

    if streaming:
        pullop.repo.ui.status(_(b'streaming all changes\n'))
    elif not pullop.fetch:
        pullop.repo.ui.status(_(b"no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_(b"requesting all changes\n"))
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
        if obsolete.commonversion(remoteversions) is not None:
            kwargs[b'obsmarkers'] = True
            pullop.stepsdone.add(b'obsmarkers')
    _pullbundle2extraprepare(pullop, kwargs)

    with pullop.remote.commandexecutor() as e:
        args = dict(kwargs)
        args[b'source'] = b'pull'
        bundle = e.callcommand(b'getbundle', args).result()

        try:
            op = bundle2.bundleoperation(
                pullop.repo, pullop.gettransaction, source=b'pull'
            )
            op.modes[b'bookmarks'] = b'records'
            bundle2.processbundle(pullop.repo, bundle, op=op)
        except bundle2.AbortFromPart as exc:
            pullop.repo.ui.status(_(b'remote: abort: %s\n') % exc)
            raise error.Abort(_(b'pull failed on remote'), hint=exc.hint)
        except error.BundleValueError as exc:
            raise error.Abort(_(b'missing support for %s') % exc)

    if pullop.fetch:
        pullop.cgresult = bundle2.combinechangegroupresults(op)

    # processing phases change
    for namespace, value in op.records[b'listkeys']:
        if namespace == b'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    if bookmarksrequested:
        books = {}
        for record in op.records[b'bookmarks']:
            books[record[b'bookmark']] = record[b"node"]
        pullop.remotebookmarks = books
    else:
        for namespace, value in op.records[b'listkeys']:
            if namespace == b'bookmarks':
                pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)

    # bookmark data were either already there or pulled in the bundle
    if pullop.remotebookmarks is not None:
        _pullbookmarks(pullop)

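# Illustrative sketch (values are hypothetical): for a plain `hg pull` from a
# bundle2 server that advertises binary phase and bookmark support, the
# arguments assembled above and sent to the remote `getbundle` command look
# roughly like:
#
#     {
#         b'bundlecaps': {b'HG20', b'bundle2=<url-quoted capability blob>'},
#         b'common': [nullid],
#         b'heads': rheads,
#         b'cg': True,
#         b'phases': True,
#         b'bookmarks': True,
#         b'cbattempted': False,
#         b'source': b'pull',
#     }
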
def _pullbundle2extraprepare(pullop, kwargs):
    """hook function so that extensions can extend the getbundle call"""

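# Illustrative sketch (not part of the original module): an extension can add
# its own arguments to the getbundle call by wrapping this hook; the argument
# name below is hypothetical.
#
#     def _extraprepare(orig, pullop, kwargs):
#         kwargs[b'myextpart'] = True  # request a custom bundle2 part
#         return orig(pullop, kwargs)
#
#     extensions.wrapfunction(exchange, '_pullbundle2extraprepare', _extraprepare)
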
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo"""
    # We delay opening the transaction as late as possible so we don't open
    # one for nothing and don't break future useful rollback calls
    if b'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add(b'changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_(b"no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_(b"requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable(b'changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable(b'getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle(
            b'pull', common=pullop.common, heads=pullop.heads or pullop.rheads
        )
    elif pullop.heads is None:
        with pullop.remote.commandexecutor() as e:
            cg = e.callcommand(
                b'changegroup', {b'nodes': pullop.fetch, b'source': b'pull',}
            ).result()

    elif not pullop.remote.capable(b'changegroupsubset'):
        raise error.Abort(
            _(
                b"partial pull cannot be done because "
                b"other repository doesn't support "
                b"changegroupsubset."
            )
        )
    else:
        with pullop.remote.commandexecutor() as e:
            cg = e.callcommand(
                b'changegroupsubset',
                {
                    b'bases': pullop.fetch,
                    b'heads': pullop.heads,
                    b'source': b'pull',
                },
            ).result()

    bundleop = bundle2.applybundle(
        pullop.repo, cg, tr, b'pull', pullop.remote.url()
    )
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)


def _pullphase(pullop):
    # Get remote phases data from remote
    if b'phases' in pullop.stepsdone:
        return
    remotephases = listkeys(pullop.remote, b'phases')
    _pullapplyphases(pullop, remotephases)


def _pullapplyphases(pullop, remotephases):
    """apply phase movement from observed remote state"""
    if b'phases' in pullop.stepsdone:
        return
    pullop.stepsdone.add(b'phases')
    publishing = bool(remotephases.get(b'publishing', False))
    if remotephases and not publishing:
        # remote is new and non-publishing
        pheads, _dr = phases.analyzeremotephases(
            pullop.repo, pullop.pulledsubset, remotephases
        )
        dheads = pullop.pulledsubset
    else:
        # Remote is old or publishing all common changesets
        # should be seen as public
        pheads = pullop.pulledsubset
        dheads = []
    unfi = pullop.repo.unfiltered()
    phase = unfi._phasecache.phase
    rev = unfi.changelog.index.get_rev
    public = phases.public
    draft = phases.draft

    # exclude changesets already public locally and update the others
    pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
    if pheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, public, pheads)

    # exclude changesets already draft locally and update the others
    dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
    if dheads:
        tr = pullop.gettransaction()
        phases.advanceboundary(pullop.repo, tr, draft, dheads)


def _pullbookmarks(pullop):
    """process the remote bookmark information to update the local one"""
    if b'bookmarks' in pullop.stepsdone:
        return
    pullop.stepsdone.add(b'bookmarks')
    repo = pullop.repo
    remotebookmarks = pullop.remotebookmarks
    bookmod.updatefromremote(
        repo.ui,
        repo,
        remotebookmarks,
        pullop.remote.url(),
        pullop.gettransaction,
        explicit=pullop.explicitbookmarks,
    )


def _pullobsolete(pullop):
    """utility function to pull obsolete markers from a remote

    The `gettransaction` function returns the pull transaction, creating one
    if necessary. We return the transaction to inform the calling code that a
    new transaction has been created (when applicable).

    Exists mostly to allow overriding for experimentation purposes"""
    if b'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add(b'obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug(b'fetching remote obsolete markers\n')
        remoteobs = listkeys(pullop.remote, b'obsolete')
        if b'dump0' in remoteobs:
            tr = pullop.gettransaction()
            markers = []
            for key in sorted(remoteobs, reverse=True):
                if key.startswith(b'dump'):
                    data = util.b85decode(remoteobs[key])
                    version, newmarks = obsolete._readmarkers(data)
                    markers += newmarks
            if markers:
                pullop.repo.obsstore.add(tr, markers)
        pullop.repo.invalidatevolatilesets()
    return tr


def applynarrowacl(repo, kwargs):
    """Apply narrow fetch access control.

    This massages the named arguments for getbundle wire protocol commands
    so requested data is filtered through access control rules.
    """
    ui = repo.ui
    # TODO this assumes existence of HTTP and is a layering violation.
    username = ui.shortuser(ui.environ.get(b'REMOTE_USER') or ui.username())
    user_includes = ui.configlist(
        _NARROWACL_SECTION,
        username + b'.includes',
        ui.configlist(_NARROWACL_SECTION, b'default.includes'),
    )
    user_excludes = ui.configlist(
        _NARROWACL_SECTION,
        username + b'.excludes',
        ui.configlist(_NARROWACL_SECTION, b'default.excludes'),
    )
    if not user_includes:
        raise error.Abort(
            _(b"%s configuration for user %s is empty")
            % (_NARROWACL_SECTION, username)
        )

    user_includes = [
        b'path:.' if p == b'*' else b'path:' + p for p in user_includes
    ]
    user_excludes = [
        b'path:.' if p == b'*' else b'path:' + p for p in user_excludes
    ]

    req_includes = set(kwargs.get('includepats', []))
    req_excludes = set(kwargs.get('excludepats', []))

    req_includes, req_excludes, invalid_includes = narrowspec.restrictpatterns(
        req_includes, req_excludes, user_includes, user_excludes
    )

    if invalid_includes:
        raise error.Abort(
            _(b"The following includes are not accessible for %s: %s")
            % (username, stringutil.pprint(invalid_includes))
        )

    new_args = {}
    new_args.update(kwargs)
    new_args['narrow'] = True
    new_args['narrow_acl'] = True
    new_args['includepats'] = req_includes
    if req_excludes:
        new_args['excludepats'] = req_excludes

    return new_args

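# Illustrative hgrc sketch for the ACL handling above. The section name is
# whatever _NARROWACL_SECTION is defined as elsewhere in this module
# (assumed here to be "narrowacl"); user and path names are hypothetical.
#
#     [narrowacl]
#     default.includes = path:public
#     alice.includes = path:public, path:projects/alpha
#     alice.excludes = path:projects/alpha/secrets
#
# A `*` entry is mapped to `path:.` (the whole repository) by
# applynarrowacl(), and the requested include/exclude patterns are then
# restricted to what the ACL allows.
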
def _computeellipsis(repo, common, heads, known, match, depth=None):
    """Compute the shape of a narrowed DAG.

    Args:
      repo: The repository we're transferring.
      common: The roots of the DAG range we're transferring.
              May be just [nullid], which means all ancestors of heads.
      heads: The heads of the DAG range we're transferring.
      match: The narrowmatcher that allows us to identify relevant changes.
      depth: If not None, only consider nodes to be full nodes if they are at
             most depth changesets away from one of heads.

    Returns:
      A tuple of (visitnodes, relevant_nodes, ellipsisroots) where:

        visitnodes: The list of nodes (either full or ellipsis) which
                    need to be sent to the client.
        relevant_nodes: The set of changelog nodes which change a file inside
                        the narrowspec. The client needs these as
                        non-ellipsis nodes.
        ellipsisroots: A dict of {rev: parents} that is used in
                       narrowchangegroup to produce ellipsis nodes with the
                       correct parents.
    """
    cl = repo.changelog
    mfl = repo.manifestlog

    clrev = cl.rev

    commonrevs = {clrev(n) for n in common} | {nullrev}
    headsrevs = {clrev(n) for n in heads}

    if depth:
        revdepth = {h: 0 for h in headsrevs}

    ellipsisheads = collections.defaultdict(set)
    ellipsisroots = collections.defaultdict(set)

    def addroot(head, curchange):
        """Add a root to an ellipsis head, splitting heads with 3 roots."""
        ellipsisroots[head].add(curchange)
        # Recursively split ellipsis heads with 3 roots by finding the
        # roots' youngest common descendant which is an elided merge commit.
        # That descendant takes 2 of the 3 roots as its own, and becomes a
        # root of the head.
        while len(ellipsisroots[head]) > 2:
            child, roots = splithead(head)
            splitroots(head, child, roots)
            head = child  # Recurse in case we just added a 3rd root

    def splitroots(head, child, roots):
        ellipsisroots[head].difference_update(roots)
        ellipsisroots[head].add(child)
        ellipsisroots[child].update(roots)
        ellipsisroots[child].discard(child)

    def splithead(head):
        r1, r2, r3 = sorted(ellipsisroots[head])
        for nr1, nr2 in ((r2, r3), (r1, r3), (r1, r2)):
            mid = repo.revs(
                b'sort(merge() & %d::%d & %d::%d, -rev)', nr1, head, nr2, head
            )
            for j in mid:
                if j == nr2:
                    return nr2, (nr1, nr2)
                if j not in ellipsisroots or len(ellipsisroots[j]) < 2:
                    return j, (nr1, nr2)
        raise error.Abort(
            _(
                b'Failed to split up ellipsis node! head: %d, '
                b'roots: %d %d %d'
            )
            % (head, r1, r2, r3)
        )

    missing = list(cl.findmissingrevs(common=commonrevs, heads=headsrevs))
    visit = reversed(missing)
    relevant_nodes = set()
    visitnodes = [cl.node(m) for m in missing]
    required = set(headsrevs) | known
    for rev in visit:
        clrev = cl.changelogrevision(rev)
        ps = [prev for prev in cl.parentrevs(rev) if prev != nullrev]
        if depth is not None:
            curdepth = revdepth[rev]
            for p in ps:
                revdepth[p] = min(curdepth + 1, revdepth.get(p, depth + 1))
        needed = False
        shallow_enough = depth is None or revdepth[rev] <= depth
        if shallow_enough:
            curmf = mfl[clrev.manifest].read()
            if ps:
                # We choose to not trust the changed files list in
                # changesets because it's not always correct. TODO: could
                # we trust it for the non-merge case?
                p1mf = mfl[cl.changelogrevision(ps[0]).manifest].read()
                needed = bool(curmf.diff(p1mf, match))
                if not needed and len(ps) > 1:
                    # For merge changes, the list of changed files is not
                    # helpful, since we need to emit the merge if a file
                    # in the narrow spec has changed on either side of the
                    # merge. As a result, we do a manifest diff to check.
                    p2mf = mfl[cl.changelogrevision(ps[1]).manifest].read()
                    needed = bool(curmf.diff(p2mf, match))
            else:
                # For a root node, we need to include the node if any
                # files in the node match the narrowspec.
                needed = any(curmf.walk(match))

        if needed:
            for head in ellipsisheads[rev]:
                addroot(head, rev)
            for p in ps:
                required.add(p)
            relevant_nodes.add(cl.node(rev))
        else:
            if not ps:
                ps = [nullrev]
            if rev in required:
                for head in ellipsisheads[rev]:
                    addroot(head, rev)
                for p in ps:
                    ellipsisheads[p].add(rev)
            else:
                for p in ps:
                    ellipsisheads[p] |= ellipsisheads[rev]

    # add common changesets as roots of their reachable ellipsis heads
    for c in commonrevs:
        for head in ellipsisheads[c]:
            addroot(head, c)
    return visitnodes, relevant_nodes, ellipsisroots


def caps20to10(repo, role):
    """return a set with appropriate options to use bundle20 during getbundle"""
    caps = {b'HG20'}
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=role))
    caps.add(b'bundle2=' + urlreq.quote(capsblob))
    return caps

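# Sketch of the value returned by caps20to10() (the exact capability blob
# depends on the repository; the quoted string below is illustrative only):
#
#     {b'HG20', b'bundle2=HG20%0Achangegroup%3D01%2C02%0A...'}
#
# i.e. the literal b'HG20' marker plus the URL-quoted bundle2 capability
# blob, suitable for passing as `bundlecaps` to a legacy getbundle call.
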
# List of names of steps to perform for a bundle2 for getbundle, order matters.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}


def getbundle2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part for getbundle

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps; if you want to wrap a step
    from an extension, modify the getbundle2partsmapping dictionary directly."""

    def dec(func):
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is None:
            getbundle2partsorder.append(stepname)
        else:
            getbundle2partsorder.insert(idx, stepname)
        return func

    return dec

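# Illustrative sketch (not part of the original module): an extension could
# register an extra bundle2 part generator. The part name and payload below
# are hypothetical.
#
#     @getbundle2partsgenerator(b'my-ext-part')
#     def _getbundlemyextpart(bundler, repo, source, b2caps=None, **kwargs):
#         if b2caps and b'my-ext-part' in b2caps:
#             bundler.newpart(b'my-ext-part', data=b'payload')
#
# The generator runs in registration order (or at the index given by `idx`)
# when getbundlechunks() assembles the bundle.
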
def bundle2requested(bundlecaps):
    if bundlecaps is not None:
        return any(cap.startswith(b'HG2') for cap in bundlecaps)
    return False


def getbundlechunks(
    repo, source, heads=None, common=None, bundlecaps=None, **kwargs
):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns a 2-tuple of a dict with metadata about the generated bundle
    and an iterator over raw chunks (of varying sizes).
    """
    kwargs = pycompat.byteskwargs(kwargs)
    info = {}
    usebundle2 = bundle2requested(bundlecaps)
    # bundle10 case
    if not usebundle2:
        if bundlecaps and not kwargs.get(b'cg', True):
            raise ValueError(
                _(b'request for bundle10 must include changegroup')
            )

        if kwargs:
            raise ValueError(
                _(b'unsupported getbundle arguments: %s')
                % b', '.join(sorted(kwargs.keys()))
            )
        outgoing = _computeoutgoing(repo, heads, common)
        info[b'bundleversion'] = 1
        return (
            info,
            changegroup.makestream(
                repo, outgoing, b'01', source, bundlecaps=bundlecaps
            ),
        )

    # bundle20 case
    info[b'bundleversion'] = 2
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith(b'bundle2='):
            blob = urlreq.unquote(bcaps[len(b'bundle2=') :])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    kwargs[b'heads'] = heads
    kwargs[b'common'] = common

    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(
            bundler,
            repo,
            source,
            bundlecaps=bundlecaps,
            b2caps=b2caps,
            **pycompat.strkwargs(kwargs)
        )

    info[b'prefercompressed'] = bundler.prefercompressed

    return info, bundler.getchunks()

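# Illustrative sketch of consuming getbundlechunks() (e.g. on the serving
# side or for local bundling; the output file name is hypothetical):
#
#     info, chunks = getbundlechunks(repo, b'bundle', heads=repo.heads())
#     with open('out.hg', 'wb') as fh:
#         for chunk in chunks:
#             fh.write(chunk)
#
# `info[b'bundleversion']` tells the caller whether a bundle1 (1) or
# bundle2 (2) payload was generated.
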
2507 @getbundle2partsgenerator(b'stream2')
2310 @getbundle2partsgenerator(b'stream2')
2508 def _getbundlestream2(bundler, repo, *args, **kwargs):
2311 def _getbundlestream2(bundler, repo, *args, **kwargs):
2509 return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
2312 return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
2510
2313
2511
2314
2512 @getbundle2partsgenerator(b'changegroup')
2315 @getbundle2partsgenerator(b'changegroup')
2513 def _getbundlechangegrouppart(
2316 def _getbundlechangegrouppart(
2514 bundler,
2317 bundler,
2515 repo,
2318 repo,
2516 source,
2319 source,
2517 bundlecaps=None,
2320 bundlecaps=None,
2518 b2caps=None,
2321 b2caps=None,
2519 heads=None,
2322 heads=None,
2520 common=None,
2323 common=None,
2521 **kwargs
2324 **kwargs
2522 ):
2325 ):
2523 """add a changegroup part to the requested bundle"""
2326 """add a changegroup part to the requested bundle"""
2524 if not kwargs.get('cg', True) or not b2caps:
2327 if not kwargs.get('cg', True) or not b2caps:
2525 return
2328 return
2526
2329
2527 version = b'01'
2330 version = b'01'
2528 cgversions = b2caps.get(b'changegroup')
2331 cgversions = b2caps.get(b'changegroup')
2529 if cgversions: # 3.1 and 3.2 ship with an empty value
2332 if cgversions: # 3.1 and 3.2 ship with an empty value
2530 cgversions = [
2333 cgversions = [
2531 v
2334 v
2532 for v in cgversions
2335 for v in cgversions
2533 if v in changegroup.supportedoutgoingversions(repo)
2336 if v in changegroup.supportedoutgoingversions(repo)
2534 ]
2337 ]
2535 if not cgversions:
2338 if not cgversions:
2536 raise error.Abort(_(b'no common changegroup version'))
2339 raise error.Abort(_(b'no common changegroup version'))
2537 version = max(cgversions)
2340 version = max(cgversions)
2538
2341
2539 outgoing = _computeoutgoing(repo, heads, common)
2342 outgoing = _computeoutgoing(repo, heads, common)
2540 if not outgoing.missing:
2343 if not outgoing.missing:
2541 return
2344 return
2542
2345
2543 if kwargs.get('narrow', False):
2346 if kwargs.get('narrow', False):
2544 include = sorted(filter(bool, kwargs.get('includepats', [])))
2347 include = sorted(filter(bool, kwargs.get('includepats', [])))
2545 exclude = sorted(filter(bool, kwargs.get('excludepats', [])))
2348 exclude = sorted(filter(bool, kwargs.get('excludepats', [])))
2546 matcher = narrowspec.match(repo.root, include=include, exclude=exclude)
2349 matcher = narrowspec.match(repo.root, include=include, exclude=exclude)
2547 else:
2350 else:
2548 matcher = None
2351 matcher = None
2549
2352
2550 cgstream = changegroup.makestream(
2353 cgstream = changegroup.makestream(
2551 repo, outgoing, version, source, bundlecaps=bundlecaps, matcher=matcher
2354 repo, outgoing, version, source, bundlecaps=bundlecaps, matcher=matcher
2552 )
2355 )
2553
2356
2554 part = bundler.newpart(b'changegroup', data=cgstream)
2357 part = bundler.newpart(b'changegroup', data=cgstream)
2555 if cgversions:
2358 if cgversions:
2556 part.addparam(b'version', version)
2359 part.addparam(b'version', version)
2557
2360
2558 part.addparam(b'nbchanges', b'%d' % len(outgoing.missing), mandatory=False)
2361 part.addparam(b'nbchanges', b'%d' % len(outgoing.missing), mandatory=False)
2559
2362
2560 if scmutil.istreemanifest(repo):
2363 if scmutil.istreemanifest(repo):
2561 part.addparam(b'treemanifest', b'1')
2364 part.addparam(b'treemanifest', b'1')
2562
2365
2563 if b'exp-sidedata-flag' in repo.requirements:
2366 if b'exp-sidedata-flag' in repo.requirements:
2564 part.addparam(b'exp-sidedata', b'1')
2367 part.addparam(b'exp-sidedata', b'1')
2565
2368
2566 if (
2369 if (
2567 kwargs.get('narrow', False)
2370 kwargs.get('narrow', False)
2568 and kwargs.get('narrow_acl', False)
2371 and kwargs.get('narrow_acl', False)
2569 and (include or exclude)
2372 and (include or exclude)
2570 ):
2373 ):
2571 # this is mandatory because otherwise ACL clients won't work
2374 # this is mandatory because otherwise ACL clients won't work
2572 narrowspecpart = bundler.newpart(b'Narrow:responsespec')
2375 narrowspecpart = bundler.newpart(b'Narrow:responsespec')
2573 narrowspecpart.data = b'%s\0%s' % (
2376 narrowspecpart.data = b'%s\0%s' % (
2574 b'\n'.join(include),
2377 b'\n'.join(include),
2575 b'\n'.join(exclude),
2378 b'\n'.join(exclude),
2576 )
2379 )
2577
2380
2578
2381
2579 @getbundle2partsgenerator(b'bookmarks')
2382 @getbundle2partsgenerator(b'bookmarks')
2580 def _getbundlebookmarkpart(
2383 def _getbundlebookmarkpart(
2581 bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
2384 bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
2582 ):
2385 ):
2583 """add a bookmark part to the requested bundle"""
2386 """add a bookmark part to the requested bundle"""
2584 if not kwargs.get('bookmarks', False):
2387 if not kwargs.get('bookmarks', False):
2585 return
2388 return
2586 if not b2caps or b'bookmarks' not in b2caps:
2389 if not b2caps or b'bookmarks' not in b2caps:
2587 raise error.Abort(_(b'no common bookmarks exchange method'))
2390 raise error.Abort(_(b'no common bookmarks exchange method'))
2588 books = bookmod.listbinbookmarks(repo)
2391 books = bookmod.listbinbookmarks(repo)
2589 data = bookmod.binaryencode(books)
2392 data = bookmod.binaryencode(books)
2590 if data:
2393 if data:
2591 bundler.newpart(b'bookmarks', data=data)
2394 bundler.newpart(b'bookmarks', data=data)
2592
2395
2593
2396
2594 @getbundle2partsgenerator(b'listkeys')
2397 @getbundle2partsgenerator(b'listkeys')
2595 def _getbundlelistkeysparts(
2398 def _getbundlelistkeysparts(
2596 bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
2399 bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
2597 ):
2400 ):
2598 """add parts containing listkeys namespaces to the requested bundle"""
2401 """add parts containing listkeys namespaces to the requested bundle"""
2599 listkeys = kwargs.get('listkeys', ())
2402 listkeys = kwargs.get('listkeys', ())
2600 for namespace in listkeys:
2403 for namespace in listkeys:
2601 part = bundler.newpart(b'listkeys')
2404 part = bundler.newpart(b'listkeys')
2602 part.addparam(b'namespace', namespace)
2405 part.addparam(b'namespace', namespace)
2603 keys = repo.listkeys(namespace).items()
2406 keys = repo.listkeys(namespace).items()
2604 part.data = pushkey.encodekeys(keys)
2407 part.data = pushkey.encodekeys(keys)
2605
2408
2606
2409
2607 @getbundle2partsgenerator(b'obsmarkers')
2410 @getbundle2partsgenerator(b'obsmarkers')
2608 def _getbundleobsmarkerpart(
2411 def _getbundleobsmarkerpart(
2609 bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
2412 bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
2610 ):
2413 ):
2611 """add an obsolescence markers part to the requested bundle"""
2414 """add an obsolescence markers part to the requested bundle"""
2612 if kwargs.get('obsmarkers', False):
2415 if kwargs.get('obsmarkers', False):
2613 if heads is None:
2416 if heads is None:
2614 heads = repo.heads()
2417 heads = repo.heads()
2615 subset = [c.node() for c in repo.set(b'::%ln', heads)]
2418 subset = [c.node() for c in repo.set(b'::%ln', heads)]
2616 markers = repo.obsstore.relevantmarkers(subset)
2419 markers = repo.obsstore.relevantmarkers(subset)
2617 markers = obsutil.sortedmarkers(markers)
2420 markers = obsutil.sortedmarkers(markers)
2618 bundle2.buildobsmarkerspart(bundler, markers)
2421 bundle2.buildobsmarkerspart(bundler, markers)
2619
2422
2620
2423
2621 @getbundle2partsgenerator(b'phases')
2424 @getbundle2partsgenerator(b'phases')
2622 def _getbundlephasespart(
2425 def _getbundlephasespart(
2623 bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
2426 bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
2624 ):
2427 ):
2625 """add phase heads part to the requested bundle"""
2428 """add phase heads part to the requested bundle"""
2626 if kwargs.get('phases', False):
2429 if kwargs.get('phases', False):
2627 if not b2caps or b'heads' not in b2caps.get(b'phases'):
2430 if not b2caps or b'heads' not in b2caps.get(b'phases'):
2628 raise error.Abort(_(b'no common phases exchange method'))
2431 raise error.Abort(_(b'no common phases exchange method'))
2629 if heads is None:
2432 if heads is None:
2630 heads = repo.heads()
2433 heads = repo.heads()
2631
2434
2632 headsbyphase = collections.defaultdict(set)
2435 headsbyphase = collections.defaultdict(set)
2633 if repo.publishing():
2436 if repo.publishing():
2634 headsbyphase[phases.public] = heads
2437 headsbyphase[phases.public] = heads
2635 else:
2438 else:
2636 # find the appropriate heads to move
2439 # find the appropriate heads to move
2637
2440
2638 phase = repo._phasecache.phase
2441 phase = repo._phasecache.phase
2639 node = repo.changelog.node
2442 node = repo.changelog.node
2640 rev = repo.changelog.rev
2443 rev = repo.changelog.rev
2641 for h in heads:
2444 for h in heads:
2642 headsbyphase[phase(repo, rev(h))].add(h)
2445 headsbyphase[phase(repo, rev(h))].add(h)
2643 seenphases = list(headsbyphase.keys())
2446 seenphases = list(headsbyphase.keys())
2644
2447
2645 # We do not handle anything but public and draft phase for now)
2448 # We do not handle anything but public and draft phase for now)
2646 if seenphases:
2449 if seenphases:
2647 assert max(seenphases) <= phases.draft
2450 assert max(seenphases) <= phases.draft
2648
2451
2649 # if client is pulling non-public changesets, we need to find
2452 # if client is pulling non-public changesets, we need to find
2650 # intermediate public heads.
2453 # intermediate public heads.
2651 draftheads = headsbyphase.get(phases.draft, set())
2454 draftheads = headsbyphase.get(phases.draft, set())
2652 if draftheads:
2455 if draftheads:
2653 publicheads = headsbyphase.get(phases.public, set())
2456 publicheads = headsbyphase.get(phases.public, set())
2654
2457
2655 revset = b'heads(only(%ln, %ln) and public())'
2458 revset = b'heads(only(%ln, %ln) and public())'
2656 extraheads = repo.revs(revset, draftheads, publicheads)
2459 extraheads = repo.revs(revset, draftheads, publicheads)
2657 for r in extraheads:
2460 for r in extraheads:
2658 headsbyphase[phases.public].add(node(r))
2461 headsbyphase[phases.public].add(node(r))
2659
2462
2660 # transform data in a format used by the encoding function
2463 # transform data in a format used by the encoding function
2661 phasemapping = {
2464 phasemapping = {
2662 phase: sorted(headsbyphase[phase]) for phase in phases.allphases
2465 phase: sorted(headsbyphase[phase]) for phase in phases.allphases
2663 }
2466 }
2664
2467
2665 # generate the actual part
2468 # generate the actual part
2666 phasedata = phases.binaryencode(phasemapping)
2469 phasedata = phases.binaryencode(phasemapping)
2667 bundler.newpart(b'phase-heads', data=phasedata)
2470 bundler.newpart(b'phase-heads', data=phasedata)
2668
2471
2669
2472
2670 @getbundle2partsgenerator(b'hgtagsfnodes')
2473 @getbundle2partsgenerator(b'hgtagsfnodes')
2671 def _getbundletagsfnodes(
2474 def _getbundletagsfnodes(
2672 bundler,
2475 bundler,
2673 repo,
2476 repo,
2674 source,
2477 source,
2675 bundlecaps=None,
2478 bundlecaps=None,
2676 b2caps=None,
2479 b2caps=None,
2677 heads=None,
2480 heads=None,
2678 common=None,
2481 common=None,
2679 **kwargs
2482 **kwargs
2680 ):
2483 ):
2681 """Transfer the .hgtags filenodes mapping.
2484 """Transfer the .hgtags filenodes mapping.
2682
2485
2683 Only values for heads in this bundle will be transferred.
2486 Only values for heads in this bundle will be transferred.
2684
2487
2685 The part data consists of pairs of 20 byte changeset node and .hgtags
2488 The part data consists of pairs of 20 byte changeset node and .hgtags
2686 filenodes raw values.
2489 filenodes raw values.
2687 """
2490 """
2688 # Don't send unless:
2491 # Don't send unless:
2689 # - changeset are being exchanged,
2492 # - changeset are being exchanged,
2690 # - the client supports it.
2493 # - the client supports it.
2691 if not b2caps or not (kwargs.get('cg', True) and b'hgtagsfnodes' in b2caps):
2494 if not b2caps or not (kwargs.get('cg', True) and b'hgtagsfnodes' in b2caps):
2692 return
2495 return
2693
2496
2694 outgoing = _computeoutgoing(repo, heads, common)
2497 outgoing = _computeoutgoing(repo, heads, common)
2695 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
2498 bundle2.addparttagsfnodescache(repo, bundler, outgoing)
2696
2499
2697
2500
2698 @getbundle2partsgenerator(b'cache:rev-branch-cache')
2501 @getbundle2partsgenerator(b'cache:rev-branch-cache')
2699 def _getbundlerevbranchcache(
2502 def _getbundlerevbranchcache(
2700 bundler,
2503 bundler,
2701 repo,
2504 repo,
2702 source,
2505 source,
2703 bundlecaps=None,
2506 bundlecaps=None,
2704 b2caps=None,
2507 b2caps=None,
2705 heads=None,
2508 heads=None,
2706 common=None,
2509 common=None,
2707 **kwargs
2510 **kwargs
2708 ):
2511 ):
2709 """Transfer the rev-branch-cache mapping
2512 """Transfer the rev-branch-cache mapping
2710
2513
2711 The payload is a series of data related to each branch
2514 The payload is a series of data related to each branch
2712
2515
2713 1) branch name length
2516 1) branch name length
2714 2) number of open heads
2517 2) number of open heads
2715 3) number of closed heads
2518 3) number of closed heads
2716 4) open heads nodes
2519 4) open heads nodes
2717 5) closed heads nodes
2520 5) closed heads nodes
2718 """
2521 """
2719 # Don't send unless:
2522 # Don't send unless:
2720 # - changeset are being exchanged,
2523 # - changeset are being exchanged,
2721 # - the client supports it.
2524 # - the client supports it.
2722 # - narrow bundle isn't in play (not currently compatible).
2525 # - narrow bundle isn't in play (not currently compatible).
2723 if (
2526 if (
2724 not kwargs.get('cg', True)
2527 not kwargs.get('cg', True)
2725 or not b2caps
2528 or not b2caps
2726 or b'rev-branch-cache' not in b2caps
2529 or b'rev-branch-cache' not in b2caps
2727 or kwargs.get('narrow', False)
2530 or kwargs.get('narrow', False)
2728 or repo.ui.has_section(_NARROWACL_SECTION)
2531 or repo.ui.has_section(_NARROWACL_SECTION)
2729 ):
2532 ):
2730 return
2533 return
2731
2534
2732 outgoing = _computeoutgoing(repo, heads, common)
2535 outgoing = _computeoutgoing(repo, heads, common)
2733 bundle2.addpartrevbranchcache(repo, bundler, outgoing)
2536 bundle2.addpartrevbranchcache(repo, bundler, outgoing)
2734
2537
2735
2538
2736 def check_heads(repo, their_heads, context):
2539 def check_heads(repo, their_heads, context):
2737 """check if the heads of a repo have been modified
2540 """check if the heads of a repo have been modified
2738
2541
2739 Used by peer for unbundling.
2542 Used by peer for unbundling.
2740 """
2543 """
2741 heads = repo.heads()
2544 heads = repo.heads()
2742 heads_hash = hashutil.sha1(b''.join(sorted(heads))).digest()
2545 heads_hash = hashutil.sha1(b''.join(sorted(heads))).digest()
2743 if not (
2546 if not (
2744 their_heads == [b'force']
2547 their_heads == [b'force']
2745 or their_heads == heads
2548 or their_heads == heads
2746 or their_heads == [b'hashed', heads_hash]
2549 or their_heads == [b'hashed', heads_hash]
2747 ):
2550 ):
2748 # someone else committed/pushed/unbundled while we
2551 # someone else committed/pushed/unbundled while we
2749 # were transferring data
2552 # were transferring data
2750 raise error.PushRaced(
2553 raise error.PushRaced(
2751 b'repository changed while %s - please try again' % context
2554 b'repository changed while %s - please try again' % context
2752 )
2555 )
2753
2556
2754
2557
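For context, the b'hashed' form accepted below lets a client send a single digest instead of its full head list. A minimal sketch of how such a value can be built on the client side, reusing the same sha1-over-sorted-heads scheme as check_heads (observed_heads is a hypothetical list of 20-byte binary head nodes the client saw earlier):

observed_heads = sorted(observed_heads)
their_heads = [b'hashed', hashutil.sha1(b''.join(observed_heads)).digest()]
# check_heads() recomputes the digest over the server's current heads and
# raises PushRaced if the repository changed in the meantime.
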
2755 def unbundle(repo, cg, heads, source, url):
2558 def unbundle(repo, cg, heads, source, url):
2756 """Apply a bundle to a repo.
2559 """Apply a bundle to a repo.
2757
2560
2758 this function makes sure the repo is locked during the application and has a
2561 this function makes sure the repo is locked during the application and has a
2759 mechanism to check that no push race occurred between the creation of the
2562 mechanism to check that no push race occurred between the creation of the
2760 bundle and its application.
2563 bundle and its application.
2761
2564
2762 If the push was raced, a PushRaced exception is raised."""
2565 If the push was raced, a PushRaced exception is raised."""
2763 r = 0
2566 r = 0
2764 # need a transaction when processing a bundle2 stream
2567 # need a transaction when processing a bundle2 stream
2765 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
2568 # [wlock, lock, tr] - needs to be an array so nested functions can modify it
2766 lockandtr = [None, None, None]
2569 lockandtr = [None, None, None]
2767 recordout = None
2570 recordout = None
2768 # quick fix for output mismatch with bundle2 in 3.4
2571 # quick fix for output mismatch with bundle2 in 3.4
2769 captureoutput = repo.ui.configbool(
2572 captureoutput = repo.ui.configbool(
2770 b'experimental', b'bundle2-output-capture'
2573 b'experimental', b'bundle2-output-capture'
2771 )
2574 )
2772 if url.startswith(b'remote:http:') or url.startswith(b'remote:https:'):
2575 if url.startswith(b'remote:http:') or url.startswith(b'remote:https:'):
2773 captureoutput = True
2576 captureoutput = True
2774 try:
2577 try:
2775 # note: outside bundle1, 'heads' is expected to be empty and this
2578 # note: outside bundle1, 'heads' is expected to be empty and this
2776 # 'check_heads' call will be a no-op
2579 # 'check_heads' call will be a no-op
2777 check_heads(repo, heads, b'uploading changes')
2580 check_heads(repo, heads, b'uploading changes')
2778 # push can proceed
2581 # push can proceed
2779 if not isinstance(cg, bundle2.unbundle20):
2582 if not isinstance(cg, bundle2.unbundle20):
2780 # legacy case: bundle1 (changegroup 01)
2583 # legacy case: bundle1 (changegroup 01)
2781 txnname = b"\n".join([source, util.hidepassword(url)])
2584 txnname = b"\n".join([source, util.hidepassword(url)])
2782 with repo.lock(), repo.transaction(txnname) as tr:
2585 with repo.lock(), repo.transaction(txnname) as tr:
2783 op = bundle2.applybundle(repo, cg, tr, source, url)
2586 op = bundle2.applybundle(repo, cg, tr, source, url)
2784 r = bundle2.combinechangegroupresults(op)
2587 r = bundle2.combinechangegroupresults(op)
2785 else:
2588 else:
2786 r = None
2589 r = None
2787 try:
2590 try:
2788
2591
2789 def gettransaction():
2592 def gettransaction():
2790 if not lockandtr[2]:
2593 if not lockandtr[2]:
2791 if not bookmod.bookmarksinstore(repo):
2594 if not bookmod.bookmarksinstore(repo):
2792 lockandtr[0] = repo.wlock()
2595 lockandtr[0] = repo.wlock()
2793 lockandtr[1] = repo.lock()
2596 lockandtr[1] = repo.lock()
2794 lockandtr[2] = repo.transaction(source)
2597 lockandtr[2] = repo.transaction(source)
2795 lockandtr[2].hookargs[b'source'] = source
2598 lockandtr[2].hookargs[b'source'] = source
2796 lockandtr[2].hookargs[b'url'] = url
2599 lockandtr[2].hookargs[b'url'] = url
2797 lockandtr[2].hookargs[b'bundle2'] = b'1'
2600 lockandtr[2].hookargs[b'bundle2'] = b'1'
2798 return lockandtr[2]
2601 return lockandtr[2]
2799
2602
2800 # Do greedy locking by default until we're satisfied with lazy
2603 # Do greedy locking by default until we're satisfied with lazy
2801 # locking.
2604 # locking.
2802 if not repo.ui.configbool(
2605 if not repo.ui.configbool(
2803 b'experimental', b'bundle2lazylocking'
2606 b'experimental', b'bundle2lazylocking'
2804 ):
2607 ):
2805 gettransaction()
2608 gettransaction()
2806
2609
2807 op = bundle2.bundleoperation(
2610 op = bundle2.bundleoperation(
2808 repo,
2611 repo,
2809 gettransaction,
2612 gettransaction,
2810 captureoutput=captureoutput,
2613 captureoutput=captureoutput,
2811 source=b'push',
2614 source=b'push',
2812 )
2615 )
2813 try:
2616 try:
2814 op = bundle2.processbundle(repo, cg, op=op)
2617 op = bundle2.processbundle(repo, cg, op=op)
2815 finally:
2618 finally:
2816 r = op.reply
2619 r = op.reply
2817 if captureoutput and r is not None:
2620 if captureoutput and r is not None:
2818 repo.ui.pushbuffer(error=True, subproc=True)
2621 repo.ui.pushbuffer(error=True, subproc=True)
2819
2622
2820 def recordout(output):
2623 def recordout(output):
2821 r.newpart(b'output', data=output, mandatory=False)
2624 r.newpart(b'output', data=output, mandatory=False)
2822
2625
2823 if lockandtr[2] is not None:
2626 if lockandtr[2] is not None:
2824 lockandtr[2].close()
2627 lockandtr[2].close()
2825 except BaseException as exc:
2628 except BaseException as exc:
2826 exc.duringunbundle2 = True
2629 exc.duringunbundle2 = True
2827 if captureoutput and r is not None:
2630 if captureoutput and r is not None:
2828 parts = exc._bundle2salvagedoutput = r.salvageoutput()
2631 parts = exc._bundle2salvagedoutput = r.salvageoutput()
2829
2632
2830 def recordout(output):
2633 def recordout(output):
2831 part = bundle2.bundlepart(
2634 part = bundle2.bundlepart(
2832 b'output', data=output, mandatory=False
2635 b'output', data=output, mandatory=False
2833 )
2636 )
2834 parts.append(part)
2637 parts.append(part)
2835
2638
2836 raise
2639 raise
2837 finally:
2640 finally:
2838 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
2641 lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
2839 if recordout is not None:
2642 if recordout is not None:
2840 recordout(repo.ui.popbuffer())
2643 recordout(repo.ui.popbuffer())
2841 return r
2644 return r
2842
2645
2843
2646
2844 def _maybeapplyclonebundle(pullop):
2647 def _maybeapplyclonebundle(pullop):
2845 """Apply a clone bundle from a remote, if possible."""
2648 """Apply a clone bundle from a remote, if possible."""
2846
2649
2847 repo = pullop.repo
2650 repo = pullop.repo
2848 remote = pullop.remote
2651 remote = pullop.remote
2849
2652
2850 if not repo.ui.configbool(b'ui', b'clonebundles'):
2653 if not repo.ui.configbool(b'ui', b'clonebundles'):
2851 return
2654 return
2852
2655
2853 # Only run if local repo is empty.
2656 # Only run if local repo is empty.
2854 if len(repo):
2657 if len(repo):
2855 return
2658 return
2856
2659
2857 if pullop.heads:
2660 if pullop.heads:
2858 return
2661 return
2859
2662
2860 if not remote.capable(b'clonebundles'):
2663 if not remote.capable(b'clonebundles'):
2861 return
2664 return
2862
2665
2863 with remote.commandexecutor() as e:
2666 with remote.commandexecutor() as e:
2864 res = e.callcommand(b'clonebundles', {}).result()
2667 res = e.callcommand(b'clonebundles', {}).result()
2865
2668
2866 # If we call the wire protocol command, that's good enough to record the
2669 # If we call the wire protocol command, that's good enough to record the
2867 # attempt.
2670 # attempt.
2868 pullop.clonebundleattempted = True
2671 pullop.clonebundleattempted = True
2869
2672
2870 entries = parseclonebundlesmanifest(repo, res)
2673 entries = bundlecaches.parseclonebundlesmanifest(repo, res)
2871 if not entries:
2674 if not entries:
2872 repo.ui.note(
2675 repo.ui.note(
2873 _(
2676 _(
2874 b'no clone bundles available on remote; '
2677 b'no clone bundles available on remote; '
2875 b'falling back to regular clone\n'
2678 b'falling back to regular clone\n'
2876 )
2679 )
2877 )
2680 )
2878 return
2681 return
2879
2682
2880 entries = filterclonebundleentries(
2683 entries = bundlecaches.filterclonebundleentries(
2881 repo, entries, streamclonerequested=pullop.streamclonerequested
2684 repo, entries, streamclonerequested=pullop.streamclonerequested
2882 )
2685 )
2883
2686
2884 if not entries:
2687 if not entries:
2885 # There is a thundering herd concern here. However, if a server
2688 # There is a thundering herd concern here. However, if a server
2886 # operator doesn't advertise bundles appropriate for its clients,
2689 # operator doesn't advertise bundles appropriate for its clients,
2887 # they deserve what's coming. Furthermore, from a client's
2690 # they deserve what's coming. Furthermore, from a client's
2888 # perspective, no automatic fallback would mean not being able to
2691 # perspective, no automatic fallback would mean not being able to
2889 # clone!
2692 # clone!
2890 repo.ui.warn(
2693 repo.ui.warn(
2891 _(
2694 _(
2892 b'no compatible clone bundles available on server; '
2695 b'no compatible clone bundles available on server; '
2893 b'falling back to regular clone\n'
2696 b'falling back to regular clone\n'
2894 )
2697 )
2895 )
2698 )
2896 repo.ui.warn(
2699 repo.ui.warn(
2897 _(b'(you may want to report this to the server operator)\n')
2700 _(b'(you may want to report this to the server operator)\n')
2898 )
2701 )
2899 return
2702 return
2900
2703
2901 entries = sortclonebundleentries(repo.ui, entries)
2704 entries = bundlecaches.sortclonebundleentries(repo.ui, entries)
2902
2705
2903 url = entries[0][b'URL']
2706 url = entries[0][b'URL']
2904 repo.ui.status(_(b'applying clone bundle from %s\n') % url)
2707 repo.ui.status(_(b'applying clone bundle from %s\n') % url)
2905 if trypullbundlefromurl(repo.ui, repo, url):
2708 if trypullbundlefromurl(repo.ui, repo, url):
2906 repo.ui.status(_(b'finished applying clone bundle\n'))
2709 repo.ui.status(_(b'finished applying clone bundle\n'))
2907 # Bundle failed.
2710 # Bundle failed.
2908 #
2711 #
2909 # We abort by default to avoid the thundering herd of
2712 # We abort by default to avoid the thundering herd of
2910 # clients flooding a server that was expecting expensive
2713 # clients flooding a server that was expecting expensive
2911 # clone load to be offloaded.
2714 # clone load to be offloaded.
2912 elif repo.ui.configbool(b'ui', b'clonebundlefallback'):
2715 elif repo.ui.configbool(b'ui', b'clonebundlefallback'):
2913 repo.ui.warn(_(b'falling back to normal clone\n'))
2716 repo.ui.warn(_(b'falling back to normal clone\n'))
2914 else:
2717 else:
2915 raise error.Abort(
2718 raise error.Abort(
2916 _(b'error applying bundle'),
2719 _(b'error applying bundle'),
2917 hint=_(
2720 hint=_(
2918 b'if this error persists, consider contacting '
2721 b'if this error persists, consider contacting '
2919 b'the server operator or disable clone '
2722 b'the server operator or disable clone '
2920 b'bundles via '
2723 b'bundles via '
2921 b'"--config ui.clonebundles=false"'
2724 b'"--config ui.clonebundles=false"'
2922 ),
2725 ),
2923 )
2726 )
2924
2727
2925
2728
2926 def parseclonebundlesmanifest(repo, s):
2927 """Parses the raw text of a clone bundles manifest.
2928
2929 Returns a list of dicts. The dicts have a ``URL`` key corresponding
2930 to the URL and other keys are the attributes for the entry.
2931 """
2932 m = []
2933 for line in s.splitlines():
2934 fields = line.split()
2935 if not fields:
2936 continue
2937 attrs = {b'URL': fields[0]}
2938 for rawattr in fields[1:]:
2939 key, value = rawattr.split(b'=', 1)
2940 key = urlreq.unquote(key)
2941 value = urlreq.unquote(value)
2942 attrs[key] = value
2943
2944 # Parse BUNDLESPEC into components. This makes client-side
2945 # preferences easier to specify since you can prefer a single
2946 # component of the BUNDLESPEC.
2947 if key == b'BUNDLESPEC':
2948 try:
2949 bundlespec = parsebundlespec(repo, value)
2950 attrs[b'COMPRESSION'] = bundlespec.compression
2951 attrs[b'VERSION'] = bundlespec.version
2952 except error.InvalidBundleSpecification:
2953 pass
2954 except error.UnsupportedBundleSpecification:
2955 pass
2956
2957 m.append(attrs)
2958
2959 return m
2960
2961
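To make the manifest format concrete, a hypothetical clonebundles.manifest with one entry (one entry per line, whitespace-separated, URL first, then URL-encoded key=value attributes) and roughly what the parser returns for it:

raw = b'https://hg.example.com/full.zstd-v2.hg BUNDLESPEC=zstd-v2 REQUIREDRAM=1GB\n'
# parseclonebundlesmanifest(repo, raw) would return roughly:
# [{b'URL': b'https://hg.example.com/full.zstd-v2.hg',
#   b'BUNDLESPEC': b'zstd-v2', b'COMPRESSION': b'zstd', b'VERSION': b'v2',
#   b'REQUIREDRAM': b'1GB'}]
# COMPRESSION and VERSION are derived from BUNDLESPEC so client-side
# preferences can target a single component of the spec.
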
2962 def isstreamclonespec(bundlespec):
2963 # Stream clone v1
2964 if bundlespec.wirecompression == b'UN' and bundlespec.wireversion == b's1':
2965 return True
2966
2967 # Stream clone v2
2968 if (
2969 bundlespec.wirecompression == b'UN'
2970 and bundlespec.wireversion == b'02'
2971 and bundlespec.contentopts.get(b'streamv2')
2972 ):
2973 return True
2974
2975 return False
2976
2977
2978 def filterclonebundleentries(repo, entries, streamclonerequested=False):
2979 """Remove incompatible clone bundle manifest entries.
2980
2981 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
2982 and returns a new list consisting of only the entries that this client
2983 should be able to apply.
2984
2985 There is no guarantee we'll be able to apply all returned entries because
2986 the metadata we use to filter on may be missing or wrong.
2987 """
2988 newentries = []
2989 for entry in entries:
2990 spec = entry.get(b'BUNDLESPEC')
2991 if spec:
2992 try:
2993 bundlespec = parsebundlespec(repo, spec, strict=True)
2994
2995 # If a stream clone was requested, filter out non-streamclone
2996 # entries.
2997 if streamclonerequested and not isstreamclonespec(bundlespec):
2998 repo.ui.debug(
2999 b'filtering %s because not a stream clone\n'
3000 % entry[b'URL']
3001 )
3002 continue
3003
3004 except error.InvalidBundleSpecification as e:
3005 repo.ui.debug(stringutil.forcebytestr(e) + b'\n')
3006 continue
3007 except error.UnsupportedBundleSpecification as e:
3008 repo.ui.debug(
3009 b'filtering %s because unsupported bundle '
3010 b'spec: %s\n' % (entry[b'URL'], stringutil.forcebytestr(e))
3011 )
3012 continue
3013 # If we don't have a spec and requested a stream clone, we don't know
3014 # what the entry is so don't attempt to apply it.
3015 elif streamclonerequested:
3016 repo.ui.debug(
3017 b'filtering %s because cannot determine if a stream '
3018 b'clone bundle\n' % entry[b'URL']
3019 )
3020 continue
3021
3022 if b'REQUIRESNI' in entry and not sslutil.hassni:
3023 repo.ui.debug(
3024 b'filtering %s because SNI not supported\n' % entry[b'URL']
3025 )
3026 continue
3027
3028 if b'REQUIREDRAM' in entry:
3029 try:
3030 requiredram = util.sizetoint(entry[b'REQUIREDRAM'])
3031 except error.ParseError:
3032 repo.ui.debug(
3033 b'filtering %s due to a bad REQUIREDRAM attribute\n'
3034 % entry[b'URL']
3035 )
3036 continue
3037 actualram = repo.ui.estimatememory()
3038 if actualram is not None and actualram * 0.66 < requiredram:
3039 repo.ui.debug(
3040 b'filtering %s as it needs more than 2/3 of system memory\n'
3041 % entry[b'URL']
3042 )
3043 continue
3044
3045 newentries.append(entry)
3046
3047 return newentries
3048
3049
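A worked instance of the REQUIREDRAM rule above, with hypothetical numbers: an entry advertising REQUIREDRAM=12GB is dropped on a host whose estimated memory is 8 GiB, because it would need more than two thirds of system memory.

requiredram = 12 * 1024 ** 3           # roughly what util.sizetoint(b'12GB') yields
actualram = 8 * 1024 ** 3              # what repo.ui.estimatememory() might report
assert actualram * 0.66 < requiredram  # so the entry is filtered out
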
3050 class clonebundleentry(object):
3051 """Represents an item in a clone bundles manifest.
3052
3053 This rich class is needed to support sorting since sorted() in Python 3
3054 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
3055 won't work.
3056 """
3057
3058 def __init__(self, value, prefers):
3059 self.value = value
3060 self.prefers = prefers
3061
3062 def _cmp(self, other):
3063 for prefkey, prefvalue in self.prefers:
3064 avalue = self.value.get(prefkey)
3065 bvalue = other.value.get(prefkey)
3066
3067 # Special case: b is missing the attribute and a matches exactly.
3068 if avalue is not None and bvalue is None and avalue == prefvalue:
3069 return -1
3070
3071 # Special case: a is missing the attribute and b matches exactly.
3072 if bvalue is not None and avalue is None and bvalue == prefvalue:
3073 return 1
3074
3075 # We can't compare unless attribute present on both.
3076 if avalue is None or bvalue is None:
3077 continue
3078
3079 # Same values should fall back to next attribute.
3080 if avalue == bvalue:
3081 continue
3082
3083 # Exact matches come first.
3084 if avalue == prefvalue:
3085 return -1
3086 if bvalue == prefvalue:
3087 return 1
3088
3089 # Fall back to next attribute.
3090 continue
3091
3092 # If we got here we couldn't sort by attributes and prefers. Fall
3093 # back to index order.
3094 return 0
3095
3096 def __lt__(self, other):
3097 return self._cmp(other) < 0
3098
3099 def __gt__(self, other):
3100 return self._cmp(other) > 0
3101
3102 def __eq__(self, other):
3103 return self._cmp(other) == 0
3104
3105 def __le__(self, other):
3106 return self._cmp(other) <= 0
3107
3108 def __ge__(self, other):
3109 return self._cmp(other) >= 0
3110
3111 def __ne__(self, other):
3112 return self._cmp(other) != 0
3113
3114
3115 def sortclonebundleentries(ui, entries):
3116 prefers = ui.configlist(b'ui', b'clonebundleprefers')
3117 if not prefers:
3118 return list(entries)
3119
3120 def _split(p):
3121 if b'=' not in p:
3122 hint = _(b"each comma separated item should be key=value pairs")
3123 raise error.Abort(
3124 _(b"invalid ui.clonebundleprefers item: %s") % p, hint=hint
3125 )
3126 return p.split(b'=', 1)
3127
3128 prefers = [_split(p) for p in prefers]
3129
3130 items = sorted(clonebundleentry(v, prefers) for v in entries)
3131 return [i.value for i in items]
3132
3133
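A small illustration of the preference-driven ordering with hypothetical entries: with ui.clonebundleprefers set to COMPRESSION=zstd, the zstd entry sorts first because it matches the first preference exactly.

entries = [
    {b'URL': b'https://hg.example.com/full.gzip-v2.hg', b'COMPRESSION': b'gzip'},
    {b'URL': b'https://hg.example.com/full.zstd-v2.hg', b'COMPRESSION': b'zstd'},
]
prefers = [(b'COMPRESSION', b'zstd')]
ordered = [i.value for i in sorted(clonebundleentry(v, prefers) for v in entries)]
# ordered[0] is the zstd entry; ties and missing attributes fall back to
# manifest order, as implemented in clonebundleentry._cmp above.
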
3134 def trypullbundlefromurl(ui, repo, url):
2729 def trypullbundlefromurl(ui, repo, url):
3135 """Attempt to apply a bundle from a URL."""
2730 """Attempt to apply a bundle from a URL."""
3136 with repo.lock(), repo.transaction(b'bundleurl') as tr:
2731 with repo.lock(), repo.transaction(b'bundleurl') as tr:
3137 try:
2732 try:
3138 fh = urlmod.open(ui, url)
2733 fh = urlmod.open(ui, url)
3139 cg = readbundle(ui, fh, b'stream')
2734 cg = readbundle(ui, fh, b'stream')
3140
2735
3141 if isinstance(cg, streamclone.streamcloneapplier):
2736 if isinstance(cg, streamclone.streamcloneapplier):
3142 cg.apply(repo)
2737 cg.apply(repo)
3143 else:
2738 else:
3144 bundle2.applybundle(repo, cg, tr, b'clonebundles', url)
2739 bundle2.applybundle(repo, cg, tr, b'clonebundles', url)
3145 return True
2740 return True
3146 except urlerr.httperror as e:
2741 except urlerr.httperror as e:
3147 ui.warn(
2742 ui.warn(
3148 _(b'HTTP error fetching bundle: %s\n')
2743 _(b'HTTP error fetching bundle: %s\n')
3149 % stringutil.forcebytestr(e)
2744 % stringutil.forcebytestr(e)
3150 )
2745 )
3151 except urlerr.urlerror as e:
2746 except urlerr.urlerror as e:
3152 ui.warn(
2747 ui.warn(
3153 _(b'error fetching bundle: %s\n')
2748 _(b'error fetching bundle: %s\n')
3154 % stringutil.forcebytestr(e.reason)
2749 % stringutil.forcebytestr(e.reason)
3155 )
2750 )
3156
2751
3157 return False
2752 return False
@@ -1,748 +1,749 b''
1 # wireprotov1server.py - Wire protocol version 1 server functionality
1 # wireprotov1server.py - Wire protocol version 1 server functionality
2 #
2 #
3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import binascii
10 import binascii
11 import os
11 import os
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 hex,
15 hex,
16 nullid,
16 nullid,
17 )
17 )
18 from .pycompat import getattr
18 from .pycompat import getattr
19
19
20 from . import (
20 from . import (
21 bundle2,
21 bundle2,
22 bundlecaches,
22 changegroup as changegroupmod,
23 changegroup as changegroupmod,
23 discovery,
24 discovery,
24 encoding,
25 encoding,
25 error,
26 error,
26 exchange,
27 exchange,
27 pushkey as pushkeymod,
28 pushkey as pushkeymod,
28 pycompat,
29 pycompat,
29 streamclone,
30 streamclone,
30 util,
31 util,
31 wireprototypes,
32 wireprototypes,
32 )
33 )
33
34
34 from .utils import (
35 from .utils import (
35 procutil,
36 procutil,
36 stringutil,
37 stringutil,
37 )
38 )
38
39
39 urlerr = util.urlerr
40 urlerr = util.urlerr
40 urlreq = util.urlreq
41 urlreq = util.urlreq
41
42
42 bundle2requiredmain = _(b'incompatible Mercurial client; bundle2 required')
43 bundle2requiredmain = _(b'incompatible Mercurial client; bundle2 required')
43 bundle2requiredhint = _(
44 bundle2requiredhint = _(
44 b'see https://www.mercurial-scm.org/wiki/IncompatibleClient'
45 b'see https://www.mercurial-scm.org/wiki/IncompatibleClient'
45 )
46 )
46 bundle2required = b'%s\n(%s)\n' % (bundle2requiredmain, bundle2requiredhint)
47 bundle2required = b'%s\n(%s)\n' % (bundle2requiredmain, bundle2requiredhint)
47
48
48
49
49 def clientcompressionsupport(proto):
50 def clientcompressionsupport(proto):
50 """Returns a list of compression methods supported by the client.
51 """Returns a list of compression methods supported by the client.
51
52
52 Returns a list of the compression methods supported by the client
53 Returns a list of the compression methods supported by the client
53 according to the protocol capabilities. If no such capability has
54 according to the protocol capabilities. If no such capability has
54 been announced, fallback to the default of zlib and uncompressed.
55 been announced, fallback to the default of zlib and uncompressed.
55 """
56 """
56 for cap in proto.getprotocaps():
57 for cap in proto.getprotocaps():
57 if cap.startswith(b'comp='):
58 if cap.startswith(b'comp='):
58 return cap[5:].split(b',')
59 return cap[5:].split(b',')
59 return [b'zlib', b'none']
60 return [b'zlib', b'none']
60
61
61
62
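As a concrete example of the capability parsing above (hypothetical value): a client that sent the protocol capability b'comp=zstd,zlib,none' is reported as supporting those engines in that order, while a client that sent no 'comp=' capability falls back to the default.

cap = b'comp=zstd,zlib,none'             # hypothetical client protocol capability
assert cap[5:].split(b',') == [b'zstd', b'zlib', b'none']
default = [b'zlib', b'none']             # used when no 'comp=' capability was sent
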
62 # wire protocol command can either return a string or one of these classes.
63 # wire protocol command can either return a string or one of these classes.
63
64
64
65
65 def getdispatchrepo(repo, proto, command):
66 def getdispatchrepo(repo, proto, command):
66 """Obtain the repo used for processing wire protocol commands.
67 """Obtain the repo used for processing wire protocol commands.
67
68
68 The intent of this function is to serve as a monkeypatch point for
69 The intent of this function is to serve as a monkeypatch point for
69 extensions that need commands to operate on different repo views under
70 extensions that need commands to operate on different repo views under
70 specialized circumstances.
71 specialized circumstances.
71 """
72 """
72 viewconfig = repo.ui.config(b'server', b'view')
73 viewconfig = repo.ui.config(b'server', b'view')
73 return repo.filtered(viewconfig)
74 return repo.filtered(viewconfig)
74
75
75
76
76 def dispatch(repo, proto, command):
77 def dispatch(repo, proto, command):
77 repo = getdispatchrepo(repo, proto, command)
78 repo = getdispatchrepo(repo, proto, command)
78
79
79 func, spec = commands[command]
80 func, spec = commands[command]
80 args = proto.getargs(spec)
81 args = proto.getargs(spec)
81
82
82 return func(repo, proto, *args)
83 return func(repo, proto, *args)
83
84
84
85
85 def options(cmd, keys, others):
86 def options(cmd, keys, others):
86 opts = {}
87 opts = {}
87 for k in keys:
88 for k in keys:
88 if k in others:
89 if k in others:
89 opts[k] = others[k]
90 opts[k] = others[k]
90 del others[k]
91 del others[k]
91 if others:
92 if others:
92 procutil.stderr.write(
93 procutil.stderr.write(
93 b"warning: %s ignored unexpected arguments %s\n"
94 b"warning: %s ignored unexpected arguments %s\n"
94 % (cmd, b",".join(others))
95 % (cmd, b",".join(others))
95 )
96 )
96 return opts
97 return opts
97
98
98
99
99 def bundle1allowed(repo, action):
100 def bundle1allowed(repo, action):
100 """Whether a bundle1 operation is allowed from the server.
101 """Whether a bundle1 operation is allowed from the server.
101
102
102 Priority is:
103 Priority is:
103
104
104 1. server.bundle1gd.<action> (if generaldelta active)
105 1. server.bundle1gd.<action> (if generaldelta active)
105 2. server.bundle1.<action>
106 2. server.bundle1.<action>
106 3. server.bundle1gd (if generaldelta active)
107 3. server.bundle1gd (if generaldelta active)
107 4. server.bundle1
108 4. server.bundle1
108 """
109 """
109 ui = repo.ui
110 ui = repo.ui
110 gd = b'generaldelta' in repo.requirements
111 gd = b'generaldelta' in repo.requirements
111
112
112 if gd:
113 if gd:
113 v = ui.configbool(b'server', b'bundle1gd.%s' % action)
114 v = ui.configbool(b'server', b'bundle1gd.%s' % action)
114 if v is not None:
115 if v is not None:
115 return v
116 return v
116
117
117 v = ui.configbool(b'server', b'bundle1.%s' % action)
118 v = ui.configbool(b'server', b'bundle1.%s' % action)
118 if v is not None:
119 if v is not None:
119 return v
120 return v
120
121
121 if gd:
122 if gd:
122 v = ui.configbool(b'server', b'bundle1gd')
123 v = ui.configbool(b'server', b'bundle1gd')
123 if v is not None:
124 if v is not None:
124 return v
125 return v
125
126
126 return ui.configbool(b'server', b'bundle1')
127 return ui.configbool(b'server', b'bundle1')
127
128
128
129
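To make the priority list above concrete, for bundle1allowed(repo, b'push') on a generaldelta repository the lookups below are tried in order and the first explicitly configured value wins (a sketch of the order only, not new behaviour):

# 1. ui.configbool(b'server', b'bundle1gd.push')
# 2. ui.configbool(b'server', b'bundle1.push')
# 3. ui.configbool(b'server', b'bundle1gd')
# 4. ui.configbool(b'server', b'bundle1')
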
129 commands = wireprototypes.commanddict()
130 commands = wireprototypes.commanddict()
130
131
131
132
132 def wireprotocommand(name, args=None, permission=b'push'):
133 def wireprotocommand(name, args=None, permission=b'push'):
133 """Decorator to declare a wire protocol command.
134 """Decorator to declare a wire protocol command.
134
135
135 ``name`` is the name of the wire protocol command being provided.
136 ``name`` is the name of the wire protocol command being provided.
136
137
137 ``args`` defines the named arguments accepted by the command. It is
138 ``args`` defines the named arguments accepted by the command. It is
138 a space-delimited list of argument names. ``*`` denotes a special value
139 a space-delimited list of argument names. ``*`` denotes a special value
139 that says to accept all named arguments.
140 that says to accept all named arguments.
140
141
141 ``permission`` defines the permission type needed to run this command.
142 ``permission`` defines the permission type needed to run this command.
142 Can be ``push`` or ``pull``. These roughly map to read-write and read-only,
143 Can be ``push`` or ``pull``. These roughly map to read-write and read-only,
143 respectively. Default is to assume command requires ``push`` permissions
144 respectively. Default is to assume command requires ``push`` permissions
144 because otherwise commands not declaring their permissions could modify
145 because otherwise commands not declaring their permissions could modify
145 a repository that is supposed to be read-only.
146 a repository that is supposed to be read-only.
146 """
147 """
147 transports = {
148 transports = {
148 k for k, v in wireprototypes.TRANSPORTS.items() if v[b'version'] == 1
149 k for k, v in wireprototypes.TRANSPORTS.items() if v[b'version'] == 1
149 }
150 }
150
151
151 # Because SSHv2 is a mirror of SSHv1, we allow "batch" commands through to
152 # Because SSHv2 is a mirror of SSHv1, we allow "batch" commands through to
152 # SSHv2.
153 # SSHv2.
153 # TODO undo this hack when SSH is using the unified frame protocol.
154 # TODO undo this hack when SSH is using the unified frame protocol.
154 if name == b'batch':
155 if name == b'batch':
155 transports.add(wireprototypes.SSHV2)
156 transports.add(wireprototypes.SSHV2)
156
157
157 if permission not in (b'push', b'pull'):
158 if permission not in (b'push', b'pull'):
158 raise error.ProgrammingError(
159 raise error.ProgrammingError(
159 b'invalid wire protocol permission; '
160 b'invalid wire protocol permission; '
160 b'got %s; expected "push" or "pull"' % permission
161 b'got %s; expected "push" or "pull"' % permission
161 )
162 )
162
163
163 if args is None:
164 if args is None:
164 args = b''
165 args = b''
165
166
166 if not isinstance(args, bytes):
167 if not isinstance(args, bytes):
167 raise error.ProgrammingError(
168 raise error.ProgrammingError(
168 b'arguments for version 1 commands must be declared as bytes'
169 b'arguments for version 1 commands must be declared as bytes'
169 )
170 )
170
171
171 def register(func):
172 def register(func):
172 if name in commands:
173 if name in commands:
173 raise error.ProgrammingError(
174 raise error.ProgrammingError(
174 b'%s command already registered for version 1' % name
175 b'%s command already registered for version 1' % name
175 )
176 )
176 commands[name] = wireprototypes.commandentry(
177 commands[name] = wireprototypes.commandentry(
177 func, args=args, transports=transports, permission=permission
178 func, args=args, transports=transports, permission=permission
178 )
179 )
179
180
180 return func
181 return func
181
182
182 return register
183 return register
183
184
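As a usage sketch for the decorator described above, here is a hypothetical read-only command; the name 'headcount' is invented for illustration and is not part of the real protocol.

@wireprotocommand(b'headcount', permission=b'pull')
def headcount(repo, proto):
    # Return the number of repository heads as a decimal bytes payload.
    return wireprototypes.bytesresponse(b'%d\n' % len(repo.heads()))
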
184
185
185 # TODO define a more appropriate permissions type to use for this.
186 # TODO define a more appropriate permissions type to use for this.
186 @wireprotocommand(b'batch', b'cmds *', permission=b'pull')
187 @wireprotocommand(b'batch', b'cmds *', permission=b'pull')
187 def batch(repo, proto, cmds, others):
188 def batch(repo, proto, cmds, others):
188 unescapearg = wireprototypes.unescapebatcharg
189 unescapearg = wireprototypes.unescapebatcharg
189 res = []
190 res = []
190 for pair in cmds.split(b';'):
191 for pair in cmds.split(b';'):
191 op, args = pair.split(b' ', 1)
192 op, args = pair.split(b' ', 1)
192 vals = {}
193 vals = {}
193 for a in args.split(b','):
194 for a in args.split(b','):
194 if a:
195 if a:
195 n, v = a.split(b'=')
196 n, v = a.split(b'=')
196 vals[unescapearg(n)] = unescapearg(v)
197 vals[unescapearg(n)] = unescapearg(v)
197 func, spec = commands[op]
198 func, spec = commands[op]
198
199
199 # Validate that client has permissions to perform this command.
200 # Validate that client has permissions to perform this command.
200 perm = commands[op].permission
201 perm = commands[op].permission
201 assert perm in (b'push', b'pull')
202 assert perm in (b'push', b'pull')
202 proto.checkperm(perm)
203 proto.checkperm(perm)
203
204
204 if spec:
205 if spec:
205 keys = spec.split()
206 keys = spec.split()
206 data = {}
207 data = {}
207 for k in keys:
208 for k in keys:
208 if k == b'*':
209 if k == b'*':
209 star = {}
210 star = {}
210 for key in vals.keys():
211 for key in vals.keys():
211 if key not in keys:
212 if key not in keys:
212 star[key] = vals[key]
213 star[key] = vals[key]
213 data[b'*'] = star
214 data[b'*'] = star
214 else:
215 else:
215 data[k] = vals[k]
216 data[k] = vals[k]
216 result = func(repo, proto, *[data[k] for k in keys])
217 result = func(repo, proto, *[data[k] for k in keys])
217 else:
218 else:
218 result = func(repo, proto)
219 result = func(repo, proto)
219 if isinstance(result, wireprototypes.ooberror):
220 if isinstance(result, wireprototypes.ooberror):
220 return result
221 return result
221
222
222 # For now, all batchable commands must return bytesresponse or
223 # For now, all batchable commands must return bytesresponse or
223 # raw bytes (for backwards compatibility).
224 # raw bytes (for backwards compatibility).
224 assert isinstance(result, (wireprototypes.bytesresponse, bytes))
225 assert isinstance(result, (wireprototypes.bytesresponse, bytes))
225 if isinstance(result, wireprototypes.bytesresponse):
226 if isinstance(result, wireprototypes.bytesresponse):
226 result = result.data
227 result = result.data
227 res.append(wireprototypes.escapebatcharg(result))
228 res.append(wireprototypes.escapebatcharg(result))
228
229
229 return wireprototypes.bytesresponse(b';'.join(res))
230 return wireprototypes.bytesresponse(b';'.join(res))
230
231
231
232
232 @wireprotocommand(b'between', b'pairs', permission=b'pull')
233 @wireprotocommand(b'between', b'pairs', permission=b'pull')
233 def between(repo, proto, pairs):
234 def between(repo, proto, pairs):
234 pairs = [wireprototypes.decodelist(p, b'-') for p in pairs.split(b" ")]
235 pairs = [wireprototypes.decodelist(p, b'-') for p in pairs.split(b" ")]
235 r = []
236 r = []
236 for b in repo.between(pairs):
237 for b in repo.between(pairs):
237 r.append(wireprototypes.encodelist(b) + b"\n")
238 r.append(wireprototypes.encodelist(b) + b"\n")
238
239
239 return wireprototypes.bytesresponse(b''.join(r))
240 return wireprototypes.bytesresponse(b''.join(r))
240
241
241
242
242 @wireprotocommand(b'branchmap', permission=b'pull')
243 @wireprotocommand(b'branchmap', permission=b'pull')
243 def branchmap(repo, proto):
244 def branchmap(repo, proto):
244 branchmap = repo.branchmap()
245 branchmap = repo.branchmap()
245 heads = []
246 heads = []
246 for branch, nodes in pycompat.iteritems(branchmap):
247 for branch, nodes in pycompat.iteritems(branchmap):
247 branchname = urlreq.quote(encoding.fromlocal(branch))
248 branchname = urlreq.quote(encoding.fromlocal(branch))
248 branchnodes = wireprototypes.encodelist(nodes)
249 branchnodes = wireprototypes.encodelist(nodes)
249 heads.append(b'%s %s' % (branchname, branchnodes))
250 heads.append(b'%s %s' % (branchname, branchnodes))
250
251
251 return wireprototypes.bytesresponse(b'\n'.join(heads))
252 return wireprototypes.bytesresponse(b'\n'.join(heads))
252
253
253
254
254 @wireprotocommand(b'branches', b'nodes', permission=b'pull')
255 @wireprotocommand(b'branches', b'nodes', permission=b'pull')
255 def branches(repo, proto, nodes):
256 def branches(repo, proto, nodes):
256 nodes = wireprototypes.decodelist(nodes)
257 nodes = wireprototypes.decodelist(nodes)
257 r = []
258 r = []
258 for b in repo.branches(nodes):
259 for b in repo.branches(nodes):
259 r.append(wireprototypes.encodelist(b) + b"\n")
260 r.append(wireprototypes.encodelist(b) + b"\n")
260
261
261 return wireprototypes.bytesresponse(b''.join(r))
262 return wireprototypes.bytesresponse(b''.join(r))
262
263
263
264
264 @wireprotocommand(b'clonebundles', b'', permission=b'pull')
265 @wireprotocommand(b'clonebundles', b'', permission=b'pull')
265 def clonebundles(repo, proto):
266 def clonebundles(repo, proto):
266 """Server command for returning info for available bundles to seed clones.
267 """Server command for returning info for available bundles to seed clones.
267
268
268 Clients will parse this response and determine what bundle to fetch.
269 Clients will parse this response and determine what bundle to fetch.
269
270
270 Extensions may wrap this command to filter or dynamically emit data
271 Extensions may wrap this command to filter or dynamically emit data
271 depending on the request. e.g. you could advertise URLs for the closest
272 depending on the request. e.g. you could advertise URLs for the closest
272 data center given the client's IP address.
273 data center given the client's IP address.
273 """
274 """
274 return wireprototypes.bytesresponse(
275 return wireprototypes.bytesresponse(
275 repo.vfs.tryread(b'clonebundles.manifest')
276 repo.vfs.tryread(b'clonebundles.manifest')
276 )
277 )
277
278
278
279
279 wireprotocaps = [
280 wireprotocaps = [
280 b'lookup',
281 b'lookup',
281 b'branchmap',
282 b'branchmap',
282 b'pushkey',
283 b'pushkey',
283 b'known',
284 b'known',
284 b'getbundle',
285 b'getbundle',
285 b'unbundlehash',
286 b'unbundlehash',
286 ]
287 ]
287
288
288
289
289 def _capabilities(repo, proto):
290 def _capabilities(repo, proto):
290 """return a list of capabilities for a repo
291 """return a list of capabilities for a repo
291
292
292 This function exists to allow extensions to easily wrap capabilities
293 This function exists to allow extensions to easily wrap capabilities
293 computation
294 computation
294
295
295 - returns a list: easy to alter
296 - returns a list: easy to alter
296 - changes done here will be propagated to both `capabilities` and `hello`
297 - changes done here will be propagated to both `capabilities` and `hello`
297 command without any other action needed.
298 command without any other action needed.
298 """
299 """
299 # copy to prevent modification of the global list
300 # copy to prevent modification of the global list
300 caps = list(wireprotocaps)
301 caps = list(wireprotocaps)
301
302
302 # Command of same name as capability isn't exposed to version 1 of
303 # Command of same name as capability isn't exposed to version 1 of
303 # transports. So conditionally add it.
304 # transports. So conditionally add it.
304 if commands.commandavailable(b'changegroupsubset', proto):
305 if commands.commandavailable(b'changegroupsubset', proto):
305 caps.append(b'changegroupsubset')
306 caps.append(b'changegroupsubset')
306
307
307 if streamclone.allowservergeneration(repo):
308 if streamclone.allowservergeneration(repo):
308 if repo.ui.configbool(b'server', b'preferuncompressed'):
309 if repo.ui.configbool(b'server', b'preferuncompressed'):
309 caps.append(b'stream-preferred')
310 caps.append(b'stream-preferred')
310 requiredformats = repo.requirements & repo.supportedformats
311 requiredformats = repo.requirements & repo.supportedformats
311 # if our local revlogs are just revlogv1, add 'stream' cap
312 # if our local revlogs are just revlogv1, add 'stream' cap
312 if not requiredformats - {b'revlogv1'}:
313 if not requiredformats - {b'revlogv1'}:
313 caps.append(b'stream')
314 caps.append(b'stream')
314 # otherwise, add 'streamreqs' detailing our local revlog format
315 # otherwise, add 'streamreqs' detailing our local revlog format
315 else:
316 else:
316 caps.append(b'streamreqs=%s' % b','.join(sorted(requiredformats)))
317 caps.append(b'streamreqs=%s' % b','.join(sorted(requiredformats)))
317 if repo.ui.configbool(b'experimental', b'bundle2-advertise'):
318 if repo.ui.configbool(b'experimental', b'bundle2-advertise'):
318 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=b'server'))
319 capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo, role=b'server'))
319 caps.append(b'bundle2=' + urlreq.quote(capsblob))
320 caps.append(b'bundle2=' + urlreq.quote(capsblob))
320 caps.append(b'unbundle=%s' % b','.join(bundle2.bundlepriority))
321 caps.append(b'unbundle=%s' % b','.join(bundle2.bundlepriority))
321
322
322 if repo.ui.configbool(b'experimental', b'narrow'):
323 if repo.ui.configbool(b'experimental', b'narrow'):
323 caps.append(wireprototypes.NARROWCAP)
324 caps.append(wireprototypes.NARROWCAP)
324 if repo.ui.configbool(b'experimental', b'narrowservebrokenellipses'):
325 if repo.ui.configbool(b'experimental', b'narrowservebrokenellipses'):
325 caps.append(wireprototypes.ELLIPSESCAP)
326 caps.append(wireprototypes.ELLIPSESCAP)
326
327
327 return proto.addcapabilities(repo, caps)
328 return proto.addcapabilities(repo, caps)
328
329
329
330
330 # If you are writing an extension and considering wrapping this function, wrap
331 # If you are writing an extension and considering wrapping this function, wrap
331 # `_capabilities` instead.
332 # `_capabilities` instead.
332 @wireprotocommand(b'capabilities', permission=b'pull')
333 @wireprotocommand(b'capabilities', permission=b'pull')
333 def capabilities(repo, proto):
334 def capabilities(repo, proto):
334 caps = _capabilities(repo, proto)
335 caps = _capabilities(repo, proto)
335 return wireprototypes.bytesresponse(b' '.join(sorted(caps)))
336 return wireprototypes.bytesresponse(b' '.join(sorted(caps)))
336
337
337
338
338 @wireprotocommand(b'changegroup', b'roots', permission=b'pull')
339 @wireprotocommand(b'changegroup', b'roots', permission=b'pull')
339 def changegroup(repo, proto, roots):
340 def changegroup(repo, proto, roots):
340 nodes = wireprototypes.decodelist(roots)
341 nodes = wireprototypes.decodelist(roots)
341 outgoing = discovery.outgoing(
342 outgoing = discovery.outgoing(
342 repo, missingroots=nodes, ancestorsof=repo.heads()
343 repo, missingroots=nodes, ancestorsof=repo.heads()
343 )
344 )
344 cg = changegroupmod.makechangegroup(repo, outgoing, b'01', b'serve')
345 cg = changegroupmod.makechangegroup(repo, outgoing, b'01', b'serve')
345 gen = iter(lambda: cg.read(32768), b'')
346 gen = iter(lambda: cg.read(32768), b'')
346 return wireprototypes.streamres(gen=gen)
347 return wireprototypes.streamres(gen=gen)
347
348
348
349
349 @wireprotocommand(b'changegroupsubset', b'bases heads', permission=b'pull')
350 @wireprotocommand(b'changegroupsubset', b'bases heads', permission=b'pull')
350 def changegroupsubset(repo, proto, bases, heads):
351 def changegroupsubset(repo, proto, bases, heads):
351 bases = wireprototypes.decodelist(bases)
352 bases = wireprototypes.decodelist(bases)
352 heads = wireprototypes.decodelist(heads)
353 heads = wireprototypes.decodelist(heads)
353 outgoing = discovery.outgoing(repo, missingroots=bases, ancestorsof=heads)
354 outgoing = discovery.outgoing(repo, missingroots=bases, ancestorsof=heads)
354 cg = changegroupmod.makechangegroup(repo, outgoing, b'01', b'serve')
355 cg = changegroupmod.makechangegroup(repo, outgoing, b'01', b'serve')
355 gen = iter(lambda: cg.read(32768), b'')
356 gen = iter(lambda: cg.read(32768), b'')
356 return wireprototypes.streamres(gen=gen)
357 return wireprototypes.streamres(gen=gen)
357
358
358
359
359 @wireprotocommand(b'debugwireargs', b'one two *', permission=b'pull')
360 @wireprotocommand(b'debugwireargs', b'one two *', permission=b'pull')
360 def debugwireargs(repo, proto, one, two, others):
361 def debugwireargs(repo, proto, one, two, others):
361 # only accept optional args from the known set
362 # only accept optional args from the known set
362 opts = options(b'debugwireargs', [b'three', b'four'], others)
363 opts = options(b'debugwireargs', [b'three', b'four'], others)
363 return wireprototypes.bytesresponse(
364 return wireprototypes.bytesresponse(
364 repo.debugwireargs(one, two, **pycompat.strkwargs(opts))
365 repo.debugwireargs(one, two, **pycompat.strkwargs(opts))
365 )
366 )
366
367
367
368
368 def find_pullbundle(repo, proto, opts, clheads, heads, common):
369 def find_pullbundle(repo, proto, opts, clheads, heads, common):
369 """Return a file object for the first matching pullbundle.
370 """Return a file object for the first matching pullbundle.
370
371
371 Pullbundles are specified in .hg/pullbundles.manifest similar to
372 Pullbundles are specified in .hg/pullbundles.manifest similar to
372 clonebundles.
373 clonebundles.
373 For each entry, the bundle specification is checked for compatibility:
374 For each entry, the bundle specification is checked for compatibility:
374 - Client features vs the BUNDLESPEC.
375 - Client features vs the BUNDLESPEC.
375 - Revisions shared with the clients vs base revisions of the bundle.
376 - Revisions shared with the clients vs base revisions of the bundle.
376 A bundle can be applied only if all its base revisions are known by
377 A bundle can be applied only if all its base revisions are known by
377 the client.
378 the client.
378 - At least one leaf of the bundle's DAG is missing on the client.
379 - At least one leaf of the bundle's DAG is missing on the client.
379 - Every leaf of the bundle's DAG is part of the node set the client wants.
380 - Every leaf of the bundle's DAG is part of the node set the client wants.
380 E.g. do not send a bundle of all changes if the client wants only
381 E.g. do not send a bundle of all changes if the client wants only
381 one specific branch of many.
382 one specific branch of many.
382 """
383 """
383
384
384 def decodehexstring(s):
385 def decodehexstring(s):
385 return {binascii.unhexlify(h) for h in s.split(b';')}
386 return {binascii.unhexlify(h) for h in s.split(b';')}
386
387
387 manifest = repo.vfs.tryread(b'pullbundles.manifest')
388 manifest = repo.vfs.tryread(b'pullbundles.manifest')
388 if not manifest:
389 if not manifest:
389 return None
390 return None
390 res = exchange.parseclonebundlesmanifest(repo, manifest)
391 res = bundlecaches.parseclonebundlesmanifest(repo, manifest)
391 res = exchange.filterclonebundleentries(repo, res)
392 res = bundlecaches.filterclonebundleentries(repo, res)
392 if not res:
393 if not res:
393 return None
394 return None
394 cl = repo.unfiltered().changelog
395 cl = repo.unfiltered().changelog
395 heads_anc = cl.ancestors([cl.rev(rev) for rev in heads], inclusive=True)
396 heads_anc = cl.ancestors([cl.rev(rev) for rev in heads], inclusive=True)
396 common_anc = cl.ancestors([cl.rev(rev) for rev in common], inclusive=True)
397 common_anc = cl.ancestors([cl.rev(rev) for rev in common], inclusive=True)
397 compformats = clientcompressionsupport(proto)
398 compformats = clientcompressionsupport(proto)
398 for entry in res:
399 for entry in res:
399 comp = entry.get(b'COMPRESSION')
400 comp = entry.get(b'COMPRESSION')
400 altcomp = util.compengines._bundlenames.get(comp)
401 altcomp = util.compengines._bundlenames.get(comp)
401 if comp and comp not in compformats and altcomp not in compformats:
402 if comp and comp not in compformats and altcomp not in compformats:
402 continue
403 continue
403 # No test yet for VERSION, since V2 is supported by any client
404 # No test yet for VERSION, since V2 is supported by any client
404 # that advertises partial pulls
405 # that advertises partial pulls
405 if b'heads' in entry:
406 if b'heads' in entry:
406 try:
407 try:
407 bundle_heads = decodehexstring(entry[b'heads'])
408 bundle_heads = decodehexstring(entry[b'heads'])
408 except TypeError:
409 except TypeError:
409 # Bad heads entry
410 # Bad heads entry
410 continue
411 continue
411 if bundle_heads.issubset(common):
412 if bundle_heads.issubset(common):
412 continue # Nothing new
413 continue # Nothing new
413 if all(cl.rev(rev) in common_anc for rev in bundle_heads):
414 if all(cl.rev(rev) in common_anc for rev in bundle_heads):
414 continue # Still nothing new
415 continue # Still nothing new
415 if any(
416 if any(
416 cl.rev(rev) not in heads_anc and cl.rev(rev) not in common_anc
417 cl.rev(rev) not in heads_anc and cl.rev(rev) not in common_anc
417 for rev in bundle_heads
418 for rev in bundle_heads
418 ):
419 ):
419 continue
420 continue
420 if b'bases' in entry:
421 if b'bases' in entry:
421 try:
422 try:
422 bundle_bases = decodehexstring(entry[b'bases'])
423 bundle_bases = decodehexstring(entry[b'bases'])
423 except TypeError:
424 except TypeError:
424 # Bad bases entry
425 # Bad bases entry
425 continue
426 continue
426 if not all(cl.rev(rev) in common_anc for rev in bundle_bases):
427 if not all(cl.rev(rev) in common_anc for rev in bundle_bases):
427 continue
428 continue
428 path = entry[b'URL']
429 path = entry[b'URL']
429 repo.ui.debug(b'sending pullbundle "%s"\n' % path)
430 repo.ui.debug(b'sending pullbundle "%s"\n' % path)
430 try:
431 try:
431 return repo.vfs.open(path)
432 return repo.vfs.open(path)
432 except IOError:
433 except IOError:
433 repo.ui.debug(b'pullbundle "%s" not accessible\n' % path)
434 repo.ui.debug(b'pullbundle "%s" not accessible\n' % path)
434 continue
435 continue
435 return None
436 return None
436
437
437
438
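For reference, a hypothetical .hg/pullbundles.manifest entry compatible with the checks above could look like the following (shown as comments; 'heads' and 'bases' carry ';'-separated 40-character hex node ids, which is exactly what decodehexstring() expects):

# pullbundles/default.hg BUNDLESPEC=zstd-v2 heads=<hexnode1>;<hexnode2> bases=<hexnode3>
#
# The bundle is served only when every node in 'bases' is already known to
# the client (part of common) and at least one node in 'heads' is both
# missing from the client and wanted by it.
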
438 @wireprotocommand(b'getbundle', b'*', permission=b'pull')
439 @wireprotocommand(b'getbundle', b'*', permission=b'pull')
439 def getbundle(repo, proto, others):
440 def getbundle(repo, proto, others):
440 opts = options(
441 opts = options(
441 b'getbundle', wireprototypes.GETBUNDLE_ARGUMENTS.keys(), others
442 b'getbundle', wireprototypes.GETBUNDLE_ARGUMENTS.keys(), others
442 )
443 )
443 for k, v in pycompat.iteritems(opts):
444 for k, v in pycompat.iteritems(opts):
444 keytype = wireprototypes.GETBUNDLE_ARGUMENTS[k]
445 keytype = wireprototypes.GETBUNDLE_ARGUMENTS[k]
445 if keytype == b'nodes':
446 if keytype == b'nodes':
446 opts[k] = wireprototypes.decodelist(v)
447 opts[k] = wireprototypes.decodelist(v)
447 elif keytype == b'csv':
448 elif keytype == b'csv':
448 opts[k] = list(v.split(b','))
449 opts[k] = list(v.split(b','))
449 elif keytype == b'scsv':
450 elif keytype == b'scsv':
450 opts[k] = set(v.split(b','))
451 opts[k] = set(v.split(b','))
451 elif keytype == b'boolean':
452 elif keytype == b'boolean':
452 # Client should serialize False as '0', which is a non-empty string
453 # Client should serialize False as '0', which is a non-empty string
453 # so it evaluates as a True bool.
454 # so it evaluates as a True bool.
454 if v == b'0':
455 if v == b'0':
455 opts[k] = False
456 opts[k] = False
456 else:
457 else:
457 opts[k] = bool(v)
458 opts[k] = bool(v)
458 elif keytype != b'plain':
459 elif keytype != b'plain':
459 raise KeyError(b'unknown getbundle option type %s' % keytype)
460 raise KeyError(b'unknown getbundle option type %s' % keytype)
460
461
461 if not bundle1allowed(repo, b'pull'):
462 if not bundle1allowed(repo, b'pull'):
462 if not exchange.bundle2requested(opts.get(b'bundlecaps')):
463 if not exchange.bundle2requested(opts.get(b'bundlecaps')):
463 if proto.name == b'http-v1':
464 if proto.name == b'http-v1':
464 return wireprototypes.ooberror(bundle2required)
465 return wireprototypes.ooberror(bundle2required)
465 raise error.Abort(bundle2requiredmain, hint=bundle2requiredhint)
466 raise error.Abort(bundle2requiredmain, hint=bundle2requiredhint)
466
467
467 try:
468 try:
468 clheads = set(repo.changelog.heads())
469 clheads = set(repo.changelog.heads())
469 heads = set(opts.get(b'heads', set()))
470 heads = set(opts.get(b'heads', set()))
470 common = set(opts.get(b'common', set()))
471 common = set(opts.get(b'common', set()))
471 common.discard(nullid)
472 common.discard(nullid)
472 if (
473 if (
473 repo.ui.configbool(b'server', b'pullbundle')
474 repo.ui.configbool(b'server', b'pullbundle')
474 and b'partial-pull' in proto.getprotocaps()
475 and b'partial-pull' in proto.getprotocaps()
475 ):
476 ):
476 # Check if a pre-built bundle covers this request.
477 # Check if a pre-built bundle covers this request.
477 bundle = find_pullbundle(repo, proto, opts, clheads, heads, common)
478 bundle = find_pullbundle(repo, proto, opts, clheads, heads, common)
478 if bundle:
479 if bundle:
479 return wireprototypes.streamres(
480 return wireprototypes.streamres(
480 gen=util.filechunkiter(bundle), prefer_uncompressed=True
481 gen=util.filechunkiter(bundle), prefer_uncompressed=True
481 )
482 )
482
483
483 if repo.ui.configbool(b'server', b'disablefullbundle'):
484 if repo.ui.configbool(b'server', b'disablefullbundle'):
484 # Check to see if this is a full clone.
485 # Check to see if this is a full clone.
485 changegroup = opts.get(b'cg', True)
486 changegroup = opts.get(b'cg', True)
486 if changegroup and not common and clheads == heads:
487 if changegroup and not common and clheads == heads:
487 raise error.Abort(
488 raise error.Abort(
488 _(b'server has pull-based clones disabled'),
489 _(b'server has pull-based clones disabled'),
489 hint=_(b'remove --pull if specified or upgrade Mercurial'),
490 hint=_(b'remove --pull if specified or upgrade Mercurial'),
490 )
491 )
491
492
492 info, chunks = exchange.getbundlechunks(
493 info, chunks = exchange.getbundlechunks(
493 repo, b'serve', **pycompat.strkwargs(opts)
494 repo, b'serve', **pycompat.strkwargs(opts)
494 )
495 )
495 prefercompressed = info.get(b'prefercompressed', True)
496 prefercompressed = info.get(b'prefercompressed', True)
496 except error.Abort as exc:
497 except error.Abort as exc:
497 # cleanly forward Abort error to the client
498 # cleanly forward Abort error to the client
498 if not exchange.bundle2requested(opts.get(b'bundlecaps')):
499 if not exchange.bundle2requested(opts.get(b'bundlecaps')):
499 if proto.name == b'http-v1':
500 if proto.name == b'http-v1':
500 return wireprototypes.ooberror(exc.message + b'\n')
501 return wireprototypes.ooberror(exc.message + b'\n')
501 raise # cannot do better for bundle1 + ssh
502 raise # cannot do better for bundle1 + ssh
502 # a bundle2 request expects a bundle2 reply
503 # a bundle2 request expects a bundle2 reply
503 bundler = bundle2.bundle20(repo.ui)
504 bundler = bundle2.bundle20(repo.ui)
504 manargs = [(b'message', exc.message)]
505 manargs = [(b'message', exc.message)]
505 advargs = []
506 advargs = []
506 if exc.hint is not None:
507 if exc.hint is not None:
507 advargs.append((b'hint', exc.hint))
508 advargs.append((b'hint', exc.hint))
508 bundler.addpart(bundle2.bundlepart(b'error:abort', manargs, advargs))
509 bundler.addpart(bundle2.bundlepart(b'error:abort', manargs, advargs))
509 chunks = bundler.getchunks()
510 chunks = bundler.getchunks()
510 prefercompressed = False
511 prefercompressed = False
511
512
512 return wireprototypes.streamres(
513 return wireprototypes.streamres(
513 gen=chunks, prefer_uncompressed=not prefercompressed
514 gen=chunks, prefer_uncompressed=not prefercompressed
514 )
515 )
515
516
516
517
517 @wireprotocommand(b'heads', permission=b'pull')
518 @wireprotocommand(b'heads', permission=b'pull')
518 def heads(repo, proto):
519 def heads(repo, proto):
519 h = repo.heads()
520 h = repo.heads()
520 return wireprototypes.bytesresponse(wireprototypes.encodelist(h) + b'\n')
521 return wireprototypes.bytesresponse(wireprototypes.encodelist(h) + b'\n')
521
522
522
523
523 @wireprotocommand(b'hello', permission=b'pull')
524 @wireprotocommand(b'hello', permission=b'pull')
524 def hello(repo, proto):
525 def hello(repo, proto):
525 """Called as part of SSH handshake to obtain server info.
526 """Called as part of SSH handshake to obtain server info.
526
527
527 Returns a list of lines describing interesting things about the
528 Returns a list of lines describing interesting things about the
528 server, in an RFC822-like format.
529 server, in an RFC822-like format.
529
530
530 Currently, the only one defined is ``capabilities``, which consists of a
531 Currently, the only one defined is ``capabilities``, which consists of a
531 line of space separated tokens describing server abilities:
532 line of space separated tokens describing server abilities:
532
533
533 capabilities: <token0> <token1> <token2>
534 capabilities: <token0> <token1> <token2>
534 """
535 """
535 caps = capabilities(repo, proto).data
536 caps = capabilities(repo, proto).data
536 return wireprototypes.bytesresponse(b'capabilities: %s\n' % caps)
537 return wireprototypes.bytesresponse(b'capabilities: %s\n' % caps)
537
538
538
539
539 @wireprotocommand(b'listkeys', b'namespace', permission=b'pull')
540 @wireprotocommand(b'listkeys', b'namespace', permission=b'pull')
540 def listkeys(repo, proto, namespace):
541 def listkeys(repo, proto, namespace):
541 d = sorted(repo.listkeys(encoding.tolocal(namespace)).items())
542 d = sorted(repo.listkeys(encoding.tolocal(namespace)).items())
542 return wireprototypes.bytesresponse(pushkeymod.encodekeys(d))
543 return wireprototypes.bytesresponse(pushkeymod.encodekeys(d))
543
544
544
545
545 @wireprotocommand(b'lookup', b'key', permission=b'pull')
546 @wireprotocommand(b'lookup', b'key', permission=b'pull')
546 def lookup(repo, proto, key):
547 def lookup(repo, proto, key):
547 try:
548 try:
548 k = encoding.tolocal(key)
549 k = encoding.tolocal(key)
549 n = repo.lookup(k)
550 n = repo.lookup(k)
550 r = hex(n)
551 r = hex(n)
551 success = 1
552 success = 1
552 except Exception as inst:
553 except Exception as inst:
553 r = stringutil.forcebytestr(inst)
554 r = stringutil.forcebytestr(inst)
554 success = 0
555 success = 0
555 return wireprototypes.bytesresponse(b'%d %s\n' % (success, r))
556 return wireprototypes.bytesresponse(b'%d %s\n' % (success, r))
556
557
557
558
558 @wireprotocommand(b'known', b'nodes *', permission=b'pull')
559 @wireprotocommand(b'known', b'nodes *', permission=b'pull')
559 def known(repo, proto, nodes, others):
560 def known(repo, proto, nodes, others):
560 v = b''.join(
561 v = b''.join(
561 b and b'1' or b'0' for b in repo.known(wireprototypes.decodelist(nodes))
562 b and b'1' or b'0' for b in repo.known(wireprototypes.decodelist(nodes))
562 )
563 )
563 return wireprototypes.bytesresponse(v)
564 return wireprototypes.bytesresponse(v)
564
565
565
566
@wireprotocommand(b'protocaps', b'caps', permission=b'pull')
def protocaps(repo, proto, caps):
    if proto.name == wireprototypes.SSHV1:
        proto._protocaps = set(caps.split(b' '))
    return wireprototypes.bytesresponse(b'OK')


@wireprotocommand(b'pushkey', b'namespace key old new', permission=b'push')
def pushkey(repo, proto, namespace, key, old, new):
    # compatibility with pre-1.8 clients which were accidentally
    # sending raw binary nodes rather than utf-8-encoded hex
    if len(new) == 20 and stringutil.escapestr(new) != new:
        # looks like it could be a binary node
        try:
            new.decode('utf-8')
            new = encoding.tolocal(new)  # but cleanly decodes as UTF-8
        except UnicodeDecodeError:
            pass  # binary, leave unmodified
    else:
        new = encoding.tolocal(new)  # normal path

    with proto.mayberedirectstdio() as output:
        r = (
            repo.pushkey(
                encoding.tolocal(namespace),
                encoding.tolocal(key),
                encoding.tolocal(old),
                new,
            )
            or False
        )

    output = output.getvalue() if output else b''
    return wireprototypes.bytesresponse(b'%d\n%s' % (int(r), output))


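A minimal sketch of the pre-1.8 compatibility heuristic used in pushkey above: a 20-byte value that stringutil.escapestr() would alter is treated as a possible raw binary node, while 40-character hex nodes never pass the length test. The sample values are invented.

from mercurial.utils import stringutil

hexnode = b'0' * 40  # hex nodes are 40 bytes long, so the length test fails
binnode = b'\x00' * 20  # 20 raw bytes; escapestr() rewrites the NUL bytes
assert not (len(hexnode) == 20 and stringutil.escapestr(hexnode) != hexnode)
assert len(binnode) == 20 and stringutil.escapestr(binnode) != binnode
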
@wireprotocommand(b'stream_out', permission=b'pull')
def stream(repo, proto):
    '''If the server supports streaming clone, it advertises the "stream"
    capability with a value representing the version and flags of the repo
    it is serving. Client checks to see if it understands the format.
    '''
    return wireprototypes.streamreslegacy(streamclone.generatev1wireproto(repo))


@wireprotocommand(b'unbundle', b'heads', permission=b'push')
def unbundle(repo, proto, heads):
    their_heads = wireprototypes.decodelist(heads)

    with proto.mayberedirectstdio() as output:
        try:
            exchange.check_heads(repo, their_heads, b'preparing changes')
            cleanup = lambda: None
            try:
                payload = proto.getpayload()
                if repo.ui.configbool(b'server', b'streamunbundle'):

                    def cleanup():
                        # Ensure that the full payload is consumed, so
                        # that the connection doesn't contain trailing garbage.
                        for p in payload:
                            pass

                    fp = util.chunkbuffer(payload)
                else:
                    # write bundle data to temporary file as it can be big
                    fp, tempname = None, None

                    def cleanup():
                        if fp:
                            fp.close()
                        if tempname:
                            os.unlink(tempname)

                    fd, tempname = pycompat.mkstemp(prefix=b'hg-unbundle-')
                    repo.ui.debug(
                        b'redirecting incoming bundle to %s\n' % tempname
                    )
                    fp = os.fdopen(fd, pycompat.sysstr(b'wb+'))
                    for p in payload:
                        fp.write(p)
                    fp.seek(0)

                gen = exchange.readbundle(repo.ui, fp, None)
                if isinstance(
                    gen, changegroupmod.cg1unpacker
                ) and not bundle1allowed(repo, b'push'):
                    if proto.name == b'http-v1':
                        # need to special case http because stderr do not get to
                        # the http client on failed push so we need to abuse
                        # some other error type to make sure the message get to
                        # the user.
                        return wireprototypes.ooberror(bundle2required)
                    raise error.Abort(
                        bundle2requiredmain, hint=bundle2requiredhint
                    )

                r = exchange.unbundle(
                    repo, gen, their_heads, b'serve', proto.client()
                )
                if util.safehasattr(r, b'addpart'):
                    # The return looks streamable, we are in the bundle2 case
                    # and should return a stream.
                    return wireprototypes.streamreslegacy(gen=r.getchunks())
                return wireprototypes.pushres(
                    r, output.getvalue() if output else b''
                )

            finally:
                cleanup()

        except (error.BundleValueError, error.Abort, error.PushRaced) as exc:
            # handle non-bundle2 case first
            if not getattr(exc, 'duringunbundle2', False):
                try:
                    raise
                except error.Abort as exc:
                    # The old code we moved used procutil.stderr directly.
                    # We did not change it to minimise code change.
                    # This need to be moved to something proper.
                    # Feel free to do it.
                    procutil.stderr.write(b"abort: %s\n" % exc.message)
                    if exc.hint is not None:
                        procutil.stderr.write(b"(%s)\n" % exc.hint)
                    procutil.stderr.flush()
                    return wireprototypes.pushres(
                        0, output.getvalue() if output else b''
                    )
                except error.PushRaced:
                    return wireprototypes.pusherr(
                        pycompat.bytestr(exc),
                        output.getvalue() if output else b'',
                    )

            bundler = bundle2.bundle20(repo.ui)
            for out in getattr(exc, '_bundle2salvagedoutput', ()):
                bundler.addpart(out)
            try:
                try:
                    raise
                except error.PushkeyFailed as exc:
                    # check client caps
                    remotecaps = getattr(exc, '_replycaps', None)
                    if (
                        remotecaps is not None
                        and b'pushkey' not in remotecaps.get(b'error', ())
                    ):
                        # no support remote side, fallback to Abort handler.
                        raise
                    part = bundler.newpart(b'error:pushkey')
                    part.addparam(b'in-reply-to', exc.partid)
                    if exc.namespace is not None:
                        part.addparam(
                            b'namespace', exc.namespace, mandatory=False
                        )
                    if exc.key is not None:
                        part.addparam(b'key', exc.key, mandatory=False)
                    if exc.new is not None:
                        part.addparam(b'new', exc.new, mandatory=False)
                    if exc.old is not None:
                        part.addparam(b'old', exc.old, mandatory=False)
                    if exc.ret is not None:
                        part.addparam(b'ret', exc.ret, mandatory=False)
                except error.BundleValueError as exc:
                    errpart = bundler.newpart(b'error:unsupportedcontent')
                    if exc.parttype is not None:
                        errpart.addparam(b'parttype', exc.parttype)
                    if exc.params:
                        errpart.addparam(b'params', b'\0'.join(exc.params))
                except error.Abort as exc:
                    manargs = [(b'message', exc.message)]
                    advargs = []
                    if exc.hint is not None:
                        advargs.append((b'hint', exc.hint))
                    bundler.addpart(
                        bundle2.bundlepart(b'error:abort', manargs, advargs)
                    )
                except error.PushRaced as exc:
                    bundler.newpart(
                        b'error:pushraced',
                        [(b'message', stringutil.forcebytestr(exc))],
                    )
            return wireprototypes.streamreslegacy(gen=bundler.getchunks())
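
A hedged sketch of the configuration switch checked near the top of unbundle: with server.streamunbundle enabled the payload is read straight off the connection, otherwise it is spooled to a temporary file first, which is the default since bundles can be large. The equivalent hgrc setting is streamunbundle = True under [server]; the snippet below is an editorial illustration, not code from this change.

from mercurial import ui as uimod

ui = uimod.ui.load()
ui.setconfig(b'server', b'streamunbundle', b'yes', b'example')
assert ui.configbool(b'server', b'streamunbundle')
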
@@ -1,149 +1,149 b''
# coding=UTF-8

from __future__ import absolute_import

import base64
import zlib

from mercurial import (
+    bundlecaches,
    changegroup,
-    exchange,
    extensions,
    revlog,
    util,
)
from mercurial.revlogutils import flagutil

# Test only: These flags are defined here only in the context of testing the
# behavior of the flag processor. The canonical way to add flags is to get in
# touch with the community and make them known in revlog.
REVIDX_NOOP = 1 << 3
REVIDX_BASE64 = 1 << 2
REVIDX_GZIP = 1 << 1
REVIDX_FAIL = 1


def validatehash(self, text):
    return True


def bypass(self, text):
    return False


def noopdonothing(self, text, sidedata):
    return (text, True)


def noopdonothingread(self, text):
    return (text, True, {})


def b64encode(self, text, sidedata):
    return (base64.b64encode(text), False)


def b64decode(self, text):
    return (base64.b64decode(text), True, {})


def gzipcompress(self, text, sidedata):
    return (zlib.compress(text), False)


def gzipdecompress(self, text):
    return (zlib.decompress(text), True, {})


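As an aside, not part of the extension, the read/write pair registered above for REVIDX_GZIP composes to the identity on revision text, which is the behaviour the flag-processor contract expects; the sample text is invented.

import zlib

raw = b'[GZIP] some revision text'
stored, validatehash = zlib.compress(raw), False  # what gzipcompress() returns
recovered, _, _ = zlib.decompress(stored), True, {}  # what gzipdecompress() returns
assert recovered == raw
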
def supportedoutgoingversions(orig, repo):
    versions = orig(repo)
    versions.discard(b'01')
    versions.discard(b'02')
    versions.add(b'03')
    return versions


def allsupportedversions(orig, ui):
    versions = orig(ui)
    versions.add(b'03')
    return versions


def makewrappedfile(obj):
    class wrappedfile(obj.__class__):
        def addrevision(
            self,
            text,
            transaction,
            link,
            p1,
            p2,
            cachedelta=None,
            node=None,
            flags=flagutil.REVIDX_DEFAULT_FLAGS,
        ):
            if b'[NOOP]' in text:
                flags |= REVIDX_NOOP

            if b'[BASE64]' in text:
                flags |= REVIDX_BASE64

            if b'[GZIP]' in text:
                flags |= REVIDX_GZIP

            # This addrevision wrapper is meant to add a flag we will not have
            # transforms registered for, ensuring we handle this error case.
            if b'[FAIL]' in text:
                flags |= REVIDX_FAIL

            return super(wrappedfile, self).addrevision(
                text,
                transaction,
                link,
                p1,
                p2,
                cachedelta=cachedelta,
                node=node,
                flags=flags,
            )

    obj.__class__ = wrappedfile


def reposetup(ui, repo):
    class wrappingflagprocessorrepo(repo.__class__):
        def file(self, f):
            orig = super(wrappingflagprocessorrepo, self).file(f)
            makewrappedfile(orig)
            return orig

    repo.__class__ = wrappingflagprocessorrepo


def extsetup(ui):
    # Enable changegroup3 for flags to be sent over the wire
    wrapfunction = extensions.wrapfunction
    wrapfunction(
        changegroup, 'supportedoutgoingversions', supportedoutgoingversions
    )
    wrapfunction(changegroup, 'allsupportedversions', allsupportedversions)

    # Teach revlog about our test flags
    flags = [REVIDX_NOOP, REVIDX_BASE64, REVIDX_GZIP, REVIDX_FAIL]
    flagutil.REVIDX_KNOWN_FLAGS |= util.bitsfrom(flags)
    revlog.REVIDX_FLAGS_ORDER.extend(flags)

    # Teach exchange to use changegroup 3
-    for k in exchange._bundlespeccontentopts.keys():
-        exchange._bundlespeccontentopts[k][b"cg.version"] = b"03"
+    for k in bundlecaches._bundlespeccontentopts.keys():
+        bundlecaches._bundlespeccontentopts[k][b"cg.version"] = b"03"

    # Register flag processors for each extension
    flagutil.addflagprocessor(
        REVIDX_NOOP, (noopdonothingread, noopdonothing, validatehash,)
    )
    flagutil.addflagprocessor(
        REVIDX_BASE64, (b64decode, b64encode, bypass,),
    )
    flagutil.addflagprocessor(
        REVIDX_GZIP, (gzipdecompress, gzipcompress, bypass)
    )
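
For context, a hedged sketch of what the updated loop relies on: the per-bundlespec content options now live in mercurial.bundlecaches, so the test extension forces changegroup 03 by patching the table at its new home, exactly as the two changed lines above do. The b'v2' key used here is one of the named bundlespecs; treat the snippet as an assumption-laden illustration rather than part of the change.

from mercurial import bundlecaches

# Force changegroup 03 for the 'v2' bundlespec, mirroring extsetup() above.
bundlecaches._bundlespeccontentopts[b'v2'][b'cg.version'] = b'03'
assert bundlecaches._bundlespeccontentopts[b'v2'][b'cg.version'] == b'03'
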